-- cgit 1.2.3-korg From 8c51a2f78333e3217dac4345955f89fc3867a503 Mon Sep 17 00:00:00 2001 From: Pavel Aharoni Date: Wed, 7 Jun 2017 17:24:33 +0300 Subject: [SDC-28] JTOSCA initial commit Change-Id: Ic0ede56c1cc513e632c36954a4227317ae357020 Signed-off-by: Pavel Aharoni --- .gitignore | 20 + .gitreview | 4 + LICENSE.TXT | 21 + README.md | 28 + pom.xml | 212 +++++ .../openecomp/sdc/toscaparser/api/Capability.java | 121 +++ .../openecomp/sdc/toscaparser/api/DataEntity.java | 450 +++++++++ .../sdc/toscaparser/api/DataEntity.java.orig | 453 +++++++++ .../sdc/toscaparser/api/EntityTemplate.java | 832 ++++++++++++++++ .../org/openecomp/sdc/toscaparser/api/Group.java | 137 +++ .../sdc/toscaparser/api/ImportsLoader.java | 728 ++++++++++++++ .../sdc/toscaparser/api/NodeTemplate.java | 755 +++++++++++++++ .../org/openecomp/sdc/toscaparser/api/Policy.java | 187 ++++ .../openecomp/sdc/toscaparser/api/Property.java | 177 ++++ .../sdc/toscaparser/api/RelationshipTemplate.java | 199 ++++ .../openecomp/sdc/toscaparser/api/Repository.java | 117 +++ .../sdc/toscaparser/api/SubstitutionMappings.java | 520 ++++++++++ .../sdc/toscaparser/api/TopologyTemplate.java | 857 +++++++++++++++++ .../sdc/toscaparser/api/TopologyTemplate.java.orig | 857 +++++++++++++++++ .../openecomp/sdc/toscaparser/api/ToscaGraph.java | 109 +++ .../sdc/toscaparser/api/ToscaTemplate.java | 1002 ++++++++++++++++++++ .../openecomp/sdc/toscaparser/api/Triggers.java | 183 ++++ .../sdc/toscaparser/api/UnsupportedType.java | 78 ++ .../toscaparser/api/common/ExceptionCollector.java | 122 +++ .../toscaparser/api/common/JToscaException.java | 27 + .../sdc/toscaparser/api/common/TOSCAException.java | 39 + .../toscaparser/api/elements/ArtifactTypeDef.java | 105 ++ .../sdc/toscaparser/api/elements/AttributeDef.java | 40 + .../api/elements/CapabilityTypeDef.java | 222 +++++ .../sdc/toscaparser/api/elements/DataType.java | 116 +++ .../sdc/toscaparser/api/elements/EntityType.java | 418 ++++++++ 
.../sdc/toscaparser/api/elements/GroupType.java | 215 +++++ .../toscaparser/api/elements/InterfacesDef.java | 228 +++++ .../sdc/toscaparser/api/elements/Metadata.java | 35 + .../sdc/toscaparser/api/elements/NodeType.java | 523 ++++++++++ .../sdc/toscaparser/api/elements/PolicyType.java | 290 ++++++ .../sdc/toscaparser/api/elements/PortSpec.java | 160 ++++ .../sdc/toscaparser/api/elements/PropertyDef.java | 231 +++++ .../toscaparser/api/elements/RelationshipType.java | 103 ++ .../sdc/toscaparser/api/elements/ScalarUnit.java | 262 +++++ .../api/elements/ScalarUnitFrequency.java | 14 + .../toscaparser/api/elements/ScalarUnitSize.java | 19 + .../toscaparser/api/elements/ScalarUnitTime.java | 17 + .../api/elements/StatefulEntityType.java | 220 +++++ .../toscaparser/api/elements/TypeValidation.java | 151 +++ .../api/elements/constraints/Constraint.java | 237 +++++ .../api/elements/constraints/Equal.java | 61 ++ .../api/elements/constraints/GreaterOrEqual.java | 113 +++ .../api/elements/constraints/GreaterThan.java | 102 ++ .../api/elements/constraints/InRange.java | 171 ++++ .../api/elements/constraints/Length.java | 79 ++ .../api/elements/constraints/LessOrEqual.java | 106 +++ .../api/elements/constraints/LessThan.java | 104 ++ .../api/elements/constraints/MaxLength.java | 90 ++ .../api/elements/constraints/MinLength.java | 90 ++ .../api/elements/constraints/Pattern.java | 96 ++ .../api/elements/constraints/Schema.java | 278 ++++++ .../api/elements/constraints/Schema.java.orig | 281 ++++++ .../api/elements/constraints/ValidValues.java | 84 ++ .../sdc/toscaparser/api/extensions/ExtTools.java | 210 ++++ .../sdc/toscaparser/api/functions/Concat.java | 77 ++ .../sdc/toscaparser/api/functions/Function.java | 191 ++++ .../toscaparser/api/functions/GetAttribute.java | 535 +++++++++++ .../sdc/toscaparser/api/functions/GetInput.java | 110 +++ .../api/functions/GetOperationOutput.java | 225 +++++ .../sdc/toscaparser/api/functions/GetProperty.java | 636 +++++++++++++ 
.../sdc/toscaparser/api/functions/Token.java | 112 +++ .../sdc/toscaparser/api/parameters/Input.java | 226 +++++ .../sdc/toscaparser/api/parameters/Output.java | 109 +++ .../openecomp/sdc/toscaparser/api/prereq/CSAR.java | 782 +++++++++++++++ .../sdc/toscaparser/api/prereq/CSAR.java.orig | 767 +++++++++++++++ .../sdc/toscaparser/api/utils/CopyUtils.java | 29 + .../sdc/toscaparser/api/utils/DumpUtils.java | 55 ++ .../toscaparser/api/utils/JToscaErrorCodes.java | 32 + .../api/utils/TOSCAVersionProperty.java | 182 ++++ .../toscaparser/api/utils/ThreadLocalsHolder.java | 24 + .../sdc/toscaparser/api/utils/UrlUtils.java | 123 +++ .../sdc/toscaparser/api/utils/ValidateUtils.java | 409 ++++++++ src/main/resources/TOSCA_definition_1_0.yaml | 967 +++++++++++++++++++ .../extensions/nfv/TOSCA_nfv_definition_1_0.yaml | 240 +++++ src/main/resources/extensions/nfv/nfv.py | 19 + .../JToscaMetadataParse.java | 26 + src/test/resources/csars/csar_hello_world.csar | Bin 0 -> 936 bytes .../resources/csars/service-ServiceFdnt-csar.csar | Bin 0 -> 40171 bytes version.properties | 13 + 85 files changed, 19595 insertions(+) create mode 100644 .gitignore create mode 100644 .gitreview create mode 100644 LICENSE.TXT create mode 100644 README.md create mode 100644 pom.xml create mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/Capability.java create mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/DataEntity.java create mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/DataEntity.java.orig create mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/EntityTemplate.java create mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/Group.java create mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/ImportsLoader.java create mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/NodeTemplate.java create mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/Policy.java create mode 100644 
src/main/java/org/openecomp/sdc/toscaparser/api/Property.java create mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/RelationshipTemplate.java create mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/Repository.java create mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/SubstitutionMappings.java create mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/TopologyTemplate.java create mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/TopologyTemplate.java.orig create mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/ToscaGraph.java create mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/ToscaTemplate.java create mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/Triggers.java create mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/UnsupportedType.java create mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/common/ExceptionCollector.java create mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/common/JToscaException.java create mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/common/TOSCAException.java create mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/elements/ArtifactTypeDef.java create mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/elements/AttributeDef.java create mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/elements/CapabilityTypeDef.java create mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/elements/DataType.java create mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/elements/EntityType.java create mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/elements/GroupType.java create mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/elements/InterfacesDef.java create mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/elements/Metadata.java create mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/elements/NodeType.java create mode 100644 
src/main/java/org/openecomp/sdc/toscaparser/api/elements/PolicyType.java create mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/elements/PortSpec.java create mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/elements/PropertyDef.java create mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/elements/RelationshipType.java create mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/elements/ScalarUnit.java create mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/elements/ScalarUnitFrequency.java create mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/elements/ScalarUnitSize.java create mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/elements/ScalarUnitTime.java create mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/elements/StatefulEntityType.java create mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/elements/TypeValidation.java create mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Constraint.java create mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Equal.java create mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/GreaterOrEqual.java create mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/GreaterThan.java create mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/InRange.java create mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Length.java create mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/LessOrEqual.java create mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/LessThan.java create mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/MaxLength.java create mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/MinLength.java create mode 100644 
src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Pattern.java create mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Schema.java create mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Schema.java.orig create mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/ValidValues.java create mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/extensions/ExtTools.java create mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/functions/Concat.java create mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/functions/Function.java create mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetAttribute.java create mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetInput.java create mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetOperationOutput.java create mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetProperty.java create mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/functions/Token.java create mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/parameters/Input.java create mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/parameters/Output.java create mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/prereq/CSAR.java create mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/prereq/CSAR.java.orig create mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/utils/CopyUtils.java create mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/utils/DumpUtils.java create mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/utils/JToscaErrorCodes.java create mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/utils/TOSCAVersionProperty.java create mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/utils/ThreadLocalsHolder.java create mode 100644 
src/main/java/org/openecomp/sdc/toscaparser/api/utils/UrlUtils.java create mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/utils/ValidateUtils.java create mode 100644 src/main/resources/TOSCA_definition_1_0.yaml create mode 100644 src/main/resources/extensions/nfv/TOSCA_nfv_definition_1_0.yaml create mode 100644 src/main/resources/extensions/nfv/nfv.py create mode 100644 src/test/java/org.openecomp.sdc.toscaparser/JToscaMetadataParse.java create mode 100644 src/test/resources/csars/csar_hello_world.csar create mode 100644 src/test/resources/csars/service-ServiceFdnt-csar.csar create mode 100644 version.properties diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..59bfdd1 --- /dev/null +++ b/.gitignore @@ -0,0 +1,20 @@ +# Eclipse +.classpath +.project +.settings/ + +# Maven +log/ +target/ + +# Package Files # +*.jar +*.war +*.ear + +# Other +*.class +*.orig +.idea/* +/bin/ +*.iml diff --git a/.gitreview b/.gitreview new file mode 100644 index 0000000..369108b --- /dev/null +++ b/.gitreview @@ -0,0 +1,4 @@ +[gerrit] +host=gerrit.onap.org +port=29418 +project=sdc/jtosca.git \ No newline at end of file diff --git a/LICENSE.TXT b/LICENSE.TXT new file mode 100644 index 0000000..724329f --- /dev/null +++ b/LICENSE.TXT @@ -0,0 +1,21 @@ +/* +* ============LICENSE_START========================================== +* =================================================================== +* Copyright © 2017 AT&T Intellectual Property. +* Copyright © 2017 Amdocs +* All rights reserved. +* =================================================================== +* Licensed under the Apache License, Version 2.0 (the "License"); +* you may not use this file except in compliance with the License. 
+* You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. +* ============LICENSE_END============================================ +* ECOMP is a trademark and service mark of AT&T Intellectual Property. +*/ \ No newline at end of file diff --git a/README.md b/README.md new file mode 100644 index 0000000..2c0f5e0 --- /dev/null +++ b/README.md @@ -0,0 +1,28 @@ +# OpenECOMP JTOSCA + + +--- +--- + +# Introduction + +OpenECOMP JTOSCA is delivered as helper JAR that can be used by clients that work with TOSCA CSAR files. +It parses the CSAR and returns the model object which represents the CSAR contents. +Prior to that, it performs validations on the CSAR to check its TOSCA compliance. 
+ + +# Compiling OpenECOMP JTOSCA + +OpenECOMP JTOSCA can be compiled easily using maven command: `mvn clean install` +The result is JAR file under "target" folder + +# Getting Help + +*** to be completed on release *** + +SDC@lists.openecomp.org + +SDC Javadoc and Maven site + +*** to be completed on rrelease *** + diff --git a/pom.xml b/pom.xml new file mode 100644 index 0000000..61a504b --- /dev/null +++ b/pom.xml @@ -0,0 +1,212 @@ + + 4.0.0 + + org.openecomp.sdc.jtosca + jtosca + 1.1.0-SNAPSHOT + + + + + + + UTF-8 + + + + + + + + true + ${project.basedir} + ${project.basedir}/target/jacoco.exec + https://nexus.onap.org + /content/sites/site/org/openecomp/sdc/jtosca/${project.version} + snapshots + releases + + + + + + + + org.yaml + snakeyaml + 1.14 + compile + + + + org.slf4j + slf4j-api + 1.7.25 + + + + + + junit + junit + 4.12 + + + + + + + org.apache.maven.plugins + maven-javadoc-plugin + 2.10.4 + + false + org.umlgraph.doclet.UmlGraphDoc + + org.umlgraph + umlgraph + 5.6 + + -views + true + + + + + + + + + org.apache.maven.plugins + maven-site-plugin + 3.4 + + + org.apache.maven.wagon + wagon-webdav-jackrabbit + 2.10 + + + + + + org.jacoco + jacoco-maven-plugin + 0.7.8 + + + + prepare-agent + + prepare-agent + + + ${sonar.jacoco.reportPath} + + + + + + + + org.sonatype.plugins + nexus-staging-maven-plugin + 1.6.7 + true + + ${nexus.proxy} + ${staging.profile.id} + ecomp-staging + + + + + org.apache.maven.plugins + maven-compiler-plugin + 2.5.1 + true + + 1.8 + 1.8 + + + + org.apache.maven.plugins + maven-javadoc-plugin + 2.10.3 + + + + org.codehaus.mojo + license-maven-plugin + 1.10 + + false + ============LICENSE_START======================================================= + ============LICENSE_END========================================================= + ================================================================================ + apache_v2 + 2017 + AT&T Intellectual Property. All rights + reserved. 
+ jtosca + true + true + true + true + false + + **/*.java + + + + + first + + update-file-header + + + + + + + + + + + central + Official Maven repository + http://repo2.maven.org/maven2/ + + + ecomp-releases + Release Repository + ${nexus.proxy}/content/repositories/releases/ + + + ecomp-staging + Staging Repository + ${nexus.proxy}/content/repositories/staging/ + + + + + + ecomp-releases + Release Repository + ${nexus.proxy}/content/repositories/${releases.path}/ + + + ecomp-snapshots + Snapshot Repository + ${nexus.proxy}/content/repositories/${snapshots.path}/ + + + ecomp-site + dav:${nexus.proxy}${sitePath} + + + + \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/Capability.java b/src/main/java/org/openecomp/sdc/toscaparser/api/Capability.java new file mode 100644 index 0000000..09571db --- /dev/null +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/Capability.java @@ -0,0 +1,121 @@ +package org.openecomp.sdc.toscaparser.api; + +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.Map; + +import org.openecomp.sdc.toscaparser.api.elements.CapabilityTypeDef; +import org.openecomp.sdc.toscaparser.api.elements.PropertyDef; + +public class Capability { + + private String name; + private LinkedHashMap _properties; + private CapabilityTypeDef _definition; + + public Capability(String cname, + LinkedHashMap cproperties, + CapabilityTypeDef cdefinition) { + name = cname; + _properties = cproperties; + _definition = cdefinition; + } + + public ArrayList getPropertiesObjects() { + // Return a list of property objects + ArrayList properties = new ArrayList(); + LinkedHashMap props = _properties; + if(props != null) { + for(Map.Entry me: props.entrySet()) { + String pname = me.getKey(); + Object pvalue = me.getValue(); + + LinkedHashMap propsDef = _definition.getPropertiesDef(); + if(propsDef != null) { + PropertyDef pd = (PropertyDef)propsDef.get(pname); + if(pd != null) { + properties.add(new 
Property(pname,pvalue,pd.getSchema(),null)); + } + } + } + } + return properties; + } + + public LinkedHashMap getProperties() { + // Return a dictionary of property name-object pairs + LinkedHashMap npps = new LinkedHashMap<>(); + for(Property p: getPropertiesObjects()) { + npps.put(p.getName(),p); + } + return npps; + } + + public Object getPropertyValue(String pname) { + // Return the value of a given property name + LinkedHashMap props = getProperties(); + if(props != null && props.get(pname) != null) { + return props.get(name).getValue(); + } + return null; + } + + public String getName() { + return name; + } + + public CapabilityTypeDef getDefinition() { + return _definition; + } + + // setter + public void setProperty(String pname,Object pvalue) { + _properties.put(pname,pvalue); + } + + @Override + public String toString() { + return "Capability{" + + "name='" + name + '\'' + + ", _properties=" + _properties + + ", _definition=" + _definition + + '}'; + } +} + +/*python + +from toscaparser.properties import Property + + +class Capability(object): + '''TOSCA built-in capabilities type.''' + + def __init__(self, name, properties, definition): + self.name = name + self._properties = properties + self.definition = definition + + def get_properties_objects(self): + '''Return a list of property objects.''' + properties = [] + props = self._properties + if props: + for name, value in props.items(): + props_def = self.definition.get_properties_def() + if props_def and name in props_def: + properties.append(Property(name, value, + props_def[name].schema)) + return properties + + def get_properties(self): + '''Return a dictionary of property name-object pairs.''' + return {prop.name: prop + for prop in self.get_properties_objects()} + + def get_property_value(self, name): + '''Return the value of a given property name.''' + props = self.get_properties() + if props and name in props: + return props[name].value +*/ diff --git 
a/src/main/java/org/openecomp/sdc/toscaparser/api/DataEntity.java b/src/main/java/org/openecomp/sdc/toscaparser/api/DataEntity.java new file mode 100644 index 0000000..350068b --- /dev/null +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/DataEntity.java @@ -0,0 +1,450 @@ +package org.openecomp.sdc.toscaparser.api; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.LinkedHashMap; +import java.util.List; + +import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; +import org.openecomp.sdc.toscaparser.api.elements.*; +import org.openecomp.sdc.toscaparser.api.elements.constraints.Constraint; +import org.openecomp.sdc.toscaparser.api.elements.constraints.Schema; +import org.openecomp.sdc.toscaparser.api.functions.Function; +import org.openecomp.sdc.toscaparser.api.utils.TOSCAVersionProperty; +import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; +import org.openecomp.sdc.toscaparser.api.utils.ValidateUtils; + +public class DataEntity { + // A complex data value entity + + private LinkedHashMap customDef; + private DataType dataType; + private LinkedHashMap schema; + private Object value; + private String propertyName; + + public DataEntity(String _dataTypeName,Object _valueDict, + LinkedHashMap _customDef,String _propName) { + + customDef = _customDef; + dataType = new DataType(_dataTypeName,_customDef); + schema = dataType.getAllProperties(); + value = _valueDict; + propertyName = _propName; + } + + @SuppressWarnings("unchecked") + public Object validate() { + // Validate the value by the definition of the datatype + + // A datatype can not have both 'type' and 'properties' definitions. 
+ // If the datatype has 'type' definition + if(dataType.getValueType() != null) { + value = DataEntity.validateDatatype(dataType.getValueType(),value,null,customDef,null); + Schema schemaCls = new Schema(propertyName,dataType.getDefs()); + for(Constraint constraint: schemaCls.getConstraints()) { + constraint.validate(value); + } + } + // If the datatype has 'properties' definition + else { + if(!(value instanceof LinkedHashMap)) { + //ERROR under investigation + ThreadLocalsHolder.getCollector().appendWarning(String.format( + "TypeMismatchError: \"%s\" is not a map. The type is \"%s\"", + value.toString(),dataType.getType())); + + if (value instanceof List && ((List) value).size() > 0) { + value = ((List) value).get(0); + } + + if (!(value instanceof LinkedHashMap)) { + return value; + } + } + + + + LinkedHashMap valueDict = (LinkedHashMap)value; + ArrayList allowedProps = new ArrayList<>(); + ArrayList requiredProps = new ArrayList<>(); + LinkedHashMap defaultProps = new LinkedHashMap<>(); + if(schema != null) { + allowedProps.addAll(schema.keySet()); + for(String name: schema.keySet()) { + PropertyDef propDef = schema.get(name); + if(propDef.isRequired()) { + requiredProps.add(name); + } + if(propDef.getDefault() != null) { + defaultProps.put(name,propDef.getDefault()); + } + } + } + + // check allowed field + for(String valueKey: valueDict.keySet()) { + //1710 devlop JSON validation + if(!("json").equals(dataType.getType()) && !allowedProps.contains(valueKey)) { + ThreadLocalsHolder.getCollector().appendException(String.format( + "UnknownFieldError: Data value of type \"%s\" contains unknown field \"%s\"", + dataType.getType(),valueKey)); + } + } + + // check default field + for(String defKey: defaultProps.keySet()) { + Object defValue = defaultProps.get(defKey); + if(valueDict.get(defKey) == null) { + valueDict.put(defKey, defValue); + } + + } + + // check missing field + ArrayList missingProp = new ArrayList<>(); + for(String reqKey: requiredProps) { + 
if(!valueDict.keySet().contains(reqKey)) { + missingProp.add(reqKey); + } + } + if(missingProp.size() > 0) { + ThreadLocalsHolder.getCollector().appendWarning(String.format( + "MissingRequiredFieldError: Data value of type \"%s\" is missing required field(s) \"%s\"", + dataType.getType(),missingProp.toString())); + } + + // check every field + for(String vname: valueDict.keySet()) { + Object vvalue = valueDict.get(vname); + LinkedHashMap schemaName = _findSchema(vname); + if(schemaName == null) { + continue; + } + Schema propSchema = new Schema(vname,schemaName); + // check if field value meets type defined + DataEntity.validateDatatype(propSchema.getType(), + vvalue, + propSchema.getEntrySchema(), + customDef, + null); + + // check if field value meets constraints defined + if(propSchema.getConstraints() != null) { + for(Constraint constraint: propSchema.getConstraints()) { + if(vvalue instanceof ArrayList) { + for(Object val: (ArrayList)vvalue) { + constraint.validate(val); + } + } + else { + constraint.validate(vvalue); + } + } + } + } + } + return value; + } + + private LinkedHashMap _findSchema(String name) { + if(schema != null && schema.get(name) != null) { + return schema.get(name).getSchema(); + } + return null; + } + + public static Object validateDatatype(String type, + Object value, + LinkedHashMap entrySchema, + LinkedHashMap customDef, + String propName) { + // Validate value with given type + + // If type is list or map, validate its entry by entry_schema(if defined) + // If type is a user-defined complex datatype, custom_def is required. 
+ + if(Function.isFunction(value)) { + return value; + } + else if (type == null) { + //NOT ANALYZED + ThreadLocalsHolder.getCollector().appendWarning(String.format( + "MissingType: Type is missing for value \"%s\"", + value.toString())); + return value; + } + else if(type.equals(Schema.STRING)) { + return ValidateUtils.validateString(value); + } + else if(type.equals(Schema.INTEGER)) { + return ValidateUtils.validateInteger(value); + } + else if(type.equals(Schema.FLOAT)) { + return ValidateUtils.validateFloat(value); + } + else if(type.equals(Schema.NUMBER)) { + return ValidateUtils.validateNumeric(value); + } + else if(type.equals(Schema.BOOLEAN)) { + return ValidateUtils.validateBoolean(value); + } + else if(type.equals(Schema.RANGE)) { + return ValidateUtils.validateRange(value); + } + else if(type.equals(Schema.TIMESTAMP)) { + ValidateUtils.validateTimestamp(value); + return value; + } + else if(type.equals(Schema.LIST)) { + ValidateUtils.validateList(value); + if(entrySchema != null) { + DataEntity.validateEntry(value,entrySchema,customDef); + } + return value; + } + else if(type.equals(Schema.SCALAR_UNIT_SIZE)) { + return (new ScalarUnitSize(value)).validateScalarUnit(); + } + else if(type.equals(Schema.SCALAR_UNIT_FREQUENCY)) { + return (new ScalarUnitFrequency(value)).validateScalarUnit(); + } + else if(type.equals(Schema.SCALAR_UNIT_TIME)) { + return (new ScalarUnitTime(value)).validateScalarUnit(); + } + else if(type.equals(Schema.VERSION)) { + return (new TOSCAVersionProperty(value)).getVersion(); + } + else if(type.equals(Schema.MAP)) { + ValidateUtils.validateMap(value); + if(entrySchema != null) { + DataEntity.validateEntry(value,entrySchema,customDef); + } + return value; + } + else if(type.equals(Schema.PORTSPEC)) { + // tODO(TBD) bug 1567063, validate source & target as PortDef type + // as complex types not just as integers + PortSpec.validateAdditionalReq(value,propName,customDef); + } + else { + DataEntity data = new 
DataEntity(type,value,customDef,null); + return data.validate(); + } + + return value; + } + + @SuppressWarnings("unchecked") + public static Object validateEntry(Object value, + LinkedHashMap entrySchema, + LinkedHashMap customDef) { + + // Validate entries for map and list + Schema schema = new Schema(null,entrySchema); + Object valueob = value; + ArrayList valueList = null; + if(valueob instanceof LinkedHashMap) { + valueList = new ArrayList(((LinkedHashMap)valueob).values()); + } + else if(valueob instanceof ArrayList) { + valueList = (ArrayList)valueob; + } + if(valueList != null) { + for(Object v: valueList) { + DataEntity.validateDatatype(schema.getType(),v,schema.getEntrySchema(),customDef,null); + if(schema.getConstraints() != null) { + for(Constraint constraint: schema.getConstraints()) { + constraint.validate(v); + } + } + } + } + return value; + } + + @Override + public String toString() { + return "DataEntity{" + + "customDef=" + customDef + + ", dataType=" + dataType + + ", schema=" + schema + + ", value=" + value + + ", propertyName='" + propertyName + '\'' + + '}'; + } +} + +/*python + +from toscaparser.common.exception import ExceptionCollector +from toscaparser.common.exception import MissingRequiredFieldError +from toscaparser.common.exception import TypeMismatchError +from toscaparser.common.exception import UnknownFieldError +from toscaparser.elements.constraints import Schema +from toscaparser.elements.datatype import DataType +from toscaparser.elements.portspectype import PortSpec +from toscaparser.elements.scalarunit import ScalarUnit_Frequency +from toscaparser.elements.scalarunit import ScalarUnit_Size +from toscaparser.elements.scalarunit import ScalarUnit_Time +from toscaparser.utils.gettextutils import _ +from toscaparser.utils import validateutils + + +class DataEntity(object): + '''A complex data value entity.''' + + def __init__(self, datatypename, value_dict, custom_def=None, + prop_name=None): + self.custom_def = custom_def + 
self.datatype = DataType(datatypename, custom_def) + self.schema = self.datatype.get_all_properties() + self.value = value_dict + self.property_name = prop_name + + def validate(self): + '''Validate the value by the definition of the datatype.''' + + # A datatype can not have both 'type' and 'properties' definitions. + # If the datatype has 'type' definition + if self.datatype.value_type: + self.value = DataEntity.validate_datatype(self.datatype.value_type, + self.value, + None, + self.custom_def) + schema = Schema(self.property_name, self.datatype.defs) + for constraint in schema.constraints: + constraint.validate(self.value) + # If the datatype has 'properties' definition + else: + if not isinstance(self.value, dict): + ExceptionCollector.appendException( + TypeMismatchError(what=self.value, + type=self.datatype.type)) + allowed_props = [] + required_props = [] + default_props = {} + if self.schema: + allowed_props = self.schema.keys() + for name, prop_def in self.schema.items(): + if prop_def.required: + required_props.append(name) + if prop_def.default: + default_props[name] = prop_def.default + + # check allowed field + for value_key in list(self.value.keys()): + if value_key not in allowed_props: + ExceptionCollector.appendException( + UnknownFieldError(what=(_('Data value of type "%s"') + % self.datatype.type), + field=value_key)) + + # check default field + for def_key, def_value in list(default_props.items()): + if def_key not in list(self.value.keys()): + self.value[def_key] = def_value + + # check missing field + missingprop = [] + for req_key in required_props: + if req_key not in list(self.value.keys()): + missingprop.append(req_key) + if missingprop: + ExceptionCollector.appendException( + MissingRequiredFieldError( + what=(_('Data value of type "%s"') + % self.datatype.type), required=missingprop)) + + # check every field + for name, value in list(self.value.items()): + schema_name = self._find_schema(name) + if not schema_name: + continue + 
prop_schema = Schema(name, schema_name) + # check if field value meets type defined + DataEntity.validate_datatype(prop_schema.type, value, + prop_schema.entry_schema, + self.custom_def) + # check if field value meets constraints defined + if prop_schema.constraints: + for constraint in prop_schema.constraints: + if isinstance(value, list): + for val in value: + constraint.validate(val) + else: + constraint.validate(value) + + return self.value + + def _find_schema(self, name): + if self.schema and name in self.schema.keys(): + return self.schema[name].schema + + @staticmethod + def validate_datatype(type, value, entry_schema=None, custom_def=None, + prop_name=None): + '''Validate value with given type. + + If type is list or map, validate its entry by entry_schema(if defined) + If type is a user-defined complex datatype, custom_def is required. + ''' + from toscaparser.functions import is_function + if is_function(value): + return value + if type == Schema.STRING: + return validateutils.validate_string(value) + elif type == Schema.INTEGER: + return validateutils.validate_integer(value) + elif type == Schema.FLOAT: + return validateutils.validate_float(value) + elif type == Schema.NUMBER: + return validateutils.validate_numeric(value) + elif type == Schema.BOOLEAN: + return validateutils.validate_boolean(value) + elif type == Schema.RANGE: + return validateutils.validate_range(value) + elif type == Schema.TIMESTAMP: + validateutils.validate_timestamp(value) + return value + elif type == Schema.LIST: + validateutils.validate_list(value) + if entry_schema: + DataEntity.validate_entry(value, entry_schema, custom_def) + return value + elif type == Schema.SCALAR_UNIT_SIZE: + return ScalarUnit_Size(value).validate_scalar_unit() + elif type == Schema.SCALAR_UNIT_FREQUENCY: + return ScalarUnit_Frequency(value).validate_scalar_unit() + elif type == Schema.SCALAR_UNIT_TIME: + return ScalarUnit_Time(value).validate_scalar_unit() + elif type == Schema.VERSION: + return 
validateutils.TOSCAVersionProperty(value).get_version() + elif type == Schema.MAP: + validateutils.validate_map(value) + if entry_schema: + DataEntity.validate_entry(value, entry_schema, custom_def) + return value + elif type == Schema.PORTSPEC: + # tODO(TBD) bug 1567063, validate source & target as PortDef type + # as complex types not just as integers + PortSpec.validate_additional_req(value, prop_name, custom_def) + else: + data = DataEntity(type, value, custom_def) + return data.validate() + + @staticmethod + def validate_entry(value, entry_schema, custom_def=None): + '''Validate entries for map and list.''' + schema = Schema(None, entry_schema) + valuelist = value + if isinstance(value, dict): + valuelist = list(value.values()) + for v in valuelist: + DataEntity.validate_datatype(schema.type, v, schema.entry_schema, + custom_def) + if schema.constraints: + for constraint in schema.constraints: + constraint.validate(v) + return value +*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/DataEntity.java.orig b/src/main/java/org/openecomp/sdc/toscaparser/api/DataEntity.java.orig new file mode 100644 index 0000000..c3f8fb5 --- /dev/null +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/DataEntity.java.orig @@ -0,0 +1,453 @@ +package org.openecomp.sdc.toscaparser.api; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.LinkedHashMap; +import java.util.List; + +import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; +import org.openecomp.sdc.toscaparser.api.elements.*; +import org.openecomp.sdc.toscaparser.api.elements.constraints.Constraint; +import org.openecomp.sdc.toscaparser.api.elements.constraints.Schema; +import org.openecomp.sdc.toscaparser.api.functions.Function; +import org.openecomp.sdc.toscaparser.api.utils.TOSCAVersionProperty; +import org.openecomp.sdc.toscaparser.api.utils.ValidateUtils; + +public class DataEntity { + // A complex data value entity + + private LinkedHashMap 
customDef; + private DataType dataType; + private LinkedHashMap schema; + private Object value; + private String propertyName; + + public DataEntity(String _dataTypeName,Object _valueDict, + LinkedHashMap _customDef,String _propName) { + + customDef = _customDef; + dataType = new DataType(_dataTypeName,_customDef); + schema = dataType.getAllProperties(); + value = _valueDict; + propertyName = _propName; + } + + @SuppressWarnings("unchecked") + public Object validate() { + // Validate the value by the definition of the datatype + + // A datatype can not have both 'type' and 'properties' definitions. + // If the datatype has 'type' definition + if(dataType.getValueType() != null) { + value = DataEntity.validateDatatype(dataType.getValueType(),value,null,customDef,null); + Schema schemaCls = new Schema(propertyName,dataType.getDefs()); + for(Constraint constraint: schemaCls.getConstraints()) { + constraint.validate(value); + } + } + // If the datatype has 'properties' definition + else { + if(!(value instanceof LinkedHashMap)) { + //ERROR under investigation + ExceptionCollector.appendWarning(String.format( + "TypeMismatchError: \"%s\" is not a map. 
The type is \"%s\"", + value.toString(),dataType.getType())); +<<<<<<< HEAD + return value; + } + LinkedHashMap valueDict = (LinkedHashMap)value; +======= + + if (value instanceof List) + value = ((List) value).get(0); + + if (!(value instanceof LinkedHashMap)) + return value; + } + + + + LinkedHashMap valueDict = (LinkedHashMap)value; +>>>>>>> master + ArrayList allowedProps = new ArrayList<>(); + ArrayList requiredProps = new ArrayList<>(); + LinkedHashMap defaultProps = new LinkedHashMap<>(); + if(schema != null) { + allowedProps.addAll(schema.keySet()); + for(String name: schema.keySet()) { + PropertyDef propDef = schema.get(name); + if(propDef.isRequired()) { + requiredProps.add(name); + } + if(propDef.getDefault() != null) { + defaultProps.put(name,propDef.getDefault()); + } + } + } + + // check allowed field + for(String valueKey: valueDict.keySet()) { + //1710 devlop JSON validation + if(!("json").equals(dataType.getType()) && !allowedProps.contains(valueKey)) { + ExceptionCollector.appendException(String.format( + "UnknownFieldError: Data value of type \"%s\" contains unknown field \"%s\"", + dataType.getType(),valueKey)); + } + } + + // check default field + for(String defKey: defaultProps.keySet()) { + Object defValue = defaultProps.get(defKey); + if(valueDict.get(defKey) == null) { + valueDict.put(defKey, defValue); + } + + } + + // check missing field + ArrayList missingProp = new ArrayList<>(); + for(String reqKey: requiredProps) { + if(!valueDict.keySet().contains(reqKey)) { + missingProp.add(reqKey); + } + } + if(missingProp.size() > 0) { + ExceptionCollector.appendWarning(String.format( + "MissingRequiredFieldError: Data value of type \"%s\" is missing required field(s) \"%s\"", + dataType.getType(),missingProp.toString())); + } + + // check every field + for(String vname: valueDict.keySet()) { + Object vvalue = valueDict.get(vname); + LinkedHashMap schemaName = _findSchema(vname); + if(schemaName == null) { + continue; + } + Schema propSchema = 
new Schema(vname,schemaName); + // check if field value meets type defined + DataEntity.validateDatatype(propSchema.getType(), + vvalue, + propSchema.getEntrySchema(), + customDef, + null); + + // check if field value meets constraints defined + if(propSchema.getConstraints() != null) { + for(Constraint constraint: propSchema.getConstraints()) { + if(vvalue instanceof ArrayList) { + for(Object val: (ArrayList)vvalue) { + constraint.validate(val); + } + } + else { + constraint.validate(vvalue); + } + } + } + } + } + return value; + } + + private LinkedHashMap _findSchema(String name) { + if(schema != null && schema.get(name) != null) { + return schema.get(name).getSchema(); + } + return null; + } + + public static Object validateDatatype(String type, + Object value, + LinkedHashMap entrySchema, + LinkedHashMap customDef, + String propName) { + // Validate value with given type + + // If type is list or map, validate its entry by entry_schema(if defined) + // If type is a user-defined complex datatype, custom_def is required. 
+ + if(Function.isFunction(value)) { + return value; + } + else if (type == null) { + //NOT ANALYZED + ExceptionCollector.appendWarning(String.format( + "MissingType: Type is missing for value \"%s\"", + value.toString())); + return value; + } + else if(type.equals(Schema.STRING)) { + return ValidateUtils.validateString(value); + } + else if(type.equals(Schema.INTEGER)) { + return ValidateUtils.validateInteger(value); + } + else if(type.equals(Schema.FLOAT)) { + return ValidateUtils.validateFloat(value); + } + else if(type.equals(Schema.NUMBER)) { + return ValidateUtils.validateNumeric(value); + } + else if(type.equals(Schema.BOOLEAN)) { + return ValidateUtils.validateBoolean(value); + } + else if(type.equals(Schema.RANGE)) { + return ValidateUtils.validateRange(value); + } + else if(type.equals(Schema.TIMESTAMP)) { + ValidateUtils.validateTimestamp(value); + return value; + } + else if(type.equals(Schema.LIST)) { + ValidateUtils.validateList(value); + if(entrySchema != null) { + DataEntity.validateEntry(value,entrySchema,customDef); + } + return value; + } + else if(type.equals(Schema.SCALAR_UNIT_SIZE)) { + return (new ScalarUnitSize(value)).validateScalarUnit(); + } + else if(type.equals(Schema.SCALAR_UNIT_FREQUENCY)) { + return (new ScalarUnitFrequency(value)).validateScalarUnit(); + } + else if(type.equals(Schema.SCALAR_UNIT_TIME)) { + return (new ScalarUnitTime(value)).validateScalarUnit(); + } + else if(type.equals(Schema.VERSION)) { + return (new TOSCAVersionProperty(value)).getVersion(); + } + else if(type.equals(Schema.MAP)) { + ValidateUtils.validateMap(value); + if(entrySchema != null) { + DataEntity.validateEntry(value,entrySchema,customDef); + } + return value; + } + else if(type.equals(Schema.PORTSPEC)) { + // tODO(TBD) bug 1567063, validate source & target as PortDef type + // as complex types not just as integers + PortSpec.validateAdditionalReq(value,propName,customDef); + } + else { + DataEntity data = new DataEntity(type,value,customDef,null); + 
return data.validate(); + } + + return value; + } + + @SuppressWarnings("unchecked") + public static Object validateEntry(Object value, + LinkedHashMap entrySchema, + LinkedHashMap customDef) { + + // Validate entries for map and list + Schema schema = new Schema(null,entrySchema); + Object valueob = value; + ArrayList valueList = null; + if(valueob instanceof LinkedHashMap) { + valueList = new ArrayList(((LinkedHashMap)valueob).values()); + } + else if(valueob instanceof ArrayList) { + valueList = (ArrayList)valueob; + } + if(valueList != null) { + for(Object v: valueList) { + DataEntity.validateDatatype(schema.getType(),v,schema.getEntrySchema(),customDef,null); + if(schema.getConstraints() != null) { + for(Constraint constraint: schema.getConstraints()) { + constraint.validate(v); + } + } + } + } + return value; + } + + @Override + public String toString() { + return "DataEntity{" + + "customDef=" + customDef + + ", dataType=" + dataType + + ", schema=" + schema + + ", value=" + value + + ", propertyName='" + propertyName + '\'' + + '}'; + } +} + +/*python + +from toscaparser.common.exception import ExceptionCollector +from toscaparser.common.exception import MissingRequiredFieldError +from toscaparser.common.exception import TypeMismatchError +from toscaparser.common.exception import UnknownFieldError +from toscaparser.elements.constraints import Schema +from toscaparser.elements.datatype import DataType +from toscaparser.elements.portspectype import PortSpec +from toscaparser.elements.scalarunit import ScalarUnit_Frequency +from toscaparser.elements.scalarunit import ScalarUnit_Size +from toscaparser.elements.scalarunit import ScalarUnit_Time +from toscaparser.utils.gettextutils import _ +from toscaparser.utils import validateutils + + +class DataEntity(object): + '''A complex data value entity.''' + + def __init__(self, datatypename, value_dict, custom_def=None, + prop_name=None): + self.custom_def = custom_def + self.datatype = DataType(datatypename, 
custom_def) + self.schema = self.datatype.get_all_properties() + self.value = value_dict + self.property_name = prop_name + + def validate(self): + '''Validate the value by the definition of the datatype.''' + + # A datatype can not have both 'type' and 'properties' definitions. + # If the datatype has 'type' definition + if self.datatype.value_type: + self.value = DataEntity.validate_datatype(self.datatype.value_type, + self.value, + None, + self.custom_def) + schema = Schema(self.property_name, self.datatype.defs) + for constraint in schema.constraints: + constraint.validate(self.value) + # If the datatype has 'properties' definition + else: + if not isinstance(self.value, dict): + ExceptionCollector.appendException( + TypeMismatchError(what=self.value, + type=self.datatype.type)) + allowed_props = [] + required_props = [] + default_props = {} + if self.schema: + allowed_props = self.schema.keys() + for name, prop_def in self.schema.items(): + if prop_def.required: + required_props.append(name) + if prop_def.default: + default_props[name] = prop_def.default + + # check allowed field + for value_key in list(self.value.keys()): + if value_key not in allowed_props: + ExceptionCollector.appendException( + UnknownFieldError(what=(_('Data value of type "%s"') + % self.datatype.type), + field=value_key)) + + # check default field + for def_key, def_value in list(default_props.items()): + if def_key not in list(self.value.keys()): + self.value[def_key] = def_value + + # check missing field + missingprop = [] + for req_key in required_props: + if req_key not in list(self.value.keys()): + missingprop.append(req_key) + if missingprop: + ExceptionCollector.appendException( + MissingRequiredFieldError( + what=(_('Data value of type "%s"') + % self.datatype.type), required=missingprop)) + + # check every field + for name, value in list(self.value.items()): + schema_name = self._find_schema(name) + if not schema_name: + continue + prop_schema = Schema(name, schema_name) + # 
check if field value meets type defined + DataEntity.validate_datatype(prop_schema.type, value, + prop_schema.entry_schema, + self.custom_def) + # check if field value meets constraints defined + if prop_schema.constraints: + for constraint in prop_schema.constraints: + if isinstance(value, list): + for val in value: + constraint.validate(val) + else: + constraint.validate(value) + + return self.value + + def _find_schema(self, name): + if self.schema and name in self.schema.keys(): + return self.schema[name].schema + + @staticmethod + def validate_datatype(type, value, entry_schema=None, custom_def=None, + prop_name=None): + '''Validate value with given type. + + If type is list or map, validate its entry by entry_schema(if defined) + If type is a user-defined complex datatype, custom_def is required. + ''' + from toscaparser.functions import is_function + if is_function(value): + return value + if type == Schema.STRING: + return validateutils.validate_string(value) + elif type == Schema.INTEGER: + return validateutils.validate_integer(value) + elif type == Schema.FLOAT: + return validateutils.validate_float(value) + elif type == Schema.NUMBER: + return validateutils.validate_numeric(value) + elif type == Schema.BOOLEAN: + return validateutils.validate_boolean(value) + elif type == Schema.RANGE: + return validateutils.validate_range(value) + elif type == Schema.TIMESTAMP: + validateutils.validate_timestamp(value) + return value + elif type == Schema.LIST: + validateutils.validate_list(value) + if entry_schema: + DataEntity.validate_entry(value, entry_schema, custom_def) + return value + elif type == Schema.SCALAR_UNIT_SIZE: + return ScalarUnit_Size(value).validate_scalar_unit() + elif type == Schema.SCALAR_UNIT_FREQUENCY: + return ScalarUnit_Frequency(value).validate_scalar_unit() + elif type == Schema.SCALAR_UNIT_TIME: + return ScalarUnit_Time(value).validate_scalar_unit() + elif type == Schema.VERSION: + return 
validateutils.TOSCAVersionProperty(value).get_version() + elif type == Schema.MAP: + validateutils.validate_map(value) + if entry_schema: + DataEntity.validate_entry(value, entry_schema, custom_def) + return value + elif type == Schema.PORTSPEC: + # tODO(TBD) bug 1567063, validate source & target as PortDef type + # as complex types not just as integers + PortSpec.validate_additional_req(value, prop_name, custom_def) + else: + data = DataEntity(type, value, custom_def) + return data.validate() + + @staticmethod + def validate_entry(value, entry_schema, custom_def=None): + '''Validate entries for map and list.''' + schema = Schema(None, entry_schema) + valuelist = value + if isinstance(value, dict): + valuelist = list(value.values()) + for v in valuelist: + DataEntity.validate_datatype(schema.type, v, schema.entry_schema, + custom_def) + if schema.constraints: + for constraint in schema.constraints: + constraint.validate(v) + return value +*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/EntityTemplate.java b/src/main/java/org/openecomp/sdc/toscaparser/api/EntityTemplate.java new file mode 100644 index 0000000..e896905 --- /dev/null +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/EntityTemplate.java @@ -0,0 +1,832 @@ +package org.openecomp.sdc.toscaparser.api; + +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.Map; + +import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; +import org.openecomp.sdc.toscaparser.api.elements.*; +import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +public abstract class EntityTemplate { + // Base class for TOSCA templates + + protected static final String DERIVED_FROM = "derived_from"; + protected static final String PROPERTIES = "properties"; + protected static final String REQUIREMENTS = "requirements"; + protected static final String INTERFACES = "interfaces"; + protected static final String CAPABILITIES = "capabilities"; + 
protected static final String TYPE = "type"; + protected static final String DESCRIPTION = "description"; + protected static final String DIRECTIVES = "directives"; + protected static final String ATTRIBUTES = "attributes"; + protected static final String ARTIFACTS = "artifacts"; + protected static final String NODE_FILTER = "node_filter"; + protected static final String COPY = "copy"; + + protected static final String SECTIONS[] = { + DERIVED_FROM, PROPERTIES, REQUIREMENTS,INTERFACES, + CAPABILITIES, TYPE, DESCRIPTION, DIRECTIVES, + ATTRIBUTES, ARTIFACTS, NODE_FILTER, COPY}; + + private static final String NODE = "node"; + private static final String CAPABILITY = "capability"; + private static final String RELATIONSHIP = "relationship"; + private static final String OCCURRENCES = "occurrences"; + + protected static final String REQUIREMENTS_SECTION[] = { + NODE, CAPABILITY, RELATIONSHIP, OCCURRENCES, NODE_FILTER}; + + //# Special key names + private static final String METADATA = "metadata"; + protected static final String SPECIAL_SECTIONS[] = {METADATA}; + + protected String name; + protected LinkedHashMap entityTpl; + protected LinkedHashMap customDef; + protected StatefulEntityType typeDefinition; + private ArrayList _properties; + private ArrayList _interfaces; + private ArrayList _requirements; + private ArrayList _capabilities; + + // dummy constructor for subclasses that don't want super + public EntityTemplate() { + return; + } + + @SuppressWarnings("unchecked") + public EntityTemplate(String _name, + LinkedHashMap _template, + String _entityName, + LinkedHashMap _customDef) { + name = _name; + entityTpl = _template; + customDef = _customDef; + _validateField(entityTpl); + String type = (String)entityTpl.get("type"); + UnsupportedType.validateType(type); + if(_entityName.equals("node_type")) { + if(type != null) { + typeDefinition = new NodeType(type, customDef); + } + else { + typeDefinition = null; + } + } + if(_entityName.equals("relationship_type")) { 
+ Object relationship = _template.get("relationship"); + type = null; + if(relationship != null && relationship instanceof LinkedHashMap) { + type = (String)((LinkedHashMap)relationship).get("type"); + } + else if(relationship instanceof String) { + type = (String)entityTpl.get("relationship"); + } + else { + type = (String)entityTpl.get("type"); + } + UnsupportedType.validateType(type); + typeDefinition = new RelationshipType(type,null, customDef); + } + if(_entityName.equals("policy_type")) { + if(type == null) { + //msg = (_('Policy definition of "%(pname)s" must have' + // ' a "type" ''attribute.') % dict(pname=name)) + ThreadLocalsHolder.getCollector().appendException(String.format( + "ValidationError: Policy definition of \"%s\" must have a \"type\" attribute",name)); + } + typeDefinition = new PolicyType(type, customDef); + } + if(_entityName.equals("group_type")) { + if(type != null) { + typeDefinition = new GroupType(type, customDef); + } + else { + typeDefinition = null; + } + } + _properties = null; + _interfaces = null; + _requirements = null; + _capabilities = null; + } + + public String getType() { + if(typeDefinition != null) { + String clType = typeDefinition.getClass().getSimpleName(); + if(clType.equals("NodeType")) { + return (String)((NodeType)typeDefinition).getType(); + } + else if(clType.equals("PolicyType")) { + return (String)((PolicyType)typeDefinition).getType(); + } + else if(clType.equals("GroupType")) { + return (String)((GroupType)typeDefinition).getType(); + } + else if(clType.equals("RelationshipType")) { + return (String)((RelationshipType)typeDefinition).getType(); + } + } + return null; + } + + public Object getParentType() { + if(typeDefinition != null) { + String clType = typeDefinition.getClass().getSimpleName(); + if(clType.equals("NodeType")) { + return ((NodeType)typeDefinition).getParentType(); + } + else if(clType.equals("PolicyType")) { + return ((PolicyType)typeDefinition).getParentType(); + } + else 
if(clType.equals("GroupType")) { + return ((GroupType)typeDefinition).getParentType(); + } + else if(clType.equals("RelationshipType")) { + return ((RelationshipType)typeDefinition).getParentType(); + } + } + return null; + } + + @SuppressWarnings("unchecked") + public ArrayList getRequirements() { + if(_requirements == null) { + _requirements = new ArrayList(); + Object ob = ((EntityType)typeDefinition).getValue(REQUIREMENTS,entityTpl,false); + if(ob != null) { + _requirements.addAll((ArrayList)ob); + } + + } + return _requirements; + } + + public ArrayList getPropertiesObjects() { + // Return properties objects for this template + if(_properties ==null) { + _properties = _createProperties(); + } + return _properties; + } + + public LinkedHashMap getProperties() { + LinkedHashMap props = new LinkedHashMap<>(); + for(Property po: getPropertiesObjects()) { + props.put(((Property)po).getName(),po); + } + return props; + } + + public Object getPropertyValue(String name) { + LinkedHashMap props = getProperties(); + Property p = (Property)props.get(name); + return p != null ? p.getValue() : null; + } + + public ArrayList getInterfaces() { + if(_interfaces == null) { + _interfaces = _createInterfaces(); + } + return _interfaces; + } + + public ArrayList getCapabilitiesObjects() { + // Return capabilities objects for this template + if(_capabilities == null) { + _capabilities = _createCapabilities(); + } + return _capabilities; + + } + + public LinkedHashMap getCapabilities() { + LinkedHashMap caps = new LinkedHashMap(); + for(Capability cap: getCapabilitiesObjects()) { + caps.put(cap.getName(),cap); + } + return caps; + } + + public boolean isDerivedFrom(String typeStr) { + // Returns true if this object is derived from 'type_str'. 
+ // False otherwise + + if(getType() == null) { + return false; + } + else if(getType().equals(typeStr)) { + return true; + } + else if(getParentType() != null) { + return ((EntityType)getParentType()).isDerivedFrom(typeStr); + } + return false; + } + + @SuppressWarnings("unchecked") + private ArrayList _createCapabilities() { + ArrayList capability = new ArrayList(); + LinkedHashMap caps = (LinkedHashMap) + ((EntityType)typeDefinition).getValue(CAPABILITIES,entityTpl,true); + if(caps != null) { + //?!? getCapabilities defined only for NodeType... + LinkedHashMap capabilities = ((NodeType)typeDefinition).getCapabilities(); + for(Map.Entry me: caps.entrySet()) { + String name = me. getKey(); + LinkedHashMap props = (LinkedHashMap)me.getValue(); + if(capabilities.get(name) != null) { + CapabilityTypeDef c = capabilities.get(name); // a CapabilityTypeDef + LinkedHashMap properties = new LinkedHashMap(); + // first use the definition default value + LinkedHashMap cprops = c.getProperties(); + if(cprops != null) { + for(Map.Entry cpe: cprops.entrySet()) { + String propertyName = cpe.getKey(); + LinkedHashMap propertyDef = (LinkedHashMap)cpe.getValue(); + Object dob = propertyDef.get("default"); + if(dob != null) { + properties.put(propertyName, dob); + + } + } + } + // then update (if available) with the node properties + LinkedHashMap pp = (LinkedHashMap)props.get("properties"); + if(pp != null) { + properties.putAll(pp); + } + Capability cap = new Capability(name, properties, c); + capability.add(cap); + } + } + } + return capability; + } + + protected void _validateProperties(LinkedHashMap template,StatefulEntityType entityType) { + @SuppressWarnings("unchecked") + LinkedHashMap properties = (LinkedHashMap)entityType.getValue(PROPERTIES,template,false); + _commonValidateProperties(entityType,properties); + } + + protected void _validateCapabilities() { + //BUG??? getCapabilities only defined in NodeType... 
+ LinkedHashMap typeCapabilities = ((NodeType)typeDefinition).getCapabilities(); + ArrayList allowedCaps = new ArrayList(); + if(typeCapabilities != null) { + allowedCaps.addAll(typeCapabilities.keySet()); + } + @SuppressWarnings("unchecked") + LinkedHashMap capabilities = (LinkedHashMap) + ((EntityType)typeDefinition).getValue(CAPABILITIES, entityTpl, false); + if(capabilities != null) { + _commonValidateField(capabilities, allowedCaps, "capabilities"); + _validateCapabilitiesProperties(capabilities); + } + } + + @SuppressWarnings("unchecked") + private void _validateCapabilitiesProperties(LinkedHashMap capabilities) { + for(Map.Entry me: capabilities.entrySet()) { + String cap = me.getKey(); + LinkedHashMap props = (LinkedHashMap)me.getValue(); + Capability capability = getCapability(cap); + if(capability == null) { + continue; + } + CapabilityTypeDef capabilitydef = capability.getDefinition(); + _commonValidateProperties(capabilitydef,(LinkedHashMap)props.get(PROPERTIES)); + + // validating capability properties values + for(Property prop: getCapability(cap).getPropertiesObjects()) { + prop.validate(); + + if(cap.equals("scalable") && prop.getName().equals("default_instances")) { + LinkedHashMap propDict = (LinkedHashMap)props.get(PROPERTIES); + int minInstances = (int)propDict.get("min_instances"); + int maxInstances = (int)propDict.get("max_instances"); + int defaultInstances = (int)propDict.get("default_instances"); + if(defaultInstances < minInstances || defaultInstances > maxInstances) { + //err_msg = ('"properties" of template "%s": ' + // '"default_instances" value is not between ' + // '"min_instances" and "max_instances".' 
% + // self.name) + ThreadLocalsHolder.getCollector().appendException(String.format( + "ValidationError: \"properties\" of template \"%s\": \"default_instances\" value is not between \"min_instances\" and \"max_instances\"", + name)); + } + } + } + } + } + + private void _commonValidateProperties(StatefulEntityType entityType,LinkedHashMap properties) { + ArrayList allowedProps = new ArrayList(); + ArrayList requiredProps = new ArrayList(); + for(PropertyDef p: entityType.getPropertiesDefObjects()) { + allowedProps.add(p.getName()); + // If property is 'required' and has no 'default' value then record + if(p.isRequired() && p.getDefault() == null) { + requiredProps.add(p.getName()); + } + } + // validate all required properties have values + if(properties != null) { + ArrayList reqPropsNoValueOrDefault = new ArrayList(); + _commonValidateField(properties, allowedProps, "properties"); + // make sure it's not missing any property required by a tosca type + for(String r: requiredProps) { + if(properties.get(r) == null) { + reqPropsNoValueOrDefault.add(r); + } + } + // Required properties found without value or a default value + if(!reqPropsNoValueOrDefault.isEmpty()) { + ThreadLocalsHolder.getCollector().appendWarning(String.format( + "MissingRequiredFieldError: properties of template \"%s\" are missing field(s): %s", + name,reqPropsNoValueOrDefault.toString())); + } + } + else { + // Required properties in schema, but not in template + if(!requiredProps.isEmpty()) { + ThreadLocalsHolder.getCollector().appendWarning(String.format( + "MissingRequiredFieldError2: properties of template \"%s\" are missing field(s): %s", + name,requiredProps.toString())); + } + } + } + + @SuppressWarnings("unchecked") + private void _validateField(LinkedHashMap template) { + if(!(template instanceof LinkedHashMap)) { + ThreadLocalsHolder.getCollector().appendException(String.format( + "MissingRequiredFieldError: Template \"%s\" is missing required field \"%s\"",name,TYPE)); + return;//??? 
+ } + boolean bBad = false; + Object relationship = ((LinkedHashMap)template).get("relationship"); + if(relationship != null) { + if(!(relationship instanceof String)) { + bBad = (((LinkedHashMap)relationship).get(TYPE) == null); + } + else if(relationship instanceof String) { + bBad = (template.get("relationship") == null); + } + } + else { + bBad = (template.get(TYPE) == null); + } + if(bBad) { + ThreadLocalsHolder.getCollector().appendException(String.format( + "MissingRequiredFieldError: Template \"%s\" is missing required field \"%s\"",name,TYPE)); + } + } + + protected void _commonValidateField(LinkedHashMap schema, ArrayList allowedList,String section) { + for(String sname: schema.keySet()) { + boolean bFound = false; + for(String allowed: allowedList) { + if(sname.equals(allowed)) { + bFound = true; + break; + } + } + if(!bFound) { + ThreadLocalsHolder.getCollector().appendException(String.format( + "UnknownFieldError: Section \"%s\" of template \"%s\" contains unknown field \"%s\"",section,name,sname)); + } + } + + } + + @SuppressWarnings("unchecked") + private ArrayList _createProperties() { + ArrayList props = new ArrayList(); + LinkedHashMap properties = (LinkedHashMap) + ((EntityType)typeDefinition).getValue(PROPERTIES,entityTpl,false); + if(properties == null) { + properties = new LinkedHashMap(); + } + for(Map.Entry me: properties.entrySet()) { + String pname = me.getKey(); + Object pvalue = me.getValue(); + LinkedHashMap propsDef = ((StatefulEntityType)typeDefinition).getPropertiesDef(); + if(propsDef != null && propsDef.get(pname) != null) { + PropertyDef pd = (PropertyDef)propsDef.get(pname); + Property prop = new Property(pname,pvalue,pd.getSchema(),customDef); + props.add(prop); + } + } + ArrayList pds = ((StatefulEntityType)typeDefinition).getPropertiesDefObjects(); + for(Object pdo: pds) { + PropertyDef pd = (PropertyDef)pdo; + if(pd.getDefault() != null && properties.get(pd.getName()) == null) { + Property prop = new 
Property(pd.getName(),pd.getDefault(),pd.getSchema(),customDef); + props.add(prop); + } + } + return props; + } + + @SuppressWarnings("unchecked") + private ArrayList _createInterfaces() { + ArrayList interfaces = new ArrayList<>(); + LinkedHashMap typeInterfaces = new LinkedHashMap(); + if(typeDefinition instanceof RelationshipType) { + if(entityTpl instanceof LinkedHashMap) { + typeInterfaces = (LinkedHashMap)entityTpl.get(INTERFACES); + if(typeInterfaces == null) { + for(String relName: entityTpl.keySet()) { + Object relValue = entityTpl.get(relName); + if(!relName.equals("type")) { + Object relDef = relValue; + LinkedHashMap rel = null; + if(relDef instanceof LinkedHashMap) { + Object relob = ((LinkedHashMap)relDef).get("relationship"); + if(relob instanceof LinkedHashMap) { + rel = (LinkedHashMap)relob; + } + } + if(rel != null) { + if(rel.get(INTERFACES) != null) { + typeInterfaces = (LinkedHashMap)rel.get(INTERFACES); + break; + } + } + } + } + } + } + } + else { + typeInterfaces = (LinkedHashMap) + ((EntityType)typeDefinition).getValue(INTERFACES,entityTpl,false); + } + if(typeInterfaces != null) { + for(Map.Entry me: typeInterfaces.entrySet()) { + String interfaceType = me.getKey(); + LinkedHashMap value = (LinkedHashMap)me.getValue(); + for(Map.Entry ve: value.entrySet()) { + String op = ve.getKey(); + Object opDef = ve.getValue(); + InterfacesDef iface = new InterfacesDef((EntityType)typeDefinition, + interfaceType, + this, + op, + opDef); + interfaces.add(iface); + } + + } + } + return interfaces; + } + + public Capability getCapability(String name) { + // Provide named capability + // :param name: name of capability + // :return: capability object if found, None otherwise + LinkedHashMap caps = getCapabilities(); + if(caps != null) { + return caps.get(name); + } + return null; + } + + // getter + public String getName() { + return name; + } + + public StatefulEntityType getTypeDefinition() { + return typeDefinition; + } + + public LinkedHashMap 
getCustomDef() { + return customDef; + } + + @Override + public String toString() { + return "EntityTemplate{" + + "name='" + name + '\'' + + ", entityTpl=" + entityTpl + + ", customDef=" + customDef + + ", typeDefinition=" + typeDefinition + + ", _properties=" + _properties + + ", _interfaces=" + _interfaces + + ", _requirements=" + _requirements + + ", _capabilities=" + _capabilities + + '}'; + } +} + +/*python + +class EntityTemplate(object): + '''Base class for TOSCA templates.''' + + SECTIONS = (DERIVED_FROM, PROPERTIES, REQUIREMENTS, + INTERFACES, CAPABILITIES, TYPE, DESCRIPTION, DIRECTIVES, + ATTRIBUTES, ARTIFACTS, NODE_FILTER, COPY) = \ + ('derived_from', 'properties', 'requirements', 'interfaces', + 'capabilities', 'type', 'description', 'directives', + 'attributes', 'artifacts', 'node_filter', 'copy') + REQUIREMENTS_SECTION = (NODE, CAPABILITY, RELATIONSHIP, OCCURRENCES, NODE_FILTER) = \ + ('node', 'capability', 'relationship', + 'occurrences', 'node_filter') + # Special key names + SPECIAL_SECTIONS = (METADATA) = ('metadata') + + def __init__(self, name, template, entity_name, custom_def=None): + self.name = name + self.entity_tpl = template + self.custom_def = custom_def + self._validate_field(self.entity_tpl) + type = self.entity_tpl.get('type') + UnsupportedType.validate_type(type) + if entity_name == 'node_type': + self.type_definition = NodeType(type, custom_def) \ + if type is not None else None + if entity_name == 'relationship_type': + relationship = template.get('relationship') + type = None + if relationship and isinstance(relationship, dict): + type = relationship.get('type') + elif isinstance(relationship, str): + type = self.entity_tpl['relationship'] + else: + type = self.entity_tpl['type'] + UnsupportedType.validate_type(type) + self.type_definition = RelationshipType(type, + None, custom_def) + if entity_name == 'policy_type': + if not type: + msg = (_('Policy definition of "%(pname)s" must have' + ' a "type" ''attribute.') % 
dict(pname=name)) + ExceptionCollector.appendException( + ValidationError(msg)) + + self.type_definition = PolicyType(type, custom_def) + if entity_name == 'group_type': + self.type_definition = GroupType(type, custom_def) \ + if type is not None else None + self._properties = None + self._interfaces = None + self._requirements = None + self._capabilities = None + + @property + def type(self): + if self.type_definition: + return self.type_definition.type + + @property + def parent_type(self): + if self.type_definition: + return self.type_definition.parent_type + + @property + def requirements(self): + if self._requirements is None: + self._requirements = self.type_definition.get_value( + self.REQUIREMENTS, + self.entity_tpl) or [] + return self._requirements + + def get_properties_objects(self): + '''Return properties objects for this template.''' + if self._properties is None: + self._properties = self._create_properties() + return self._properties + + def get_properties(self): + '''Return a dictionary of property name-object pairs.''' + return {prop.name: prop + for prop in self.get_properties_objects()} + + def get_property_value(self, name): + '''Return the value of a given property name.''' + props = self.get_properties() + if props and name in props.keys(): + return props[name].value + + @property + def interfaces(self): + if self._interfaces is None: + self._interfaces = self._create_interfaces() + return self._interfaces + + def get_capabilities_objects(self): + '''Return capabilities objects for this template.''' + if not self._capabilities: + self._capabilities = self._create_capabilities() + return self._capabilities + + def get_capabilities(self): + '''Return a dictionary of capability name-object pairs.''' + return {cap.name: cap + for cap in self.get_capabilities_objects()} + + def is_derived_from(self, type_str): + '''Check if object inherits from the given type. + + Returns true if this object is derived from 'type_str'. + False otherwise. 
+ ''' + if not self.type: + return False + elif self.type == type_str: + return True + elif self.parent_type: + return self.parent_type.is_derived_from(type_str) + else: + return False + + def _create_capabilities(self): + capability = [] + caps = self.type_definition.get_value(self.CAPABILITIES, + self.entity_tpl, True) + if caps: + for name, props in caps.items(): + capabilities = self.type_definition.get_capabilities() + if name in capabilities.keys(): + c = capabilities[name] + properties = {} + # first use the definition default value + if c.properties: + for property_name in c.properties.keys(): + prop_def = c.properties[property_name] + if 'default' in prop_def: + properties[property_name] = prop_def['default'] + # then update (if available) with the node properties + if 'properties' in props and props['properties']: + properties.update(props['properties']) + + cap = Capability(name, properties, c) + capability.append(cap) + return capability + + def _validate_properties(self, template, entitytype): + properties = entitytype.get_value(self.PROPERTIES, template) + self._common_validate_properties(entitytype, properties) + + def _validate_capabilities(self): + type_capabilities = self.type_definition.get_capabilities() + allowed_caps = \ + type_capabilities.keys() if type_capabilities else [] + capabilities = self.type_definition.get_value(self.CAPABILITIES, + self.entity_tpl) + if capabilities: + self._common_validate_field(capabilities, allowed_caps, + 'capabilities') + self._validate_capabilities_properties(capabilities) + + def _validate_capabilities_properties(self, capabilities): + for cap, props in capabilities.items(): + capability = self.get_capability(cap) + if not capability: + continue + capabilitydef = capability.definition + self._common_validate_properties(capabilitydef, + props[self.PROPERTIES]) + + # validating capability properties values + for prop in self.get_capability(cap).get_properties_objects(): + prop.validate() + + # 
tODO(srinivas_tadepalli): temporary work around to validate + # default_instances until standardized in specification + if cap == "scalable" and prop.name == "default_instances": + prop_dict = props[self.PROPERTIES] + min_instances = prop_dict.get("min_instances") + max_instances = prop_dict.get("max_instances") + default_instances = prop_dict.get("default_instances") + if not (min_instances <= default_instances + <= max_instances): + err_msg = ('"properties" of template "%s": ' + '"default_instances" value is not between ' + '"min_instances" and "max_instances".' % + self.name) + ExceptionCollector.appendException( + ValidationError(message=err_msg)) + + def _common_validate_properties(self, entitytype, properties): + allowed_props = [] + required_props = [] + for p in entitytype.get_properties_def_objects(): + allowed_props.append(p.name) + # If property is 'required' and has no 'default' value then record + if p.required and p.default is None: + required_props.append(p.name) + # validate all required properties have values + if properties: + req_props_no_value_or_default = [] + self._common_validate_field(properties, allowed_props, + 'properties') + # make sure it's not missing any property required by a tosca type + for r in required_props: + if r not in properties.keys(): + req_props_no_value_or_default.append(r) + # Required properties found without value or a default value + if req_props_no_value_or_default: + ExceptionCollector.appendException( + MissingRequiredFieldError( + what='"properties" of template "%s"' % self.name, + required=req_props_no_value_or_default)) + else: + # Required properties in schema, but not in template + if required_props: + ExceptionCollector.appendException( + MissingRequiredFieldError( + what='"properties" of template "%s"' % self.name, + required=required_props)) + + def _validate_field(self, template): + if not isinstance(template, dict): + ExceptionCollector.appendException( + MissingRequiredFieldError( + what='Template "%s"' 
% self.name, required=self.TYPE)) + try: + relationship = template.get('relationship') + if relationship and not isinstance(relationship, str): + relationship[self.TYPE] + elif isinstance(relationship, str): + template['relationship'] + else: + template[self.TYPE] + except KeyError: + ExceptionCollector.appendException( + MissingRequiredFieldError( + what='Template "%s"' % self.name, required=self.TYPE)) + + def _common_validate_field(self, schema, allowedlist, section): + for name in schema: + if name not in allowedlist: + ExceptionCollector.appendException( + UnknownFieldError( + what=('"%(section)s" of template "%(nodename)s"' + % {'section': section, 'nodename': self.name}), + field=name)) + + def _create_properties(self): + props = [] + properties = self.type_definition.get_value(self.PROPERTIES, + self.entity_tpl) or {} + for name, value in properties.items(): + props_def = self.type_definition.get_properties_def() + if props_def and name in props_def: + prop = Property(name, value, + props_def[name].schema, self.custom_def) + props.append(prop) + for p in self.type_definition.get_properties_def_objects(): + if p.default is not None and p.name not in properties.keys(): + prop = Property(p.name, p.default, p.schema, self.custom_def) + props.append(prop) + return props + + def _create_interfaces(self): + interfaces = [] + type_interfaces = None + if isinstance(self.type_definition, RelationshipType): + if isinstance(self.entity_tpl, dict): + if self.INTERFACES in self.entity_tpl: + type_interfaces = self.entity_tpl[self.INTERFACES] + else: + for rel_def, value in self.entity_tpl.items(): + if rel_def != 'type': + rel_def = self.entity_tpl.get(rel_def) + rel = None + if isinstance(rel_def, dict): + rel = rel_def.get('relationship') + if rel: + if self.INTERFACES in rel: + type_interfaces = rel[self.INTERFACES] + break + else: + type_interfaces = self.type_definition.get_value(self.INTERFACES, + self.entity_tpl) + if type_interfaces: + for interface_type, value 
in type_interfaces.items(): + for op, op_def in value.items(): + iface = InterfacesDef(self.type_definition, + interfacetype=interface_type, + node_template=self, + name=op, + value=op_def) + interfaces.append(iface) + return interfaces + + def get_capability(self, name): + """Provide named capability + + :param name: name of capability + :return: capability object if found, None otherwise + """ + caps = self.get_capabilities() + if caps and name in caps.keys(): + return caps[name] +*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/Group.java b/src/main/java/org/openecomp/sdc/toscaparser/api/Group.java new file mode 100644 index 0000000..8ed623f --- /dev/null +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/Group.java @@ -0,0 +1,137 @@ +package org.openecomp.sdc.toscaparser.api; + +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.Map; + +import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; +import org.openecomp.sdc.toscaparser.api.elements.Metadata; +import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; +import org.openecomp.sdc.toscaparser.api.utils.ValidateUtils; + +public class Group extends EntityTemplate { + + private static final String TYPE = "type"; + private static final String METADATA = "metadata"; + private static final String DESCRIPTION = "description"; + private static final String PROPERTIES = "properties"; + private static final String MEMBERS = "members"; + private static final String INTERFACES = "interfaces"; + private static final String SECTIONS[] = { + TYPE, METADATA, DESCRIPTION, PROPERTIES, MEMBERS, INTERFACES}; + + private String name; + LinkedHashMap tpl; + ArrayList memberNodes; + LinkedHashMap customDef; + Metadata metaData; + + + public Group(String _name, LinkedHashMap _templates, + ArrayList _memberNodes, + LinkedHashMap _customDef) { + super(_name, _templates, "group_type", _customDef); + + name = _name; + tpl = _templates; + 
if(tpl.get(METADATA) != null) { + Object metadataObject = tpl.get(METADATA); + ValidateUtils.validateMap(metadataObject); + metaData = new Metadata((Map)metadataObject); + } + memberNodes = _memberNodes; + _validateKeys(); + } + + public Metadata getMetadata() { + return metaData; + } + + public ArrayList getMembers() { + return (ArrayList)entityTpl.get("members"); + } + + public String getDescription() { + return (String)entityTpl.get("description"); + + } + + public ArrayList getMemberNodes() { + return memberNodes; + } + + private void _validateKeys() { + for(String key: entityTpl.keySet()) { + boolean bFound = false; + for(String sect: SECTIONS) { + if(key.equals(sect)) { + bFound = true; + break; + } + } + if(!bFound) { + ThreadLocalsHolder.getCollector().appendException(String.format( + "UnknownFieldError: Groups \"%s\" contains unknown field \"%s\"", + name,key)); + } + } + } + + @Override + public String toString() { + return "Group{" + + "name='" + name + '\'' + + ", tpl=" + tpl + + ", memberNodes=" + memberNodes + + ", customDef=" + customDef + + ", metaData=" + metaData + + '}'; + } +} + +/*python + +from toscaparser.common.exception import ExceptionCollector +from toscaparser.common.exception import UnknownFieldError +from toscaparser.entity_template import EntityTemplate +from toscaparser.utils import validateutils + +SECTIONS = (TYPE, METADATA, DESCRIPTION, PROPERTIES, MEMBERS, INTERFACES) = \ + ('type', 'metadata', 'description', + 'properties', 'members', 'interfaces') + + +class Group(EntityTemplate): + + def __init__(self, name, group_templates, member_nodes, custom_defs=None): + super(Group, self).__init__(name, + group_templates, + 'group_type', + custom_defs) + self.name = name + self.tpl = group_templates + self.meta_data = None + if self.METADATA in self.tpl: + self.meta_data = self.tpl.get(self.METADATA) + validateutils.validate_map(self.meta_data) + self.member_nodes = member_nodes + self._validate_keys() + + @property + def members(self): 
+ return self.entity_tpl.get('members') + + @property + def description(self): + return self.entity_tpl.get('description') + + def get_member_nodes(self): + return self.member_nodes + + def _validate_keys(self): + for key in self.entity_tpl.keys(): + if key not in SECTIONS: + ExceptionCollector.appendException( + UnknownFieldError(what='Groups "%s"' % self.name, + field=key)) +*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/ImportsLoader.java b/src/main/java/org/openecomp/sdc/toscaparser/api/ImportsLoader.java new file mode 100644 index 0000000..a97a360 --- /dev/null +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/ImportsLoader.java @@ -0,0 +1,728 @@ +package org.openecomp.sdc.toscaparser.api; + +import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; +import org.openecomp.sdc.toscaparser.api.elements.TypeValidation; +import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; +import org.openecomp.sdc.toscaparser.api.utils.UrlUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.yaml.snakeyaml.Yaml; + +import java.io.*; +import java.net.URL; +import java.nio.file.Paths; +import java.util.*; + +public class ImportsLoader { + + private static Logger log = LoggerFactory.getLogger(ImportsLoader.class.getName()); + private static final String FILE = "file"; + private static final String REPOSITORY = "repository"; + private static final String NAMESPACE_URI = "namespace_uri"; + private static final String NAMESPACE_PREFIX = "namespace_prefix"; + private String IMPORTS_SECTION[] = {FILE, REPOSITORY, NAMESPACE_URI, NAMESPACE_PREFIX}; + + private ArrayList importslist; + private String path; + private ArrayList typeDefinitionList; + + private LinkedHashMap customDefs; + private ArrayList> nestedToscaTpls; + private LinkedHashMap repositories; + + @SuppressWarnings("unchecked") + public ImportsLoader(ArrayList_importslist, + String _path, + Object _typeDefinitionList, + 
LinkedHashMap tpl) { + + this.importslist = _importslist; + customDefs = new LinkedHashMap(); + nestedToscaTpls = new ArrayList>(); + if((_path == null || _path.isEmpty()) && tpl == null) { + //msg = _('Input tosca template is not provided.') + //log.warning(msg) + ThreadLocalsHolder.getCollector().appendException("ValidationError: Input tosca template is not provided"); + } + + this.path = _path; + this.repositories = new LinkedHashMap(); + + if(tpl != null && tpl.get("repositories") != null) { + this.repositories = (LinkedHashMap)tpl.get("repositories"); + } + this.typeDefinitionList = new ArrayList(); + if(_typeDefinitionList != null) { + if(_typeDefinitionList instanceof ArrayList) { + this.typeDefinitionList = (ArrayList)_typeDefinitionList; + } + else { + this.typeDefinitionList.add((String)_typeDefinitionList); + } + } + _validateAndLoadImports(); + } + + public LinkedHashMap getCustomDefs() { + return customDefs; + } + + public ArrayList> getNestedToscaTpls() { + return nestedToscaTpls; + } + + @SuppressWarnings({ "unchecked", "unused" }) + public void _validateAndLoadImports() { + Set importNames = new HashSet(); + + if(importslist == null) { + //msg = _('"imports" keyname is defined without including templates.') + //log.error(msg) + ThreadLocalsHolder.getCollector().appendException( + "ValidationError: \"imports\" keyname is defined without including templates"); + return; + } + + for(Object importDef: importslist) { + String fullFileName = null; + LinkedHashMap customType = null; + if(importDef instanceof LinkedHashMap) { + for(Map.Entry me: ((LinkedHashMap)importDef).entrySet()) { + String importName = me.getKey(); + Object importUri = me.getValue(); + if(importNames.contains(importName)) { + //msg = (_('Duplicate import name "%s" was found.') % import_name) + //log.error(msg) + ThreadLocalsHolder.getCollector().appendException(String.format( + "ValidationError: Duplicate import name \"%s\" was found",importName)); + } + importNames.add(importName); 
//??? + + // _loadImportTemplate returns 2 objects + Object ffnct[] = _loadImportTemplate(importName, importUri); + fullFileName = (String)ffnct[0]; + customType = (LinkedHashMap)ffnct[1]; + String namespacePrefix = ""; + if(importUri instanceof LinkedHashMap) { + namespacePrefix = (String) + ((LinkedHashMap)importUri).get(NAMESPACE_PREFIX); + } + + if(customType != null) { + TypeValidation tv = new TypeValidation(customType, importDef); + _updateCustomDefs(customType, namespacePrefix); + } + } + } + else { // old style of imports + // _loadImportTemplate returns 2 objects + Object ffnct[] = _loadImportTemplate(null,importDef); + fullFileName = (String)ffnct[0]; + customType = (LinkedHashMap)ffnct[1]; + if(customType != null) { + TypeValidation tv = new TypeValidation(customType,importDef); + _updateCustomDefs(customType,null); + } + } + _updateNestedToscaTpls(fullFileName, customType); + + + } + } + + @SuppressWarnings("unchecked") + private void _updateCustomDefs(LinkedHashMap customType, String namespacePrefix) { + LinkedHashMap outerCustomTypes;// = new LinkedHashMap(); + for(String typeDef: typeDefinitionList) { + if(typeDef.equals("imports")) { + // imports are ArrayList... + customDefs.put("imports",(ArrayList)customType.get(typeDef)); + } + else { + outerCustomTypes = (LinkedHashMap)customType.get(typeDef); + if(outerCustomTypes != null) { + if(namespacePrefix != null && !namespacePrefix.isEmpty()) { + LinkedHashMap prefixCustomTypes = new LinkedHashMap(); + for(Map.Entry me: outerCustomTypes.entrySet()) { + String typeDefKey = me.getKey(); + String nameSpacePrefixToKey = namespacePrefix + "." 
+ typeDefKey; + prefixCustomTypes.put(nameSpacePrefixToKey, outerCustomTypes.get(typeDefKey)); + } + customDefs.putAll(prefixCustomTypes); + } + else { + customDefs.putAll(outerCustomTypes); + } + } + } + } + + private void _updateNestedToscaTpls(String fullFileName,LinkedHashMap customTpl) { + if(fullFileName != null && customTpl != null) { + LinkedHashMap tt = new LinkedHashMap(); + tt.put(fullFileName, customTpl); + nestedToscaTpls.add(tt); + } + } + + private void _validateImportKeys(String importName, LinkedHashMap importUri) { + if(importUri.get(FILE) == null) { + //log.warning(_('Missing keyname "file" in import "%(name)s".') % {'name': import_name}) + ThreadLocalsHolder.getCollector().appendException(String.format( + "MissingRequiredFieldError: Import of template \"%s\" is missing field %s",importName,FILE)); + } + for(String key: importUri.keySet()) { + boolean bFound = false; + for(String is: IMPORTS_SECTION) { + if(is.equals(key)) { + bFound = true; + break; + } + } + if(!bFound) { + //log.warning(_('Unknown keyname "%(key)s" error in ' + // 'imported definition "%(def)s".') + // % {'key': key, 'def': import_name}) + ThreadLocalsHolder.getCollector().appendException(String.format( + "UnknownFieldError: Import of template \"%s\" has unknown fields %s",importName,key)); + } + } + } + + @SuppressWarnings("unchecked") + private Object[] _loadImportTemplate(String importName, Object importUriDef) { + /* + This method loads the custom type definitions referenced in "imports" + section of the TOSCA YAML template by determining whether each import + is specified via a file reference (by relative or absolute path) or a + URL reference. 
+ + Possibilities: + +----------+--------+------------------------------+ + | template | import | comment | + +----------+--------+------------------------------+ + | file | file | OK | + | file | URL | OK | + | preparsed| file | file must be a full path | + | preparsed| URL | OK | + | URL | file | file must be a relative path | + | URL | URL | OK | + +----------+--------+------------------------------+ + */ + Object al[] = new Object[2]; + + boolean shortImportNotation = false; + String fileName; + String repository; + if(importUriDef instanceof LinkedHashMap) { + _validateImportKeys(importName, (LinkedHashMap)importUriDef); + fileName = (String)((LinkedHashMap)importUriDef).get(FILE); + repository = (String)((LinkedHashMap)importUriDef).get(REPOSITORY); + if(repository != null) { + if(!repositories.keySet().contains(repository)) { + ThreadLocalsHolder.getCollector().appendException(String.format( + "InvalidPropertyValueError: Repository \"%s\" not found in \"%s\"", + repository,repositories.keySet().toString())); + } + } + } + else { + fileName = (String)importUriDef; + repository = null; + shortImportNotation = true; + } + + if(fileName == null || fileName.isEmpty()) { + //msg = (_('A template file name is not provided with import ' + // 'definition "%(import_name)s".') + // % {'import_name': import_name}) + //log.error(msg) + ThreadLocalsHolder.getCollector().appendException(String.format( + "ValidationError: A template file name is not provided with import definition \"%s\"",importName)); + al[0] = al[1] = null; + return al; + } + + if(UrlUtils.validateUrl(fileName)) { + try { + al[0] = fileName; + InputStream input = new URL(fileName).openStream(); + Yaml yaml = new Yaml(); + al[1] = yaml.load(input); + return al; + } + catch(IOException e) { + ThreadLocalsHolder.getCollector().appendException(String.format( + "ImportError: \"%s\" loading YAML import from \"%s\"",e.getClass().getSimpleName(),fileName)); + al[0] = al[1] = null; + return al; + } + } + else 
if(repository == null || repository.isEmpty()) { + boolean aFile = false; + String importTemplate = null; + if(path != null && !path.isEmpty()) { + if(UrlUtils.validateUrl(path)) { + File fp = new File(fileName); // FIX: must test the imported file name (python: os.path.isabs(file_name)); the template path is a URL here, and the error message below reports fileName + if(fp.isAbsolute()) { + String msg = String.format( + "ImportError: Absolute file name \"%s\" cannot be used in the URL-based input template \"%s\"", + fileName,path); + ThreadLocalsHolder.getCollector().appendException(msg); + al[0] = al[1] = null; + return al; + } + importTemplate = UrlUtils.joinUrl(path,fileName); + aFile = false; + } + else { + + aFile = true; + File fp = new File(path); + if(fp.isFile()) { + File fn = new File(fileName); + if(fn.isFile()) { + importTemplate = fileName; + } + else { + String fullPath = Paths.get(path).toAbsolutePath().getParent().toString() + File.separator + fileName; + File ffp = new File(fullPath); + if(ffp.isFile()) { + importTemplate = fullPath; + } + else { + String dirPath = Paths.get(path).toAbsolutePath().getParent().toString(); + String filePath; + if(Paths.get(fileName).getParent() != null) { + filePath = Paths.get(fileName).getParent().toString(); + } + else { + filePath = ""; + } + if(!filePath.isEmpty() && dirPath.endsWith(filePath)) { + String sFileName = Paths.get(fileName).getFileName().toString(); + importTemplate = dirPath + File.separator + sFileName; + File fit = new File(importTemplate); + if(!fit.isFile()) { + //msg = (_('"%(import_template)s" is' + // 'not a valid file') + // % {'import_template': + // import_template}) + //log.error(msg) + String msg = String.format( + "ValueError: \"%s\" is not a valid file",importTemplate); + ThreadLocalsHolder.getCollector().appendException(msg); + log.debug("ImportsLoader - _loadImportTemplate - {}", msg); + } + } + } + } + } + } + } + else { // template is pre-parsed + File fn = new File(fileName); + if(fn.isAbsolute() && fn.isFile()) { + aFile = true; + importTemplate = fileName; + } + else { + String msg = String.format( + "Relative file name \"%s\" cannot 
be used in a pre-parsed input template",fileName); + ThreadLocalsHolder.getCollector().appendException("ImportError: " + msg); + al[0] = al[1] = null; + return al; + } + } + + if(importTemplate == null || importTemplate.isEmpty()) { + //log.error(_('Import "%(name)s" is not valid.') % + // {'name': import_uri_def}) + ThreadLocalsHolder.getCollector().appendException(String.format( + "ImportError: Import \"%s\" is not valid",importUriDef)); + al[0] = al[1] = null; + return al; + } + + // for now, this must be a file + if(!aFile) { + log.error("ImportsLoader - _loadImportTemplate - Error!! Expected a file. importUriDef = {}, importTemplate = {}", importUriDef, importTemplate); + ThreadLocalsHolder.getCollector().appendException(String.format( + "ImportError: Import \"%s\" is not a file",importName)); + al[0] = al[1] = null; + return al; + } + try { + al[0] = importTemplate; + InputStream input = new FileInputStream(new File(importTemplate)); + Yaml yaml = new Yaml(); + al[1] = yaml.load(input); + return al; + } + catch(FileNotFoundException e) { + ThreadLocalsHolder.getCollector().appendException(String.format( + "ImportError: Failed to load YAML from \"%s\"",importName)); + al[0] = al[1] = null; + return al; + } + catch(Exception e) { + ThreadLocalsHolder.getCollector().appendException(String.format( + "ImportError: Exception from SnakeYAML file = \"%s\"",importName)); + al[0] = al[1] = null; + return al; + } + } + + if(shortImportNotation) { + //log.error(_('Import "%(name)s" is not valid.') % import_uri_def) + ThreadLocalsHolder.getCollector().appendException(String.format( + "ImportError: Import \"%s\" is not valid",importName)); + al[0] = al[1] = null; + return al; + } + + String fullUrl = ""; + String repoUrl = ""; + if(repository != null && !repository.isEmpty()) { + if(repositories != null) { + for(String repoName: repositories.keySet()) { + if(repoName.equals(repository)) { + Object repoDef = repositories.get(repoName); + if(repoDef instanceof String) { + 
repoUrl = (String)repoDef; + } + else if(repoDef instanceof LinkedHashMap) { + repoUrl = (String)((LinkedHashMap)repoDef).get("url"); + } + // Remove leading, ending spaces and strip + // the last character if "/" + repoUrl = repoUrl.trim(); + if(repoUrl.endsWith("/")) { + repoUrl = repoUrl.substring(0,repoUrl.length()-1); + } + fullUrl = repoUrl + "/" + fileName; + break; + } + } + } + if(fullUrl.isEmpty()) { + String msg = String.format( + "referenced repository \"%s\" in import definition \"%s\" not found", + repository,importName); + ThreadLocalsHolder.getCollector().appendException("ImportError: " + msg); + al[0] = al[1] = null; + return al; + } + } + if(UrlUtils.validateUrl(fullUrl)) { + try { + al[0] = fullUrl; + InputStream input = new URL(fullUrl).openStream(); + Yaml yaml = new Yaml(); + al[1] = yaml.load(input); + return al; + } + catch(IOException e) { + ThreadLocalsHolder.getCollector().appendException(String.format( + "ImportError: Exception loading YAML import from \"%s\"",fullUrl)); + al[0] = al[1] = null; + return al; + } + } + else { + String msg = String.format( + "repository URL \"%s\" in import definition \"%s\" is not valid", + repoUrl,importName); + ThreadLocalsHolder.getCollector().appendException("ImportError: " + msg); + } + + // if we got here something is wrong with the flow... 
+ log.error("ImportsLoader - _loadImportTemplate - got to dead end (importName {})", importName); + ThreadLocalsHolder.getCollector().appendException(String.format( + "ImportError: _loadImportTemplate got to dead end (importName %s)\n",importName)); + al[0] = al[1] = null; + return al; + } + + @Override + public String toString() { + return "ImportsLoader{" + + "IMPORTS_SECTION=" + Arrays.toString(IMPORTS_SECTION) + + ", importslist=" + importslist + + ", path='" + path + '\'' + + ", typeDefinitionList=" + typeDefinitionList + + ", customDefs=" + customDefs + + ", nestedToscaTpls=" + nestedToscaTpls + + ", repositories=" + repositories + + '}'; + } +} + +/*python + +import logging +import os + +from toscaparser.common.exception import ExceptionCollector +from toscaparser.common.exception import InvalidPropertyValueError +from toscaparser.common.exception import MissingRequiredFieldError +from toscaparser.common.exception import UnknownFieldError +from toscaparser.common.exception import ValidationError +from toscaparser.elements.tosca_type_validation import TypeValidation +from toscaparser.utils.gettextutils import _ +import org.openecomp.sdc.toscaparser.api.utils.urlutils +import org.openecomp.sdc.toscaparser.api.utils.yamlparser + +YAML_LOADER = toscaparser.utils.yamlparser.load_yaml +log = logging.getLogger("tosca") + + +class ImportsLoader(object): + + IMPORTS_SECTION = (FILE, REPOSITORY, NAMESPACE_URI, NAMESPACE_PREFIX) = \ + ('file', 'repository', 'namespace_uri', + 'namespace_prefix') + + def __init__(self, importslist, path, type_definition_list=None, + tpl=None): + self.importslist = importslist + self.custom_defs = {} + if not path and not tpl: + msg = _('Input tosca template is not provided.') + log.warning(msg) + ExceptionCollector.appendException(ValidationError(message=msg)) + self.path = path + self.repositories = {} + if tpl and tpl.get('repositories'): + self.repositories = tpl.get('repositories') + self.type_definition_list = [] + if 
type_definition_list: + if isinstance(type_definition_list, list): + self.type_definition_list = type_definition_list + else: + self.type_definition_list.append(type_definition_list) + self._validate_and_load_imports() + + def get_custom_defs(self): + return self.custom_defs + + def _validate_and_load_imports(self): + imports_names = set() + + if not self.importslist: + msg = _('"imports" keyname is defined without including ' + 'templates.') + log.error(msg) + ExceptionCollector.appendException(ValidationError(message=msg)) + return + + for import_def in self.importslist: + if isinstance(import_def, dict): + for import_name, import_uri in import_def.items(): + if import_name in imports_names: + msg = (_('Duplicate import name "%s" was found.') % + import_name) + log.error(msg) + ExceptionCollector.appendException( + ValidationError(message=msg)) + imports_names.add(import_name) + + custom_type = self._load_import_template(import_name, + import_uri) + namespace_prefix = None + if isinstance(import_uri, dict): + namespace_prefix = import_uri.get( + self.NAMESPACE_PREFIX) + if custom_type: + TypeValidation(custom_type, import_def) + self._update_custom_def(custom_type, namespace_prefix) + else: # old style of imports + custom_type = self._load_import_template(None, + import_def) + if custom_type: + TypeValidation( + custom_type, import_def) + self._update_custom_def(custom_type, None) + + def _update_custom_def(self, custom_type, namespace_prefix): + outer_custom_types = {} + for type_def in self.type_definition_list: + outer_custom_types = custom_type.get(type_def) + if outer_custom_types: + if type_def == "imports": + self.custom_defs.update({'imports': outer_custom_types}) + else: + if namespace_prefix: + prefix_custom_types = {} + for type_def_key in outer_custom_types.keys(): + namespace_prefix_to_key = (namespace_prefix + + "." 
+ type_def_key) + prefix_custom_types[namespace_prefix_to_key] = \ + outer_custom_types[type_def_key] + self.custom_defs.update(prefix_custom_types) + else: + self.custom_defs.update(outer_custom_types) + + def _validate_import_keys(self, import_name, import_uri_def): + if self.FILE not in import_uri_def.keys(): + log.warning(_('Missing keyname "file" in import "%(name)s".') + % {'name': import_name}) + ExceptionCollector.appendException( + MissingRequiredFieldError( + what='Import of template "%s"' % import_name, + required=self.FILE)) + for key in import_uri_def.keys(): + if key not in self.IMPORTS_SECTION: + log.warning(_('Unknown keyname "%(key)s" error in ' + 'imported definition "%(def)s".') + % {'key': key, 'def': import_name}) + ExceptionCollector.appendException( + UnknownFieldError( + what='Import of template "%s"' % import_name, + field=key)) + + def _load_import_template(self, import_name, import_uri_def): + """Handle custom types defined in imported template files + + This method loads the custom type definitions referenced in "imports" + section of the TOSCA YAML template by determining whether each import + is specified via a file reference (by relative or absolute path) or a + URL reference. 
+ + Possibilities: + +----------+--------+------------------------------+ + | template | import | comment | + +----------+--------+------------------------------+ + | file | file | OK | + | file | URL | OK | + | preparsed| file | file must be a full path | + | preparsed| URL | OK | + | URL | file | file must be a relative path | + | URL | URL | OK | + +----------+--------+------------------------------+ + """ + short_import_notation = False + if isinstance(import_uri_def, dict): + self._validate_import_keys(import_name, import_uri_def) + file_name = import_uri_def.get(self.FILE) + repository = import_uri_def.get(self.REPOSITORY) + repos = self.repositories.keys() + if repository is not None: + if repository not in repos: + ExceptionCollector.appendException( + InvalidPropertyValueError( + what=_('Repository is not found in "%s"') % repos)) + else: + file_name = import_uri_def + repository = None + short_import_notation = True + + if not file_name: + msg = (_('A template file name is not provided with import ' + 'definition "%(import_name)s".') + % {'import_name': import_name}) + log.error(msg) + ExceptionCollector.appendException(ValidationError(message=msg)) + return + + if toscaparser.utils.urlutils.UrlUtils.validate_url(file_name): + return YAML_LOADER(file_name, False) + elif not repository: + import_template = None + if self.path: + if toscaparser.utils.urlutils.UrlUtils.validate_url(self.path): + if os.path.isabs(file_name): + msg = (_('Absolute file name "%(name)s" cannot be ' + 'used in a URL-based input template ' + '"%(template)s".') + % {'name': file_name, 'template': self.path}) + log.error(msg) + ExceptionCollector.appendException(ImportError(msg)) + return + import_template = toscaparser.utils.urlutils.UrlUtils.\ + join_url(self.path, file_name) + a_file = False + else: + a_file = True + main_a_file = os.path.isfile(self.path) + + if main_a_file: + if os.path.isfile(file_name): + import_template = file_name + else: + full_path = os.path.join( + 
os.path.dirname(os.path.abspath(self.path)), + file_name) + if os.path.isfile(full_path): + import_template = full_path + else: + file_path = file_name.rpartition("/") + dir_path = os.path.dirname(os.path.abspath( + self.path)) + if file_path[0] != '' and dir_path.endswith( + file_path[0]): + import_template = dir_path + "/" +\ + file_path[2] + if not os.path.isfile(import_template): + msg = (_('"%(import_template)s" is ' + 'not a valid file') + % {'import_template': + import_template}) + log.error(msg) + ExceptionCollector.appendException( + ValueError(msg)) + else: # template is pre-parsed + if os.path.isabs(file_name) and os.path.isfile(file_name): + a_file = True + import_template = file_name + else: + msg = (_('Relative file name "%(name)s" cannot be used ' + 'in a pre-parsed input template.') + % {'name': file_name}) + log.error(msg) + ExceptionCollector.appendException(ImportError(msg)) + return + + if not import_template: + log.error(_('Import "%(name)s" is not valid.') % + {'name': import_uri_def}) + ExceptionCollector.appendException( + ImportError(_('Import "%s" is not valid.') % + import_uri_def)) + return + return YAML_LOADER(import_template, a_file) + + if short_import_notation: + log.error(_('Import "%(name)s" is not valid.') % {'name': import_uri_def}) + ExceptionCollector.appendException( + ImportError(_('Import "%s" is not valid.') % import_uri_def)) + return + + full_url = "" + if repository: + if self.repositories: + for repo_name, repo_def in self.repositories.items(): + if repo_name == repository: + # Remove leading and trailing spaces and strip + # any trailing "/" characters + repo_url = ((repo_def['url']).strip()).rstrip("//") + full_url = repo_url + "/" + file_name + + if not full_url: + msg = (_('referenced repository "%(n_uri)s" in import ' + 'definition "%(tpl)s" not found.') + % {'n_uri': repository, 'tpl': import_name}) + log.error(msg) + ExceptionCollector.appendException(ImportError(msg)) + return + + if 
toscaparser.utils.urlutils.UrlUtils.validate_url(full_url): + return YAML_LOADER(full_url, False) + else: + msg = (_('repository url "%(n_uri)s" is not valid in import ' + 'definition "%(tpl)s".') + % {'n_uri': repo_url, 'tpl': import_name}) + log.error(msg) + ExceptionCollector.appendException(ImportError(msg)) +*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/NodeTemplate.java b/src/main/java/org/openecomp/sdc/toscaparser/api/NodeTemplate.java new file mode 100644 index 0000000..c8af559 --- /dev/null +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/NodeTemplate.java @@ -0,0 +1,755 @@ +package org.openecomp.sdc.toscaparser.api; + +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.Map; + +import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; +import org.openecomp.sdc.toscaparser.api.elements.*; +import org.openecomp.sdc.toscaparser.api.utils.CopyUtils; +import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +public class NodeTemplate extends EntityTemplate { + + private LinkedHashMap templates; + private LinkedHashMap customDef; + private ArrayList availableRelTpls; + private LinkedHashMap availableRelTypes; + private LinkedHashMap related; + private ArrayList relationshipTpl; + private LinkedHashMap _relationships; + private SubstitutionMappings subMappingToscaTemplate; + private SubstitutionMappings subMappingToscaTemplate2; + private Metadata metadata; + + private static final String METADATA = "metadata"; + + @SuppressWarnings("unchecked") + public NodeTemplate(String name, + LinkedHashMap ntnodeTemplates, + LinkedHashMap ntcustomDef, + ArrayList ntavailableRelTpls, + LinkedHashMap ntavailableRelTypes) { + + super(name, (LinkedHashMap)ntnodeTemplates.get(name), "node_type", ntcustomDef); + + templates = ntnodeTemplates; + _validateFields((LinkedHashMap)templates.get(name)); + customDef = ntcustomDef; + related = new LinkedHashMap(); + relationshipTpl = new 
ArrayList(); + availableRelTpls = ntavailableRelTpls; + availableRelTypes = ntavailableRelTypes; + _relationships = new LinkedHashMap(); + subMappingToscaTemplate = null; + subMappingToscaTemplate2 = null; + metadata = _metaData(); + } + + @SuppressWarnings("unchecked") + public LinkedHashMap getRelationships() { + if(_relationships.isEmpty()) { + ArrayList requires = getRequirements(); + if(requires != null && requires instanceof ArrayList) { + for(Object ro: requires) { + LinkedHashMap r = (LinkedHashMap)ro; + for(Map.Entry me: r.entrySet()) { + LinkedHashMap explicit = _getExplicitRelationship(r,me.getValue()); + if(explicit != null) { + // _relationships.putAll(explicit)... + for(Map.Entry ee: explicit.entrySet()) { + _relationships.put(ee.getKey(), ee.getValue()); + } + } + } + } + } + } + return _relationships; + } + + @SuppressWarnings("unchecked") + private LinkedHashMap _getExplicitRelationship(LinkedHashMap req,Object value) { + // Handle explicit relationship + + // For example, + // - req: + // node: DBMS + // relationship: tosca.relationships.HostedOn + + LinkedHashMap explicitRelation = new LinkedHashMap(); + String node; + if(value instanceof LinkedHashMap) { + node = (String)((LinkedHashMap)value).get("node"); + } + else { + node = (String)value; + } + + if(node != null && !node.isEmpty()) { + //msg = _('Lookup by TOSCA types is not supported. ' + // 'Requirement for "%s" can not be full-filled.') % self.name + boolean bFound = false; + for(String k: EntityType.TOSCA_DEF.keySet()) { + if(k.equals(node)) { + bFound = true; + break; + } + } + if(bFound || customDef.get(node) != null) { + ThreadLocalsHolder.getCollector().appendException(String.format( + "NotImplementedError: Lookup by TOSCA types is not supported. 
Requirement for \"%s\" can not be full-filled", + getName())); + return null; + } + if(templates.get(node) == null) { + ThreadLocalsHolder.getCollector().appendException(String.format( + "KeyError: Node template \"%s\" was not found",node)); + return null; + } + NodeTemplate relatedTpl = new NodeTemplate(node,templates,customDef,null,null); + Object relationship = null; + String relationshipString = null; + if(value instanceof LinkedHashMap) { + relationship = ((LinkedHashMap)value).get("relationship"); + // here relationship can be a string or a LHM with 'type': + } + // check if its type has relationship defined + if(relationship == null) { + ArrayList parentReqs = ((NodeType)typeDefinition).getAllRequirements(); + if(parentReqs == null) { + ThreadLocalsHolder.getCollector().appendException("ValidationError: parent_req is null"); + } + else { + for(String key: req.keySet()) { + boolean bFoundRel = false; + for(Object rdo: parentReqs) { + LinkedHashMap reqDict = (LinkedHashMap)rdo; + LinkedHashMap relDict = (LinkedHashMap)reqDict.get(key); + if(relDict != null) { + relationship = relDict.get("relationship"); + //BUG-python??? need to break twice? 
+ bFoundRel = true; + break; + } + } + if(bFoundRel) { + break; + } + } + } + } + + if(relationship != null) { + // here relationship can be a string or a LHM with 'type': + if(relationship instanceof String) { + relationshipString = (String)relationship; + } + else if(relationship instanceof LinkedHashMap) { + relationshipString = (String)((LinkedHashMap)relationship).get("type"); + } + + boolean foundRelationshipTpl = false; + // apply available relationship templates if found + if(availableRelTpls != null) { + for(RelationshipTemplate tpl: availableRelTpls) { + if(tpl.getName().equals(relationshipString)) { + RelationshipType rtype = new RelationshipType(tpl.getType(),null,customDef); + explicitRelation.put(rtype, relatedTpl); + tpl.setTarget(relatedTpl); + tpl.setSource(this); + relationshipTpl.add(tpl); + foundRelationshipTpl = true; + } + } + } + // create relationship template object. + String relPrfx = EntityType.RELATIONSHIP_PREFIX; + if(!foundRelationshipTpl) { + if(relationship instanceof LinkedHashMap) { + relationshipString = (String)((LinkedHashMap)relationship).get("type"); + if(relationshipString != null) { + if(availableRelTypes != null && !availableRelTypes.isEmpty() && + availableRelTypes.get(relationshipString) != null) { + ; + } + else if(!(relationshipString).startsWith(relPrfx)) { + relationshipString = relPrfx + relationshipString; + } + } + else { + ThreadLocalsHolder.getCollector().appendException(String.format( + "MissingRequiredFieldError: \"relationship\" used in template \"%s\" is missing required field \"type\"", + relatedTpl.getName())); + } + } + for(RelationshipType rtype: ((NodeType)typeDefinition).getRelationship().keySet()) { + if(rtype.getType().equals(relationshipString)) { + explicitRelation.put(rtype,relatedTpl); + relatedTpl._addRelationshipTemplate(req,rtype.getType(),this); + } + else if(availableRelTypes != null && !availableRelTypes.isEmpty()) { + LinkedHashMap relTypeDef = 
(LinkedHashMap)availableRelTypes.get(relationshipString); + if(relTypeDef != null) { + String superType = (String)relTypeDef.get("derived_from"); + if(superType != null) { + if(!superType.startsWith(relPrfx)) { + superType = relPrfx + superType; + } + if(rtype.getType().equals(superType)) { + explicitRelation.put(rtype,relatedTpl); + relatedTpl._addRelationshipTemplate(req,rtype.getType(),this); + } + } + } + } + } + } + } + } + return explicitRelation; + } + + @SuppressWarnings("unchecked") + private void _addRelationshipTemplate(LinkedHashMap requirement, String rtype, NodeTemplate source) { + LinkedHashMap req = (LinkedHashMap)CopyUtils.copyLhmOrAl(requirement); + req.put("type",rtype); + RelationshipTemplate tpl = new RelationshipTemplate(req, rtype, customDef, this, source); + relationshipTpl.add(tpl); + } + + public ArrayList getRelationshipTemplate() { + return relationshipTpl; + } + + void _addNext(NodeTemplate nodetpl,RelationshipType relationship) { + related.put(nodetpl,relationship); + } + + public ArrayList getRelatedNodes() { + if(related.isEmpty()) { + for(Map.Entry me: ((NodeType)typeDefinition).getRelationship().entrySet()) { + RelationshipType relation = me.getKey(); + NodeType node = me.getValue(); + for(String tpl: templates.keySet()) { + if(tpl.equals(node.getType())) { + //BUG.. python has + // self.related[NodeTemplate(tpl)] = relation + // but NodeTemplate doesn't have a constructor with just name... + //???? 
+ related.put(new NodeTemplate(tpl,null,null,null,null),relation); + } + } + } + } + return new ArrayList(related.keySet()); + } + + public void validate(/*tosca_tpl=none is not used...*/) { + _validateCapabilities(); + _validateRequirements(); + _validateProperties(entityTpl,(NodeType)typeDefinition); + _validateInterfaces(); + for(Property prop: getPropertiesObjects()) { + prop.validate(); + } + } + + private Metadata _metaData() { + if(entityTpl.get(METADATA) != null) { + return new Metadata((Map)entityTpl.get(METADATA)); + } + else { + return null; + } + } + + @SuppressWarnings("unchecked") + private void _validateRequirements() { + ArrayList typeRequires = ((NodeType)typeDefinition).getAllRequirements(); + ArrayList allowedReqs = new ArrayList<>(); + allowedReqs.add("template"); + if(typeRequires != null) { + for(Object to: typeRequires) { + LinkedHashMap treq = (LinkedHashMap)to; + for(Map.Entry me: treq.entrySet()) { + String key = me.getKey(); + Object value = me.getValue(); + allowedReqs.add(key); + if(value instanceof LinkedHashMap) { + allowedReqs.addAll(((LinkedHashMap)value).keySet()); + } + } + + } + } + + ArrayList requires = (ArrayList)((NodeType)typeDefinition).getValue(REQUIREMENTS, entityTpl, false); + if(requires != null) { + if(!(requires instanceof ArrayList)) { + ThreadLocalsHolder.getCollector().appendException(String.format( + "TypeMismatchError: \"requirements\" of template \"%s\" are not of type \"list\"",name)); + } + else { + for(Object ro: requires) { + LinkedHashMap req = (LinkedHashMap)ro; + for(Map.Entry me: req.entrySet()) { + String rl = me.getKey(); + Object vo = me.getValue(); + if(vo instanceof LinkedHashMap) { + LinkedHashMap value = (LinkedHashMap)vo; + _validateRequirementsKeys(value); + _validateRequirementsProperties(value); + allowedReqs.add(rl); + } + } + _commonValidateField(req,allowedReqs,"requirements"); + } + } + } + } + + @SuppressWarnings("unchecked") + private void _validateRequirementsProperties(LinkedHashMap 
reqs) { + // TO-DO(anyone): Only occurrences property of the requirements is + // validated here. Validation of other requirement properties are being + // validated in different files. Better to keep all the requirements + // properties validation here. + for(Map.Entry me: reqs.entrySet()) { + if(me.getKey().equals("occurrences")) { + ArrayList val = (ArrayList)me.getValue(); + _validateOccurrences(val); + } + + } + } + + private void _validateOccurrences(ArrayList occurrences) { + DataEntity.validateDatatype("list",occurrences,null,null,null); + for(Object val: occurrences) { + DataEntity.validateDatatype("Integer",val,null,null,null); + } + if(occurrences.size() != 2 || + !(0 <= (int)occurrences.get(0) && (int)occurrences.get(0) <= (int)occurrences.get(1)) || + (int)occurrences.get(1) == 0) { + ThreadLocalsHolder.getCollector().appendException(String.format( + "InvalidPropertyValueError: property has invalid value %s",occurrences.toString())); + } + } + + private void _validateRequirementsKeys(LinkedHashMap reqs) { + for(String key: reqs.keySet()) { + boolean bFound = false; + for(int i=0; i< REQUIREMENTS_SECTION.length; i++) { + if(key.equals(REQUIREMENTS_SECTION[i])) { + bFound = true; + break; + } + } + if(!bFound) { + ThreadLocalsHolder.getCollector().appendException(String.format( + "UnknownFieldError: \"requirements\" of template \"%s\" contains unknown field \"%s\"",name,key)); + } + } + } + + @SuppressWarnings("unchecked") + private void _validateInterfaces() { + LinkedHashMap ifaces = (LinkedHashMap) + ((NodeType)typeDefinition).getValue(INTERFACES, entityTpl, false); + if(ifaces != null) { + for(Map.Entry me: ifaces.entrySet()) { + String iname = me.getKey(); + LinkedHashMap value = (LinkedHashMap)me.getValue(); + if(iname.equals(InterfacesDef.LIFECYCLE) || iname.equals(InterfacesDef.LIFECYCLE_SHORTNAME)) { + // maybe we should convert [] to arraylist??? 
+ ArrayList inlo = new ArrayList<>(); + for(int i=0; i irco = new ArrayList<>(); + for(int i=0; i _collectCustomIfaceOperations(String iname) { + ArrayList allowedOperations = new ArrayList<>(); + LinkedHashMap nodetypeIfaceDef = (LinkedHashMap)((NodeType) + typeDefinition).getInterfaces().get(iname); + allowedOperations.addAll(nodetypeIfaceDef.keySet()); + String ifaceType = (String)nodetypeIfaceDef.get("type"); + if(ifaceType != null) { + LinkedHashMap ifaceTypeDef = null; + if(((NodeType)typeDefinition).customDef != null) { + ifaceTypeDef = (LinkedHashMap)((NodeType)typeDefinition).customDef.get(ifaceType); + } + if(ifaceTypeDef == null) { + ifaceTypeDef = (LinkedHashMap)EntityType.TOSCA_DEF.get(ifaceType); + } + allowedOperations.addAll(ifaceTypeDef.keySet()); + } + // maybe we should convert [] to arraylist??? + ArrayList idrw = new ArrayList<>(); + for(int i=0; i nodetemplate) { + for(String ntname: nodetemplate.keySet()) { + boolean bFound = false; + for(int i=0; i< SECTIONS.length; i++) { + if(ntname.equals(SECTIONS[i])) { + bFound = true; + break; + } + } + if(!bFound) { + for(int i=0; i< SPECIAL_SECTIONS.length; i++) { + if(ntname.equals(SPECIAL_SECTIONS[i])) { + bFound = true; + break; + } + } + + } + if(!bFound) { + ThreadLocalsHolder.getCollector().appendException(String.format( + "UnknownFieldError: Node template \"%s\" has unknown field \"%s\"",name,ntname)); + } + } + } + + // getter/setter + + public SubstitutionMappings getSubMappingToscaTemplate() { + return subMappingToscaTemplate; + } + + public void setSubMappingToscaTemplate(SubstitutionMappings sm) { + subMappingToscaTemplate = sm; + } + + // **experimental** (multilevel nesting) + public SubstitutionMappings getSubMappingToscaTemplate2() { + return subMappingToscaTemplate2; + } + + public void setSubMappingToscaTemplate2(SubstitutionMappings sm) { + subMappingToscaTemplate2 = sm; + } + + public Metadata getMetaData() { + return metadata; + } + + public void setMetaData(Metadata metadata) { 
+ this.metadata = metadata; + } + + @Override + public String toString() { + return getName(); + } + +} + +/*python + +from toscaparser.common.exception import ExceptionCollector +from toscaparser.common.exception import InvalidPropertyValueError +from toscaparser.common.exception import MissingRequiredFieldError +from toscaparser.common.exception import TypeMismatchError +from toscaparser.common.exception import UnknownFieldError +from toscaparser.common.exception import ValidationError +from toscaparser.dataentity import DataEntity +from toscaparser.elements.interfaces import CONFIGURE +from toscaparser.elements.interfaces import CONFIGURE_SHORTNAME +from toscaparser.elements.interfaces import INTERFACE_DEF_RESERVED_WORDS +from toscaparser.elements.interfaces import InterfacesDef +from toscaparser.elements.interfaces import LIFECYCLE +from toscaparser.elements.interfaces import LIFECYCLE_SHORTNAME +from toscaparser.elements.relationshiptype import RelationshipType +from toscaparser.entity_template import EntityTemplate +from toscaparser.relationship_template import RelationshipTemplate +from toscaparser.utils.gettextutils import _ + +log = logging.getLogger('tosca') + + +class NodeTemplate(EntityTemplate): + '''Node template from a Tosca profile.''' + def __init__(self, name, node_templates, custom_def=None, + available_rel_tpls=None, available_rel_types=None): + super(NodeTemplate, self).__init__(name, node_templates[name], + 'node_type', + custom_def) + self.templates = node_templates + self._validate_fields(node_templates[name]) + self.custom_def = custom_def + self.related = {} + self.relationship_tpl = [] + self.available_rel_tpls = available_rel_tpls + self.available_rel_types = available_rel_types + self._relationships = {} + self.sub_mapping_tosca_template = None + + @property + def relationships(self): + if not self._relationships: + requires = self.requirements + if requires and isinstance(requires, list): + for r in requires: + for r1, value in 
r.items(): + explicit = self._get_explicit_relationship(r, value) + if explicit: + for key, value in explicit.items(): + self._relationships[key] = value + return self._relationships + + def _get_explicit_relationship(self, req, value): + """Handle explicit relationship + + For example, + - req: + node: DBMS + relationship: tosca.relationships.HostedOn + """ + explicit_relation = {} + node = value.get('node') if isinstance(value, dict) else value + + if node: + # TO-DO(spzala) implement look up once Glance meta data is available + # to find a matching TOSCA node using the TOSCA types + msg = _('Lookup by TOSCA types is not supported. ' + 'Requirement for "%s" can not be full-filled.') % self.name + if (node in list(self.type_definition.TOSCA_DEF.keys()) + or node in self.custom_def): + ExceptionCollector.appendException(NotImplementedError(msg)) + return + + if node not in self.templates: + ExceptionCollector.appendException( + KeyError(_('Node template "%s" was not found.') % node)) + return + + related_tpl = NodeTemplate(node, self.templates, self.custom_def) + relationship = value.get('relationship') \ + if isinstance(value, dict) else None + # check if it's type has relationship defined + if not relationship: + parent_reqs = self.type_definition.get_all_requirements() + if parent_reqs is None: + ExceptionCollector.appendException( + ValidationError(message='parent_req is ' + + str(parent_reqs))) + else: + for key in req.keys(): + for req_dict in parent_reqs: + if key in req_dict.keys(): + relationship = (req_dict.get(key). 
+ get('relationship')) + break + if relationship: + found_relationship_tpl = False + # apply available relationship templates if found + if self.available_rel_tpls: + for tpl in self.available_rel_tpls: + if tpl.name == relationship: + rtype = RelationshipType(tpl.type, None, + self.custom_def) + explicit_relation[rtype] = related_tpl + tpl.target = related_tpl + tpl.source = self + self.relationship_tpl.append(tpl) + found_relationship_tpl = True + # create relationship template object. + rel_prfx = self.type_definition.RELATIONSHIP_PREFIX + if not found_relationship_tpl: + if isinstance(relationship, dict): + relationship = relationship.get('type') + if relationship: + if self.available_rel_types and \ + relationship in self.available_rel_types.keys(): + pass + elif not relationship.startswith(rel_prfx): + relationship = rel_prfx + relationship + else: + ExceptionCollector.appendException( + MissingRequiredFieldError( + what=_('"relationship" used in template ' + '"%s"') % related_tpl.name, + required=self.TYPE)) + for rtype in self.type_definition.relationship.keys(): + if rtype.type == relationship: + explicit_relation[rtype] = related_tpl + related_tpl._add_relationship_template(req, + rtype.type, + self) + elif self.available_rel_types: + if relationship in self.available_rel_types.keys(): + rel_type_def = self.available_rel_types.\ + get(relationship) + if 'derived_from' in rel_type_def: + super_type = \ + rel_type_def.get('derived_from') + if not super_type.startswith(rel_prfx): + super_type = rel_prfx + super_type + if rtype.type == super_type: + explicit_relation[rtype] = related_tpl + related_tpl.\ + _add_relationship_template( + req, rtype.type, self) + return explicit_relation + + def _add_relationship_template(self, requirement, rtype, source): + req = requirement.copy() + req['type'] = rtype + tpl = RelationshipTemplate(req, rtype, self.custom_def, self, source) + self.relationship_tpl.append(tpl) + + def get_relationship_template(self): + return 
self.relationship_tpl + + def _add_next(self, nodetpl, relationship): + self.related[nodetpl] = relationship + + @property + def related_nodes(self): + if not self.related: + for relation, node in self.type_definition.relationship.items(): + for tpl in self.templates: + if tpl == node.type: + self.related[NodeTemplate(tpl)] = relation + return self.related.keys() + + def validate(self, tosca_tpl=None): + self._validate_capabilities() + self._validate_requirements() + self._validate_properties(self.entity_tpl, self.type_definition) + self._validate_interfaces() + for prop in self.get_properties_objects(): + prop.validate() + + def _validate_requirements(self): + type_requires = self.type_definition.get_all_requirements() + allowed_reqs = ["template"] + if type_requires: + for treq in type_requires: + for key, value in treq.items(): + allowed_reqs.append(key) + if isinstance(value, dict): + for key in value: + allowed_reqs.append(key) + + requires = self.type_definition.get_value(self.REQUIREMENTS, + self.entity_tpl) + if requires: + if not isinstance(requires, list): + ExceptionCollector.appendException( + TypeMismatchError( + what='"requirements" of template "%s"' % self.name, + type='list')) + else: + for req in requires: + for r1, value in req.items(): + if isinstance(value, dict): + self._validate_requirements_keys(value) + self._validate_requirements_properties(value) + allowed_reqs.append(r1) + self._common_validate_field(req, allowed_reqs, + 'requirements') + + def _validate_requirements_properties(self, requirements): + # TO-DO(anyone): Only occurrences property of the requirements is + # validated here. Validation of other requirement properties are being + # validated in different files. Better to keep all the requirements + # properties validation here. 
+ for key, value in requirements.items(): + if key == 'occurrences': + self._validate_occurrences(value) + break + + def _validate_occurrences(self, occurrences): + DataEntity.validate_datatype('list', occurrences) + for value in occurrences: + DataEntity.validate_datatype('integer', value) + if len(occurrences) != 2 or not (0 <= occurrences[0] <= occurrences[1]) \ + or occurrences[1] == 0: + ExceptionCollector.appendException( + InvalidPropertyValueError(what=(occurrences))) + + def _validate_requirements_keys(self, requirement): + for key in requirement.keys(): + if key not in self.REQUIREMENTS_SECTION: + ExceptionCollector.appendException( + UnknownFieldError( + what='"requirements" of template "%s"' % self.name, + field=key)) + + def _validate_interfaces(self): + ifaces = self.type_definition.get_value(self.INTERFACES, + self.entity_tpl) + if ifaces: + for name, value in ifaces.items(): + if name in (LIFECYCLE, LIFECYCLE_SHORTNAME): + self._common_validate_field( + value, InterfacesDef. + interfaces_node_lifecycle_operations, + 'interfaces') + elif name in (CONFIGURE, CONFIGURE_SHORTNAME): + self._common_validate_field( + value, InterfacesDef. 
+ interfaces_relationship_configure_operations, + 'interfaces') + elif name in self.type_definition.interfaces.keys(): + self._common_validate_field( + value, + self._collect_custom_iface_operations(name), + 'interfaces') + else: + ExceptionCollector.appendException( + UnknownFieldError( + what='"interfaces" of template "%s"' % + self.name, field=name)) + + def _collect_custom_iface_operations(self, name): + allowed_operations = [] + nodetype_iface_def = self.type_definition.interfaces[name] + allowed_operations.extend(nodetype_iface_def.keys()) + if 'type' in nodetype_iface_def: + iface_type = nodetype_iface_def['type'] + if iface_type in self.type_definition.custom_def: + iface_type_def = self.type_definition.custom_def[iface_type] + else: + iface_type_def = self.type_definition.TOSCA_DEF[iface_type] + allowed_operations.extend(iface_type_def.keys()) + allowed_operations = [op for op in allowed_operations if + op not in INTERFACE_DEF_RESERVED_WORDS] + return allowed_operations + + def _validate_fields(self, nodetemplate): + for name in nodetemplate.keys(): + if name not in self.SECTIONS and name not in self.SPECIAL_SECTIONS: + ExceptionCollector.appendException( + UnknownFieldError(what='Node template "%s"' % self.name, + field=name))*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/Policy.java b/src/main/java/org/openecomp/sdc/toscaparser/api/Policy.java new file mode 100644 index 0000000..a59d9d5 --- /dev/null +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/Policy.java @@ -0,0 +1,187 @@ +package org.openecomp.sdc.toscaparser.api; + +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.Map; + +import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; +import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; +import org.openecomp.sdc.toscaparser.api.utils.ValidateUtils; + +public class Policy extends EntityTemplate { + + + private static final String TYPE = "type"; + 
private static final String METADATA = "metadata"; + private static final String DESCRIPTION = "description"; + private static final String PROPERTIES = "properties"; + private static final String TARGETS = "targets"; + private static final String TRIGGERS = "triggers"; + private static final String SECTIONS[] = { + TYPE, METADATA, DESCRIPTION, PROPERTIES, TARGETS, TRIGGERS}; + + LinkedHashMap metaData; + ArrayList targetsList; // *** a list of NodeTemplate OR a list of Group *** + String targetsType; + ArrayList triggers; + LinkedHashMap properties; + + public Policy(String _name, + LinkedHashMap _policy, +// ArrayList targetObjects, + ArrayList targetObjects, + String _targetsType, + LinkedHashMap _customDef) { + super(_name,_policy,"policy_type",_customDef); + + metaData = null; + if(_policy.get(METADATA) != null) { + metaData = (LinkedHashMap)_policy.get(METADATA); + ValidateUtils.validateMap(metaData); + } + + targetsList = targetObjects; + targetsType = _targetsType; + triggers = _triggers((LinkedHashMap)_policy.get(TRIGGERS)); + properties = null; + if(_policy.get("properties") != null) { + properties = (LinkedHashMap)_policy.get("properties"); + } + _validateKeys(); + } + + public ArrayList getTargets() { + return (ArrayList)entityTpl.get("targets"); + } + + public ArrayList getDescription() { + return (ArrayList)entityTpl.get("description"); + } + + public ArrayList getmetadata() { + return (ArrayList)entityTpl.get("metadata"); + } + + public String getTargetsType() { + return targetsType; + } + +// public ArrayList getTargetsList() { + public ArrayList getTargetsList() { + return targetsList; + } + + // entityTemplate already has a different getProperties... 
package org.openecomp.sdc.toscaparser.api;

import java.util.ArrayList;
import java.util.LinkedHashMap;

import org.openecomp.sdc.toscaparser.api.elements.constraints.Constraint;
import org.openecomp.sdc.toscaparser.api.elements.constraints.Schema;
import org.openecomp.sdc.toscaparser.api.functions.Function;

/**
 * TOSCA built-in Property type.
 *
 * <p>Pairs a property name/value with its {@link Schema} definition and
 * delegates all schema-level queries (type, required, default, constraints,
 * entry schema) to that schema. Java port of the Python toscaparser
 * {@code Property} class (see the reference comment at the end of this file).</p>
 */
public class Property {

    private String name;
    private Object value;
    private Schema schema;
    // Custom (non-built-in) data type definitions, consulted when validating values.
    private LinkedHashMap<String, Object> customDef;

    /**
     * @param propname       property name
     * @param propvalue      raw property value (may be a function reference such as get_input)
     * @param propschemaDict raw schema map for this property
     * @param propcustomDef  custom data type definitions (may be null)
     */
    public Property(String propname,
                    Object propvalue,
                    LinkedHashMap<String, Object> propschemaDict,
                    LinkedHashMap<String, Object> propcustomDef) {
        name = propname;
        value = propvalue;
        customDef = propcustomDef;
        schema = new Schema(propname, propschemaDict);
    }

    // --- accessors delegated to the schema ---

    public String getType() {
        return schema.getType();
    }

    public boolean isRequired() {
        return schema.isRequired();
    }

    public String getDescription() {
        return schema.getDescription();
    }

    public Object getDefault() {
        return schema.getDefault();
    }

    public ArrayList<Constraint> getConstraints() {
        return schema.getConstraints();
    }

    public LinkedHashMap<String, Object> getEntrySchema() {
        return schema.getEntrySchema();
    }

    public String getName() {
        return name;
    }

    public Object getValue() {
        return value;
    }

    // setter (returns the newly assigned value, as in the original API)
    public Object setValue(Object vob) {
        value = vob;
        return value;
    }

    /**
     * Validate the value against its declared type and constraints.
     * Skipped entirely when the value is an intrinsic-function reference
     * (e.g. get_input), which can only be resolved later.
     */
    public void validate() {
        if (!Function.isFunction(value)) {
            // BUG FIX: guard against null before calling toString(); the
            // original NPE'd on a null value of a string-typed property.
            if (value != null && getType().equals(Schema.STRING)) {
                value = value.toString();
            }
            value = DataEntity.validateDatatype(getType(), value,
                                                getEntrySchema(),
                                                customDef,
                                                name);
            _validateConstraints();
        }
    }

    // Apply each schema constraint to the current value.
    private void _validateConstraints() {
        if (getConstraints() != null) {
            for (Constraint constraint : getConstraints()) {
                constraint.validate(value);
            }
        }
    }

    @Override
    public String toString() {
        return "Property{" +
                "name='" + name + '\'' +
                ", value=" + value +
                ", schema=" + schema +
                ", customDef=" + customDef +
                '}';
    }
}
package org.openecomp.sdc.toscaparser.api;

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.Map;

import org.openecomp.sdc.toscaparser.api.elements.EntityType;
import org.openecomp.sdc.toscaparser.api.elements.PropertyDef;
import org.openecomp.sdc.toscaparser.api.elements.StatefulEntityType;

/**
 * TOSCA relationship template: connects a source node template to a target
 * node template and carries the relationship's properties.
 * Java port of the Python toscaparser {@code RelationshipTemplate} class
 * (see the reference comment at the end of this file).
 */
public class RelationshipTemplate extends EntityTemplate {

    private static final String DERIVED_FROM = "derived_from";
    private static final String PROPERTIES = "properties";
    private static final String REQUIREMENTS = "requirements";
    private static final String INTERFACES = "interfaces";
    private static final String CAPABILITIES = "capabilities";
    private static final String TYPE = "type";
    @SuppressWarnings("unused")
    private static final String[] SECTIONS = {
            DERIVED_FROM, PROPERTIES, REQUIREMENTS, INTERFACES, CAPABILITIES, TYPE};

    private String name;
    private NodeTemplate target;
    private NodeTemplate source;
    private ArrayList<Property> _properties;  // lazily built cache

    /**
     * @param rtrelationshipTemplate raw template map
     * @param rtname                 template name
     * @param rtcustomDef            custom type definitions (may be null)
     * @param rttarget               target node template (may be null)
     * @param rtsource               source node template (may be null)
     */
    public RelationshipTemplate(LinkedHashMap<String, Object> rtrelationshipTemplate,
                                String rtname,
                                LinkedHashMap<String, Object> rtcustomDef,
                                NodeTemplate rttarget,
                                NodeTemplate rtsource) {
        super(rtname, rtrelationshipTemplate, "relationship_type", rtcustomDef);

        // NOTE(review): the Python source lowercases the name here
        // (name.lower()); kept as-is to avoid changing caller-visible behavior.
        name = rtname;
        target = rttarget;
        source = rtsource;
        _properties = null;
    }

    /** Return (building on first use) the Property objects for this template. */
    public ArrayList<Property> getPropertiesObjects() {
        if (_properties == null) {
            _properties = _createRelationshipProperties();
        }
        return _properties;
    }

    @SuppressWarnings("unchecked")
    public ArrayList<Property> _createRelationshipProperties() {
        ArrayList<Property> props = new ArrayList<>();
        LinkedHashMap<String, Object> relationship =
                (LinkedHashMap<String, Object>) entityTpl.get("relationship");

        // A short-hand requirement may nest the relationship one level down.
        if (relationship == null) {
            for (Object val : entityTpl.values()) {
                if (val instanceof LinkedHashMap) {
                    relationship = (LinkedHashMap<String, Object>)
                            ((LinkedHashMap<String, Object>) val).get("relationship");
                    break;
                }
            }
        }

        LinkedHashMap<String, Object> properties = null;
        if (relationship != null) {
            properties = (LinkedHashMap<String, Object>)
                    ((EntityType) typeDefinition).getValue(PROPERTIES, relationship, false);
        }
        // BUG FIX: the fallback to the template-level "properties" section was
        // dead code before (properties had already been defaulted to an empty
        // map, so the second null check never fired). Mirror the Python
        // source: fall back whenever nothing was found on the relationship.
        if (properties == null || properties.isEmpty()) {
            properties = (LinkedHashMap<String, Object>) entityTpl.get(PROPERTIES);
        }
        if (properties == null) {
            properties = new LinkedHashMap<>();
        }

        LinkedHashMap<String, PropertyDef> propsDef =
                ((StatefulEntityType) typeDefinition).getPropertiesDef();
        for (Map.Entry<String, Object> me : properties.entrySet()) {
            String pname = me.getKey();
            // BUG FIX: the original re-read the map with the template-name
            // field ("name") instead of the entry key ("pname"), so every
            // property got the wrong (usually null) value.
            Object pvalue = me.getValue();
            if (propsDef != null && propsDef.get(pname) != null) {
                PropertyDef pd = propsDef.get(pname);
                props.add(new Property(pname, pvalue, pd.getSchema(), customDef));
            }
        }
        // Add defaults for defined properties the template did not set.
        for (PropertyDef p : ((StatefulEntityType) typeDefinition).getPropertiesDefObjects()) {
            if (p.getDefault() != null && properties.get(p.getName()) == null) {
                // BUG FIX: no cast of the default to LinkedHashMap - scalar
                // defaults (strings, numbers) threw ClassCastException before.
                props.add(new Property(p.getName(), p.getDefault(), p.getSchema(), customDef));
            }
        }
        return props;
    }

    /** Validate this template's properties against its type definition. */
    public void validate() {
        _validateProperties(entityTpl, (StatefulEntityType) typeDefinition);
    }

    // getters/setters
    public NodeTemplate getTarget() {
        return target;
    }

    public NodeTemplate getSource() {
        return source;
    }

    public void setSource(NodeTemplate nt) {
        source = nt;
    }

    public void setTarget(NodeTemplate nt) {
        target = nt;
    }

    @Override
    public String toString() {
        // BUG FIX: null-safe; target/source are frequently null (e.g. the
        // instances built by TopologyTemplate._relationshipTemplates).
        return "RelationshipTemplate{" +
                "name='" + name + '\'' +
                ", target=" + (target != null ? target.getName() : null) +
                ", source=" + (source != null ? source.getName() : null) +
                ", _properties=" + _properties +
                '}';
    }

}
'interfaces', + 'capabilities', 'type') + +log = logging.getLogger('tosca') + + +class RelationshipTemplate(EntityTemplate): + '''Relationship template.''' + def __init__(self, relationship_template, name, custom_def=None, + target=None, source=None): + super(RelationshipTemplate, self).__init__(name, + relationship_template, + 'relationship_type', + custom_def) + self.name = name.lower() + self.target = target + self.source = source + + def get_properties_objects(self): + '''Return properties objects for this template.''' + if self._properties is None: + self._properties = self._create_relationship_properties() + return self._properties + + def _create_relationship_properties(self): + props = [] + properties = {} + relationship = self.entity_tpl.get('relationship') + + if not relationship: + for value in self.entity_tpl.values(): + if isinstance(value, dict): + relationship = value.get('relationship') + break + + if relationship: + properties = self.type_definition.get_value(self.PROPERTIES, + relationship) or {} + if not properties: + properties = self.entity_tpl.get(self.PROPERTIES) or {} + + if properties: + for name, value in properties.items(): + props_def = self.type_definition.get_properties_def() + if props_def and name in props_def: + if name in properties.keys(): + value = properties.get(name) + prop = Property(name, value, + props_def[name].schema, self.custom_def) + props.append(prop) + for p in self.type_definition.get_properties_def_objects(): + if p.default is not None and p.name not in properties.keys(): + prop = Property(p.name, p.default, p.schema, self.custom_def) + props.append(prop) + return props + + def validate(self): + self._validate_properties(self.entity_tpl, self.type_definition)*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/Repository.java b/src/main/java/org/openecomp/sdc/toscaparser/api/Repository.java new file mode 100644 index 0000000..92a90af --- /dev/null +++ 
package org.openecomp.sdc.toscaparser.api;

import java.util.LinkedHashMap;

import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector;
import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder;
import org.openecomp.sdc.toscaparser.api.utils.UrlUtils;

/**
 * A TOSCA repository definition: a named external repository with a required
 * "url" and optional "description"/"credential" fields. Validation errors are
 * reported through the thread-local exception collector rather than thrown.
 * Java port of the Python toscaparser {@code Repository} class (see the
 * reference comment at the end of this file).
 */
public class Repository {

    private static final String DESCRIPTION = "description";
    private static final String URL = "url";
    private static final String CREDENTIAL = "credential";
    private static final String[] SECTIONS = {DESCRIPTION, URL, CREDENTIAL};

    private String name;
    private Object reposit;   // raw definition: either a map or a bare value
    private String url;       // extracted "url" field when reposit is a map

    /**
     * @param repName  repository name (key in the "repositories" section)
     * @param repValue raw repository definition
     */
    @SuppressWarnings("unchecked")
    public Repository(String repName, Object repValue) {
        name = repName;
        reposit = repValue;
        if (reposit instanceof LinkedHashMap) {
            url = (String) ((LinkedHashMap<String, Object>) reposit).get("url");
            if (url == null) {
                ThreadLocalsHolder.getCollector().appendException(String.format(
                        "MissingRequiredFieldError: Repository \"%s\" is missing required field \"url\"",
                        name));
            }
        }
        loadAndValidate(name, reposit);
    }

    /**
     * Check the definition for unknown keys and validate the url, reporting
     * problems via the collector.
     */
    @SuppressWarnings("unchecked")
    private void loadAndValidate(String val, Object repositDef) {
        String keyname = val;
        if (repositDef instanceof LinkedHashMap) {
            // BUG FIX: iterate the repositDef parameter; the original read the
            // "reposit" field instead (the same object today, but a latent bug
            // should this method ever be called with anything else).
            for (String key : ((LinkedHashMap<String, Object>) repositDef).keySet()) {
                boolean bFound = false;
                for (String sect : SECTIONS) {
                    if (key.equals(sect)) {
                        bFound = true;
                        break;
                    }
                }
                if (!bFound) {
                    ThreadLocalsHolder.getCollector().appendException(String.format(
                            "UnknownFieldError: repositories \"%s\" contains unknown field \"%s\"",
                            keyname, key));
                }
            }

            String repositUrl = (String) ((LinkedHashMap<String, Object>) repositDef).get("url");
            if (repositUrl != null) {
                boolean urlVal = UrlUtils.validateUrl(repositUrl);
                if (!urlVal) {
                    // NOTE(review): "repsositories" typo kept byte-for-byte for
                    // parity with the upstream Python message.
                    ThreadLocalsHolder.getCollector().appendException(String.format(
                            "URLException: repsositories \"%s\" Invalid Url", keyname));
                }
            }
        }
    }

    @Override
    public String toString() {
        return "Repository{" +
                "name='" + name + '\'' +
                ", reposit=" + reposit +
                ", url='" + url + '\'' +
                '}';
    }
}
package org.openecomp.sdc.toscaparser.api;

import java.util.ArrayList;
import java.util.HashSet;
import java.util.LinkedHashMap;

import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector;
import org.openecomp.sdc.toscaparser.api.elements.NodeType;
import org.openecomp.sdc.toscaparser.api.elements.PropertyDef;
import org.openecomp.sdc.toscaparser.api.parameters.Input;
import org.openecomp.sdc.toscaparser.api.parameters.Output;
import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder;

/**
 * SubstitutionMappings exports a topology template as an implementation of a
 * node type: the template's inputs/outputs are matched against the node type's
 * properties/attributes, and its capabilities/requirements are mapped onto
 * the substituted node template. Validation errors are reported through the
 * thread-local collector. Java port of the Python toscaparser class (see the
 * reference comment at the end of this file).
 */
public class SubstitutionMappings {

    private static final String NODE_TYPE = "node_type";
    private static final String REQUIREMENTS = "requirements";
    private static final String CAPABILITIES = "capabilities";

    private static final String[] SECTIONS = {NODE_TYPE, REQUIREMENTS, CAPABILITIES};

    @SuppressWarnings("unused")
    private static final String[] OPTIONAL_OUTPUTS = {"tosca_id", "tosca_name", "state"};

    private LinkedHashMap<String, Object> subMappingDef;
    private ArrayList<NodeTemplate> nodetemplates;
    private ArrayList<Input> inputs;
    private ArrayList<Output> outputs;
    private ArrayList<Group> groups;
    private NodeTemplate subMappedNodeTemplate;
    private LinkedHashMap<String, Object> customDefs;
    private LinkedHashMap<String, Object> _capabilities;
    private LinkedHashMap<String, Object> _requirements;

    public SubstitutionMappings(LinkedHashMap<String, Object> smsubMappingDef,
                                ArrayList<NodeTemplate> smnodetemplates,
                                ArrayList<Input> sminputs,
                                ArrayList<Output> smoutputs,
                                ArrayList<Group> smgroups,
                                NodeTemplate smsubMappedNodeTemplate,
                                LinkedHashMap<String, Object> smcustomDefs) {

        subMappingDef = smsubMappingDef;
        nodetemplates = smnodetemplates;
        // Null-tolerant: missing sections become empty collections.
        inputs = sminputs != null ? sminputs : new ArrayList<>();
        outputs = smoutputs != null ? smoutputs : new ArrayList<>();
        groups = smgroups != null ? smgroups : new ArrayList<>();
        subMappedNodeTemplate = smsubMappedNodeTemplate;
        customDefs = smcustomDefs != null ? smcustomDefs : new LinkedHashMap<>();
        _validate();

        _capabilities = null;
        _requirements = null;
    }

    /** The mapped node type name, or null when no mapping is defined. */
    public String getType() {
        if (subMappingDef != null) {
            return (String) subMappingDef.get(NODE_TYPE);
        }
        return null;
    }

    public ArrayList<NodeTemplate> getNodeTemplates() {
        return nodetemplates;
    }

    /** Static variant of {@link #getNodeType()} (port of the Python classmethod). */
    public static String stGetNodeType(LinkedHashMap<String, Object> _subMappingDef) {
        // instanceof doubles as a null check
        if (_subMappingDef instanceof LinkedHashMap) {
            return (String) _subMappingDef.get(NODE_TYPE);
        }
        return null;
    }

    public String getNodeType() {
        return (String) subMappingDef.get(NODE_TYPE);
    }

    public ArrayList<Input> getInputs() {
        return inputs;
    }

    public ArrayList<Group> getGroups() {
        return groups;
    }

    @SuppressWarnings("unchecked")
    public LinkedHashMap<String, Object> getCapabilities() {
        return (LinkedHashMap<String, Object>) subMappingDef.get(CAPABILITIES);
    }

    @SuppressWarnings("unchecked")
    public LinkedHashMap<String, Object> getRequirements() {
        return (LinkedHashMap<String, Object>) subMappingDef.get(REQUIREMENTS);
    }

    /** Type object for the mapped node type, resolved against customDefs. */
    public NodeType getNodeDefinition() {
        return new NodeType(getNodeType(), customDefs);
    }

    private void _validate() {
        // Basic validation
        _validateKeys();
        _validateType();

        // SubstitutionMapping class syntax validation
        _validateInputs();
        _validateCapabilities();
        _validateRequirements();
        _validateOutputs();
    }

    // Reject keys other than node_type/requirements/capabilities.
    private void _validateKeys() {
        for (String key : subMappingDef.keySet()) {
            boolean bFound = false;
            for (String s : SECTIONS) {
                if (s.equals(key)) {
                    bFound = true;
                    break;
                }
            }
            if (!bFound) {
                ThreadLocalsHolder.getCollector().appendException(String.format(
                        "UnknownFieldError: SubstitutionMappings contain unknown field \"%s\"",
                        key));
            }
        }
    }

    // node_type must be present and resolvable in customDefs.
    private void _validateType() {
        String nodeType = (String) subMappingDef.get(NODE_TYPE);
        if (nodeType == null) {
            ThreadLocalsHolder.getCollector().appendException(String.format(
                    "MissingRequiredFieldError: SubstitutionMappings used in topology_template is missing required field \"%s\"",
                    NODE_TYPE));
        }
        Object nodeTypeDef = customDefs.get(nodeType);
        if (nodeTypeDef == null) {
            ThreadLocalsHolder.getCollector().appendException(String.format(
                    "InvalidNodeTypeError: \"%s\" is invalid", nodeType));
        }
    }

    /**
     * The template's inputs must cover the node type's properties: every
     * required property without a default needs a matching input, properties
     * customized by the substituted node need inputs too, and any input that
     * is not a node-type property must itself define a default.
     */
    private void _validateInputs() {
        HashSet<String> allInputs = new HashSet<>();
        for (Input inp : inputs) {
            allInputs.add(inp.getName());
        }
        HashSet<String> requiredProperties = new HashSet<>();
        for (PropertyDef pd : getNodeDefinition().getPropertiesDefObjects()) {
            if (pd.isRequired() && pd.getDefault() == null) {
                requiredProperties.add(pd.getName());
            }
        }
        // Must provide inputs for required properties of node type.
        for (String property : requiredProperties) {
            // Check property which is 'required' and has no 'default' value
            if (!allInputs.contains(property)) {
                ThreadLocalsHolder.getCollector().appendException(String.format(
                        "MissingRequiredInputError: SubstitutionMappings with node_type \"%s\" is missing required input \"%s\"",
                        getNodeType(), property));
            }
        }
        // Optional properties customized by the substituted node also need
        // inputs; otherwise they are not mandatory to be defined.
        HashSet<String> customizedParameters = new HashSet<>();
        if (subMappedNodeTemplate != null) {
            customizedParameters.addAll(subMappedNodeTemplate.getProperties().keySet());
        }
        HashSet<String> allProperties =
                new HashSet<>(getNodeDefinition().getPropertiesDef().keySet());
        // BUG FIX: operate on copies. The original aliased the source sets
        // ("diffset = customizedParameters", later "diffset = allInputs") and
        // destructively removeAll()'d them - the second one inside a loop -
        // corrupting the sets the checks depend on.
        HashSet<String> customizedNotInput = new HashSet<>(customizedParameters);
        customizedNotInput.removeAll(allInputs);
        for (String parameter : customizedNotInput) {
            if (allProperties.contains(parameter)) {
                ThreadLocalsHolder.getCollector().appendException(String.format(
                        "MissingRequiredInputError: SubstitutionMappings with node_type \"%s\" is missing required input \"%s\"",
                        getNodeType(), parameter));
            }
        }
        // Additional inputs that are not node-type properties must provide
        // default values. Currently the scenario may not happen because of
        // parameters validation in nodetemplate; here is a guarantee.
        HashSet<String> inputsNotProperties = new HashSet<>(allInputs);
        inputsNotProperties.removeAll(allProperties);
        for (Input inp : inputs) {
            if (inputsNotProperties.contains(inp.getName()) && inp.getDefault() == null) {
                // (typo "rquired" fixed)
                ThreadLocalsHolder.getCollector().appendException(String.format(
                        "MissingRequiredInputError: SubstitutionMappings with node_type \"%s\" is missing required input \"%s\"",
                        getNodeType(), inp.getName()));
            }
        }
    }

    // The capabilities must be in the node template which is mapped.
    @SuppressWarnings("unchecked")
    private void _validateCapabilities() {
        LinkedHashMap<String, Object> tplsCapabilities =
                (LinkedHashMap<String, Object>) subMappingDef.get(CAPABILITIES);
        LinkedHashMap<String, Capability> nodeCapabilities = null;
        if (subMappedNodeTemplate != null) {
            nodeCapabilities = subMappedNodeTemplate.getCapabilities();
        }
        if (nodeCapabilities != null) {
            for (String cap : nodeCapabilities.keySet()) {
                if (tplsCapabilities != null && tplsCapabilities.get(cap) == null) {
                    ; //pass
                    // ExceptionCollector.appendException(
                    //     UnknownFieldError(what='SubstitutionMappings',
                    //                       field=cap))
                }
            }
        }
    }

    // The requirements must be in the node template which is mapped.
    @SuppressWarnings("unchecked")
    private void _validateRequirements() {
        //*****************************************************
        //TO-DO - Different from Python code!! one is a bug...
        //*****************************************************
        LinkedHashMap<String, Object> tplsRequirements =
                (LinkedHashMap<String, Object>) subMappingDef.get(REQUIREMENTS);
        ArrayList<Object> nodeRequirements = null;
        if (subMappedNodeTemplate != null) {
            nodeRequirements = subMappedNodeTemplate.getRequirements();
        }
        if (nodeRequirements != null) {
            for (Object ro : nodeRequirements) {
                ArrayList<String> al =
                        new ArrayList<>(((LinkedHashMap<String, Object>) ro).keySet());
                String cap = al.get(0);
                if (tplsRequirements != null && tplsRequirements.get(cap) == null) {
                    ; //pass
                    // ExceptionCollector.appendException(
                    //     UnknownFieldError(what='SubstitutionMappings',
                    //                       field=cap))
                }
            }
        }
    }

    /**
     * Every output of the topology template must correspond to an attribute
     * of the mapped node type. (Per the spec the substituted template's
     * observable attributes should also be checked; upstream leaves that
     * open, so this port only checks the attribute direction.)
     */
    @SuppressWarnings("unchecked")
    private void _validateOutputs() {
        for (Output output : outputs) {
            Object ado = getNodeDefinition().getAttributesDef();
            if (ado != null && ((LinkedHashMap<String, Object>) ado).get(output.getName()) == null) {
                ThreadLocalsHolder.getCollector().appendException(String.format(
                        "UnknownOutputError: Unknown output \"%s\" in SubstitutionMappings with node_type \"%s\"",
                        output.getName(), getNodeType()));
            }
        }
    }

    @Override
    public String toString() {
        // Deliberately terse: the full member dump lives in toLimitedString().
        return "SubstitutionMappings{" +
                ", subMappedNodeTemplate=" +
                (subMappedNodeTemplate == null ? "" : subMappedNodeTemplate.getName()) +
                '}';
    }

    @Deprecated
    public String toLimitedString() {
        return "SubstitutionMappings{" +
                "subMappingDef=" + subMappingDef +
                ", nodetemplates=" + nodetemplates +
                ", inputs=" + inputs +
                ", outputs=" + outputs +
                ", groups=" + groups +
                ", subMappedNodeTemplate=" +
                (subMappedNodeTemplate == null ? "" : subMappedNodeTemplate.getName()) +
                ", customDefs=" + customDefs +
                ", _capabilities=" + _capabilities +
                ", _requirements=" + _requirements +
                '}';
    }
}
toscaparser.common.exception import UnknownOutputError +from toscaparser.elements.nodetype import NodeType +from toscaparser.utils.gettextutils import _ + +log = logging.getLogger('tosca') + + +class SubstitutionMappings(object): + '''SubstitutionMappings class declaration + + SubstitutionMappings exports the topology template as an + implementation of a Node type. + ''' + + SECTIONS = (NODE_TYPE, REQUIREMENTS, CAPABILITIES) = \ + ('node_type', 'requirements', 'capabilities') + + OPTIONAL_OUTPUTS = ['tosca_id', 'tosca_name', 'state'] + + def __init__(self, sub_mapping_def, nodetemplates, inputs, outputs, + sub_mapped_node_template, custom_defs): + self.nodetemplates = nodetemplates + self.sub_mapping_def = sub_mapping_def + self.inputs = inputs or [] + self.outputs = outputs or [] + self.sub_mapped_node_template = sub_mapped_node_template + self.custom_defs = custom_defs or {} + self._validate() + + self._capabilities = None + self._requirements = None + + @property + def type(self): + if self.sub_mapping_def: + return self.sub_mapping_def.get(self.NODE_TYPE) + + @classmethod + def get_node_type(cls, sub_mapping_def): + if isinstance(sub_mapping_def, dict): + return sub_mapping_def.get(cls.NODE_TYPE) + + @property + def node_type(self): + return self.sub_mapping_def.get(self.NODE_TYPE) + + @property + def capabilities(self): + return self.sub_mapping_def.get(self.CAPABILITIES) + + @property + def requirements(self): + return self.sub_mapping_def.get(self.REQUIREMENTS) + + @property + def node_definition(self): + return NodeType(self.node_type, self.custom_defs) + + def _validate(self): + # Basic validation + self._validate_keys() + self._validate_type() + + # SubstitutionMapping class syntax validation + self._validate_inputs() + self._validate_capabilities() + self._validate_requirements() + self._validate_outputs() + + def _validate_keys(self): + """validate the keys of substitution mappings.""" + for key in self.sub_mapping_def.keys(): + if key not in 
self.SECTIONS: + ExceptionCollector.appendException( + UnknownFieldError(what=_('SubstitutionMappings'), + field=key)) + + def _validate_type(self): + """validate the node_type of substitution mappings.""" + node_type = self.sub_mapping_def.get(self.NODE_TYPE) + if not node_type: + ExceptionCollector.appendException( + MissingRequiredFieldError( + what=_('SubstitutionMappings used in topology_template'), + required=self.NODE_TYPE)) + + node_type_def = self.custom_defs.get(node_type) + if not node_type_def: + ExceptionCollector.appendException( + InvalidNodeTypeError(what=node_type)) + + def _validate_inputs(self): + """validate the inputs of substitution mappings. + + The inputs defined by the topology template have to match the + properties of the node type or the substituted node. If there are + more inputs than the substituted node has properties, default values + must be defined for those inputs. + """ + + all_inputs = set([input.name for input in self.inputs]) + required_properties = set([p.name for p in + self.node_definition. + get_properties_def_objects() + if p.required and p.default is None]) + # Must provide inputs for required properties of node type. + for property in required_properties: + # Check property which is 'required' and has no 'default' value + if property not in all_inputs: + ExceptionCollector.appendException( + MissingRequiredInputError( + what=_('SubstitutionMappings with node_type ') + + self.node_type, + input_name=property)) + + # If the optional properties of node type need to be customized by + # substituted node, it also is necessary to define inputs for them, + # otherwise they are not mandatory to be defined. 
+ customized_parameters = set(self.sub_mapped_node_template + .get_properties().keys() + if self.sub_mapped_node_template else []) + all_properties = set(self.node_definition.get_properties_def()) + for parameter in customized_parameters - all_inputs: + if parameter in all_properties: + ExceptionCollector.appendException( + MissingRequiredInputError( + what=_('SubstitutionMappings with node_type ') + + self.node_type, + input_name=parameter)) + + # Additional inputs are not in the properties of node type must + # provide default values. Currently the scenario may not happen + # because of parameters validation in nodetemplate, here is a + # guarantee. + for input in self.inputs: + if input.name in all_inputs - all_properties \ + and input.default is None: + ExceptionCollector.appendException( + MissingDefaultValueError( + what=_('SubstitutionMappings with node_type ') + + self.node_type, + input_name=input.name)) + + def _validate_capabilities(self): + """validate the capabilities of substitution mappings.""" + + # The capabilites must be in node template wchich be mapped. + tpls_capabilities = self.sub_mapping_def.get(self.CAPABILITIES) + node_capabiliteys = self.sub_mapped_node_template.get_capabilities() \ + if self.sub_mapped_node_template else None + for cap in node_capabiliteys.keys() if node_capabiliteys else []: + if (tpls_capabilities and + cap not in list(tpls_capabilities.keys())): + pass + # ExceptionCollector.appendException( + # UnknownFieldError(what='SubstitutionMappings', + # field=cap)) + + def _validate_requirements(self): + """validate the requirements of substitution mappings.""" + + # The requirements must be in node template wchich be mapped. 
+ tpls_requirements = self.sub_mapping_def.get(self.REQUIREMENTS) + node_requirements = self.sub_mapped_node_template.requirements \ + if self.sub_mapped_node_template else None + for req in node_requirements if node_requirements else []: + if (tpls_requirements and + req not in list(tpls_requirements.keys())): + pass + # ExceptionCollector.appendException( + # UnknownFieldError(what='SubstitutionMappings', + # field=req)) + + def _validate_outputs(self): + """validate the outputs of substitution mappings. + + The outputs defined by the topology template have to match the + attributes of the node type or the substituted node template, + and the observable attributes of the substituted node template + have to be defined as attributes of the node type or outputs in + the topology template. + """ + + # The outputs defined by the topology template have to match the + # attributes of the node type according to the specification, but + # it's reasonable that there are more inputs than the node type + # has properties, the specification will be amended? 
package org.openecomp.sdc.toscaparser.api;

import java.util.ArrayList;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.Map;

import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector;
import org.openecomp.sdc.toscaparser.api.elements.InterfacesDef;
import org.openecomp.sdc.toscaparser.api.elements.NodeType;
import org.openecomp.sdc.toscaparser.api.elements.RelationshipType;
import org.openecomp.sdc.toscaparser.api.functions.Function;
import org.openecomp.sdc.toscaparser.api.functions.GetAttribute;
import org.openecomp.sdc.toscaparser.api.functions.GetInput;
import org.openecomp.sdc.toscaparser.api.parameters.Input;
import org.openecomp.sdc.toscaparser.api.parameters.Output;
import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder;

/**
 * In-memory model of a TOSCA {@code topology_template} section.
 *
 * <p>Java port of the OpenStack tosca-parser {@code TopologyTemplate} class
 * (the Python reference implementation is kept in a trailing comment in this
 * file). Construction validates the section keys and then materializes
 * inputs, node templates, relationship templates, outputs, groups, policies
 * and substitution mappings, finally resolving intrinsic functions
 * (get_input / get_property / get_attribute / ...) in place.</p>
 */
public class TopologyTemplate {

    private static final String DESCRIPTION = "description";
    private static final String INPUTS = "inputs";
    private static final String NODE_TEMPLATES = "node_templates";
    private static final String RELATIONSHIP_TEMPLATES = "relationship_templates";
    private static final String OUTPUTS = "outputs";
    private static final String GROUPS = "groups";
    private static final String SUBSTITUTION_MAPPINGS = "substitution_mappings";
    private static final String POLICIES = "policies";
    private static final String METADATA = "metadata";

    // The complete set of keys a topology_template section may contain;
    // anything else is reported by _validateField().
    private static final String[] SECTIONS = {
        DESCRIPTION, INPUTS, NODE_TEMPLATES, RELATIONSHIP_TEMPLATES,
        OUTPUTS, GROUPS, SUBSTITUTION_MAPPINGS, POLICIES, METADATA
    };

    private LinkedHashMap<String, Object> tpl;
    LinkedHashMap<String, Object> metaData;
    private ArrayList<Input> inputs;
    private ArrayList<Output> outputs;
    private ArrayList<RelationshipTemplate> relationshipTemplates;
    private ArrayList<NodeTemplate> nodeTemplates;
    private LinkedHashMap<String, Object> customDefs;
    private LinkedHashMap<String, Object> relTypes; //TYPE
    private NodeTemplate subMappedNodeTemplate;
    private ArrayList<Group> groups;
    private ArrayList<Policy> policies;
    private LinkedHashMap<String, Object> parsedParams = null; //TYPE
    private String description;
    private ToscaGraph graph;
    private SubstitutionMappings substitutionMappings;

    /**
     * Loads the template data.
     *
     * @param _template              raw {@code topology_template} map (may be null; nothing is built then)
     * @param _customDefs            custom type definitions in scope
     * @param _relTypes              relationship type definitions
     * @param _parsedParams          caller-supplied values for declared inputs (may be null)
     * @param _subMappedNodeTemplate enclosing node template when this topology
     *                               is substituted for a node, else null
     */
    public TopologyTemplate(
            LinkedHashMap<String, Object> _template,
            LinkedHashMap<String, Object> _customDefs,
            LinkedHashMap<String, Object> _relTypes, //TYPE
            LinkedHashMap<String, Object> _parsedParams,
            NodeTemplate _subMappedNodeTemplate) {

        tpl = _template;
        if (tpl != null) {
            subMappedNodeTemplate = _subMappedNodeTemplate;
            metaData = _metaData();
            customDefs = _customDefs;
            relTypes = _relTypes;
            parsedParams = _parsedParams;
            _validateField();
            description = _tplDescription();
            inputs = _inputs();
            relationshipTemplates = _relationshipTemplates();
            nodeTemplates = _nodeTemplates();
            outputs = _outputs();
            if (nodeTemplates != null) {
                graph = new ToscaGraph(nodeTemplates);
            }
            groups = _groups();
            policies = _policies();
            _processIntrinsicFunctions();
            substitutionMappings = _substitutionMappings();
        }
    }

    /**
     * Builds the Input objects declared under "inputs", validating each one
     * against the supplied parsed parameter (or its default when absent).
     */
    @SuppressWarnings("unchecked")
    private ArrayList<Input> _inputs() {
        ArrayList<Input> alInputs = new ArrayList<>();
        for (String name : _tplInputs().keySet()) {
            Object attrs = _tplInputs().get(name);
            Input input = new Input(name, (LinkedHashMap<String, Object>) attrs, customDefs);
            if (parsedParams != null && parsedParams.get(name) != null) {
                input.validate(parsedParams.get(name));
            } else {
                Object defaultValue = input.getDefault();
                if (defaultValue != null) {
                    input.validate(defaultValue);
                }
            }
            // Warn when a required input has neither a supplied value nor a default.
            boolean notSupplied =
                    parsedParams == null || parsedParams.get(input.getName()) == null;
            if (notSupplied && input.isRequired() && input.getDefault() == null) {
                System.out.format(
                    "Log warning: The required parameter \"%s\" is not provided\n",
                    input.getName());
            }
            alInputs.add(input);
        }
        return alInputs;
    }

    /** Returns the "metadata" map, or an empty map when absent. */
    @SuppressWarnings("unchecked")
    private LinkedHashMap<String, Object> _metaData() {
        if (tpl.get(METADATA) != null) {
            return (LinkedHashMap<String, Object>) tpl.get(METADATA);
        }
        return new LinkedHashMap<String, Object>();
    }

    /**
     * Builds the NodeTemplate objects declared under "node_templates".
     * A template is validated and kept only when its type is either a known
     * TOSCA definition or covered by the custom definitions in scope.
     */
    private ArrayList<NodeTemplate> _nodeTemplates() {
        ArrayList<NodeTemplate> alNodeTemplates = new ArrayList<>();
        LinkedHashMap<String, Object> tpls = _tplNodeTemplates();
        if (tpls != null) {
            for (String name : tpls.keySet()) {
                // renamed from "tpl" to avoid shadowing the field of the same name
                NodeTemplate nodeTpl = new NodeTemplate(name,
                                                        tpls,
                                                        customDefs,
                                                        relationshipTemplates,
                                                        relTypes);
                if (nodeTpl.getTypeDefinition() != null) {
                    boolean isKnownType = NodeType.TOSCA_DEF.get(nodeTpl.getType()) != null;
                    if (isKnownType
                            || (nodeTpl.getCustomDef() != null && !nodeTpl.getCustomDef().isEmpty())) {
                        nodeTpl.validate();
                        alNodeTemplates.add(nodeTpl);
                    }
                }
            }
        }
        return alNodeTemplates;
    }

    /** Builds the RelationshipTemplate objects declared under "relationship_templates". */
    @SuppressWarnings("unchecked")
    private ArrayList<RelationshipTemplate> _relationshipTemplates() {
        ArrayList<RelationshipTemplate> alRelationshipTemplates = new ArrayList<>();
        LinkedHashMap<String, Object> tpls = _tplRelationshipTemplates();
        if (tpls != null) {
            for (String name : tpls.keySet()) {
                RelationshipTemplate relTpl = new RelationshipTemplate(
                        (LinkedHashMap<String, Object>) tpls.get(name), name, customDefs, null, null);
                alRelationshipTemplates.add(relTpl);
            }
        }
        return alRelationshipTemplates;
    }

    /** Builds and validates the Output objects declared under "outputs". */
    @SuppressWarnings("unchecked")
    private ArrayList<Output> _outputs() {
        ArrayList<Output> alOutputs = new ArrayList<>();
        for (Map.Entry<String, Object> me : _tplOutputs().entrySet()) {
            Output output = new Output(me.getKey(), (LinkedHashMap<String, Object>) me.getValue());
            output.validate();
            alOutputs.add(output);
        }
        return alOutputs;
    }

    /**
     * Builds the SubstitutionMappings object when the section is present and
     * non-empty, else returns null.
     *
     * NOTE: the Python source additionally requires sub_mapped_node_template
     * ("#if tpl_substitution_mapping and self.sub_mapped_node_template:"),
     * but that half of the condition is commented out upstream too; this
     * port deliberately keeps the weaker condition.
     */
    private SubstitutionMappings _substitutionMappings() {
        LinkedHashMap<String, Object> tplSubstitutionMapping = _tplSubstitutionMappings();
        if (tplSubstitutionMapping != null && !tplSubstitutionMapping.isEmpty()) {
            return new SubstitutionMappings(tplSubstitutionMapping,
                                            nodeTemplates,
                                            inputs,
                                            outputs,
                                            groups,
                                            subMappedNodeTemplate,
                                            customDefs);
        }
        return null;
    }

    /**
     * Builds the Policy objects declared under "policies".
     * Policy targets are resolved first as groups; when no group matches they
     * are resolved as node templates instead (mirrors the Python logic
     * "if not target_objects: targets_type = 'node_templates'").
     */
    @SuppressWarnings("unchecked")
    private ArrayList<Policy> _policies() {
        ArrayList<Policy> alPolicies = new ArrayList<>();
        for (Object po : _tplPolicies()) {
            LinkedHashMap<String, Object> policy = (LinkedHashMap<String, Object>) po;
            for (Map.Entry<String, Object> me : policy.entrySet()) {
                String policyName = me.getKey();
                LinkedHashMap<String, Object> policyTpl = (LinkedHashMap<String, Object>) me.getValue();
                ArrayList<String> targetList = (ArrayList<String>) policyTpl.get("targets");
                ArrayList<Object> targetObjects = new ArrayList<>();
                String targetsType = "groups";
                if (targetList != null && !targetList.isEmpty()) {
                    ArrayList<Group> targetGroups = _getPolicyGroups(targetList);
                    // BUG FIX: _getPolicyGroups never returns null — it returns an
                    // empty list when no group matches — so the original test
                    // "targetGroups == null" made the node-template fallback
                    // unreachable. The Python source tests emptiness
                    // ("if not target_objects"), which is what we do here.
                    if (targetGroups == null || targetGroups.isEmpty()) {
                        targetsType = "node_templates";
                        targetObjects.addAll(_getGroupMembers(targetList));
                    } else {
                        targetObjects.addAll(targetGroups);
                    }
                }
                Policy policyObj = new Policy(policyName,
                                              policyTpl,
                                              targetObjects,
                                              targetsType,
                                              customDefs);
                alPolicies.add(policyObj);
            }
        }
        return alPolicies;
    }

    /**
     * Builds the Group objects declared under "groups". Member lists must be
     * non-empty and free of duplicates; violations are reported as warnings
     * and leave the group's member nodes unresolved.
     */
    @SuppressWarnings("unchecked")
    private ArrayList<Group> _groups() {
        // renamed from "groups" to avoid shadowing the field of the same name
        ArrayList<Group> alGroups = new ArrayList<>();
        ArrayList<NodeTemplate> memberNodes = null;
        for (Map.Entry<String, Object> me : _tplGroups().entrySet()) {
            String groupName = me.getKey();
            LinkedHashMap<String, Object> groupTpl = (LinkedHashMap<String, Object>) me.getValue();
            ArrayList<String> memberNames = (ArrayList<String>) groupTpl.get("members");
            if (memberNames != null) {
                DataEntity.validateDatatype("list", memberNames, null, null, null);
                if (memberNames.size() < 1
                        || new HashSet<String>(memberNames).size() != memberNames.size()) {
                    ThreadLocalsHolder.getCollector().appendWarning(String.format(
                        "InvalidGroupTargetException: Member nodes \"%s\" should be >= 1 and not repeated",
                        memberNames.toString()));
                } else {
                    memberNodes = _getGroupMembers(memberNames);
                }
            }
            alGroups.add(new Group(groupName, groupTpl, memberNodes, customDefs));
        }
        return alGroups;
    }

    /** Resolves member names to NodeTemplate objects, validating each name first. */
    private ArrayList<NodeTemplate> _getGroupMembers(ArrayList<String> memberNames) {
        ArrayList<NodeTemplate> memberNodes = new ArrayList<>();
        _validateGroupMembers(memberNames);
        for (String member : memberNames) {
            for (NodeTemplate node : nodeTemplates) {
                if (member.equals(node.getName())) {
                    memberNodes.add(node);
                }
            }
        }
        return memberNodes;
    }

    /** Resolves member names to Group objects; names with no match are skipped. */
    private ArrayList<Group> _getPolicyGroups(ArrayList<String> memberNames) {
        ArrayList<Group> memberGroups = new ArrayList<>();
        for (String member : memberNames) {
            for (Group group : groups) {
                if (member.equals(group.getName())) {
                    memberGroups.add(group);
                }
            }
        }
        return memberGroups;
    }

    /** Reports every member name that does not match a known node template. */
    private void _validateGroupMembers(ArrayList<String> members) {
        ArrayList<String> nodeNames = new ArrayList<>();
        for (NodeTemplate node : nodeTemplates) {
            nodeNames.add(node.getName());
        }
        for (String member : members) {
            if (!nodeNames.contains(member)) {
                ThreadLocalsHolder.getCollector().appendException(String.format(
                    "InvalidGroupTargetException: Target member \"%s\" is not found in \"nodeTemplates\"",
                    member));
            }
        }
    }

    // A topology template can act like a node template when it is exposed
    // through substitution_mappings. The three accessors below mirror the
    // Python properties, which return None when there are no mappings.

    /** Node type this topology substitutes for, or null when there are no mappings. */
    public String nodetype() {
        // BUG FIX: guard against a missing substitution_mappings section; the
        // original dereferenced substitutionMappings unconditionally and threw
        // NPE for templates without one (Python: "... if self.substitution_mappings else None").
        return substitutionMappings != null ? substitutionMappings.getNodeType() : null;
    }

    /** Capabilities exposed by the substitution mappings, or null when absent. */
    public LinkedHashMap capabilities() {
        // BUG FIX: null guard, see nodetype()
        return substitutionMappings != null ? substitutionMappings.getCapabilities() : null;
    }

    /** Requirements exposed by the substitution mappings, or null when absent. */
    public LinkedHashMap requirements() {
        // BUG FIX: null guard, see nodetype()
        return substitutionMappings != null ? substitutionMappings.getRequirements() : null;
    }

    private String _tplDescription() {
        // NOTE(review): the Python source strips trailing whitespace
        // (description.rstrip()); this port returns the raw value unchanged.
        return (String) tpl.get(DESCRIPTION);
    }

    @SuppressWarnings("unchecked")
    private LinkedHashMap<String, Object> _tplInputs() {
        if (tpl.get(INPUTS) != null) {
            return (LinkedHashMap<String, Object>) tpl.get(INPUTS);
        }
        return new LinkedHashMap<String, Object>();
    }

    // May return null; _nodeTemplates() checks for that.
    @SuppressWarnings("unchecked")
    private LinkedHashMap<String, Object> _tplNodeTemplates() {
        return (LinkedHashMap<String, Object>) tpl.get(NODE_TEMPLATES);
    }

    @SuppressWarnings("unchecked")
    private LinkedHashMap<String, Object> _tplRelationshipTemplates() {
        if (tpl.get(RELATIONSHIP_TEMPLATES) != null) {
            return (LinkedHashMap<String, Object>) tpl.get(RELATIONSHIP_TEMPLATES);
        }
        return new LinkedHashMap<String, Object>();
    }

    @SuppressWarnings("unchecked")
    private LinkedHashMap<String, Object> _tplOutputs() {
        if (tpl.get(OUTPUTS) != null) {
            return (LinkedHashMap<String, Object>) tpl.get(OUTPUTS);
        }
        return new LinkedHashMap<String, Object>();
    }

    @SuppressWarnings("unchecked")
    private LinkedHashMap<String, Object> _tplSubstitutionMappings() {
        if (tpl.get(SUBSTITUTION_MAPPINGS) != null) {
            return (LinkedHashMap<String, Object>) tpl.get(SUBSTITUTION_MAPPINGS);
        }
        return new LinkedHashMap<String, Object>();
    }

    @SuppressWarnings("unchecked")
    private LinkedHashMap<String, Object> _tplGroups() {
        if (tpl.get(GROUPS) != null) {
            return (LinkedHashMap<String, Object>) tpl.get(GROUPS);
        }
        return new LinkedHashMap<String, Object>();
    }

    @SuppressWarnings("unchecked")
    private ArrayList<Object> _tplPolicies() {
        if (tpl.get(POLICIES) != null) {
            return (ArrayList<Object>) tpl.get(POLICIES);
        }
        return new ArrayList<Object>();
    }

    /** Reports every top-level key that is not one of the known SECTIONS. */
    private void _validateField() {
        for (String name : tpl.keySet()) {
            boolean found = false;
            for (String section : SECTIONS) {
                if (name.equals(section)) {
                    found = true;
                    break;
                }
            }
            if (!found) {
                ThreadLocalsHolder.getCollector().appendException(String.format(
                    "UnknownFieldError: TopologyTemplate contains unknown field \"%s\"", name));
            }
        }
    }

    /**
     * Resolves intrinsic functions in place.
     *
     * Current implementation processes functions within node template
     * properties, requirements, interface inputs, capability properties,
     * relationship-template interface inputs, and template outputs.
     */
    @SuppressWarnings("unchecked")
    private void _processIntrinsicFunctions() {
        if (nodeTemplates != null) {
            for (NodeTemplate nt : nodeTemplates) {
                // node template properties
                for (Property prop : nt.getPropertiesObjects()) {
                    prop.setValue(Function.getFunction(this, nt, prop.getValue()));
                }
                // interface operation inputs
                for (InterfacesDef ifd : nt.getInterfaces()) {
                    LinkedHashMap<String, Object> ifin = ifd.getInputs();
                    if (ifin != null) {
                        for (Map.Entry<String, Object> me : ifin.entrySet()) {
                            ifd.setInput(me.getKey(), Function.getFunction(this, nt, me.getValue()));
                        }
                    }
                }
                // properties on inline requirement relationships
                if (nt.getRequirements() != null
                        && nt.getRequirements() instanceof ArrayList) {
                    for (Object oreq : nt.getRequirements()) {
                        LinkedHashMap<String, Object> req = (LinkedHashMap<String, Object>) oreq;
                        LinkedHashMap<String, Object> rel = req;
                        for (String reqName : req.keySet()) {
                            Object reqItem = req.get(reqName);
                            if (reqItem instanceof LinkedHashMap) {
                                // "relationship" can be a symbolic name (String)
                                // or an inline map carrying its own properties
                                Object t = ((LinkedHashMap<String, Object>) reqItem).get("relationship");
                                if (t instanceof LinkedHashMap) {
                                    rel = (LinkedHashMap<String, Object>) t;
                                } else {
                                    // not a map: null it so the "properties"
                                    // lookup below is skipped
                                    rel = null;
                                }
                                break;
                            }
                        }
                        if (rel != null && rel.get("properties") != null) {
                            LinkedHashMap<String, Object> relprops =
                                    (LinkedHashMap<String, Object>) rel.get("properties");
                            for (String key : relprops.keySet()) {
                                relprops.put(key, Function.getFunction(this, req, relprops.get(key)));
                            }
                        }
                    }
                }
                // capability properties (only get_input results are written back)
                if (nt.getCapabilitiesObjects() != null) {
                    for (Capability cap : nt.getCapabilitiesObjects()) {
                        if (cap.getPropertiesObjects() != null) {
                            for (Property prop : cap.getPropertiesObjects()) {
                                Object propvalue = Function.getFunction(this, nt, prop.getValue());
                                if (propvalue instanceof GetInput) {
                                    propvalue = ((GetInput) propvalue).result();
                                    for (String p : cap.getProperties().keySet()) {
                                        if (p.equals(prop.getName())) {
                                            cap.setProperty(p, propvalue);
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
                // interface inputs on relationship templates reached via this node
                for (RelationshipType relType : nt.getRelationships().keySet()) {
                    NodeTemplate node = nt.getRelationships().get(relType);
                    ArrayList<RelationshipTemplate> relTpls = node.getRelationshipTemplate();
                    if (relTpls != null) {
                        for (RelationshipTemplate relTpl : relTpls) {
                            for (InterfacesDef iface : relTpl.getInterfaces()) {
                                if (iface.getInputs() != null) {
                                    for (String name : iface.getInputs().keySet()) {
                                        Object value = iface.getInputs().get(name);
                                        iface.setInput(name, Function.getFunction(this, relTpl, value));
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }
        // template outputs: only get_attribute results are written back
        for (Output output : outputs) {
            Object func = Function.getFunction(this, outputs, output.getValue());
            if (func instanceof GetAttribute) {
                output.setAttr(Output.VALUE, func);
            }
        }
    }

    /**
     * Returns the substituted node type declared under
     * topology_template/substitution_mappings, or null when absent.
     */
    @SuppressWarnings("unchecked")
    public static String getSubMappingNodeType(LinkedHashMap<String, Object> topologyTpl) {
        if (topologyTpl != null) {
            Object submapTpl = topologyTpl.get(SUBSTITUTION_MAPPINGS);
            return SubstitutionMappings.stGetNodeType((LinkedHashMap<String, Object>) submapTpl);
        }
        return null;
    }

    // getters

    public LinkedHashMap<String, Object> getTpl() {
        return tpl;
    }

    public LinkedHashMap<String, Object> getMetadata() {
        return metaData;
    }

    public ArrayList<Input> getInputs() {
        return inputs;
    }

    public ArrayList<Output> getOutputs() {
        return outputs;
    }

    public ArrayList<Policy> getPolicies() {
        return policies;
    }

    public ArrayList<RelationshipTemplate> getRelationshipTemplates() {
        return relationshipTemplates;
    }

    public ArrayList<NodeTemplate> getNodeTemplates() {
        return nodeTemplates;
    }

    public ArrayList<Group> getGroups() {
        return groups;
    }

    public SubstitutionMappings getSubstitutionMappings() {
        return substitutionMappings;
    }

    public LinkedHashMap<String, Object> getParsedParams() {
        return parsedParams;
    }
}
+ + +import logging + +from toscaparser.common import exception +from toscaparser.dataentity import DataEntity +from toscaparser import functions +from toscaparser.groups import Group +from toscaparser.nodetemplate import NodeTemplate +from toscaparser.parameters import Input +from toscaparser.parameters import Output +from toscaparser.policy import Policy +from toscaparser.relationship_template import RelationshipTemplate +from toscaparser.substitution_mappings import SubstitutionMappings +from toscaparser.tpl_relationship_graph import ToscaGraph +from toscaparser.utils.gettextutils import _ + + +# Topology template key names +SECTIONS = (DESCRIPTION, INPUTS, NODE_TEMPLATES, + RELATIONSHIP_TEMPLATES, OUTPUTS, GROUPS, + SUBSTITUION_MAPPINGS, POLICIES) = \ + ('description', 'inputs', 'node_templates', + 'relationship_templates', 'outputs', 'groups', + 'substitution_mappings', 'policies') + +log = logging.getLogger("tosca.model") + + +class TopologyTemplate(object): + + '''Load the template data.''' + def __init__(self, template, custom_defs, + rel_types=None, parsed_params=None, + sub_mapped_node_template=None): + self.tpl = template + self.sub_mapped_node_template = sub_mapped_node_template + if self.tpl: + self.custom_defs = custom_defs + self.rel_types = rel_types + self.parsed_params = parsed_params + self._validate_field() + self.description = self._tpl_description() + self.inputs = self._inputs() + self.relationship_templates = self._relationship_templates() + self.nodetemplates = self._nodetemplates() + self.outputs = self._outputs() + if hasattr(self, 'nodetemplates'): + self.graph = ToscaGraph(self.nodetemplates) + self.groups = self._groups() + self.policies = self._policies() + self._process_intrinsic_functions() + self.substitution_mappings = self._substitution_mappings() + + def _inputs(self): + inputs = [] + for name, attrs in self._tpl_inputs().items(): + input = Input(name, attrs) + if self.parsed_params and name in self.parsed_params: + 
input.validate(self.parsed_params[name]) + else: + default = input.default + if default: + input.validate(default) + if (self.parsed_params and input.name not in self.parsed_params + or self.parsed_params is None) and input.required \ + and input.default is None: + log.warning(_('The required parameter %s ' + 'is not provided') % input.name) + + inputs.append(input) + return inputs + + def _nodetemplates(self): + nodetemplates = [] + tpls = self._tpl_nodetemplates() + if tpls: + for name in tpls: + tpl = NodeTemplate(name, tpls, self.custom_defs, + self.relationship_templates, + self.rel_types) + if (tpl.type_definition and + (tpl.type in tpl.type_definition.TOSCA_DEF or + (tpl.type not in tpl.type_definition.TOSCA_DEF and + bool(tpl.custom_def)))): + tpl.validate(self) + nodetemplates.append(tpl) + return nodetemplates + + def _relationship_templates(self): + rel_templates = [] + tpls = self._tpl_relationship_templates() + for name in tpls: + tpl = RelationshipTemplate(tpls[name], name, self.custom_defs) + rel_templates.append(tpl) + return rel_templates + + def _outputs(self): + outputs = [] + for name, attrs in self._tpl_outputs().items(): + output = Output(name, attrs) + output.validate() + outputs.append(output) + return outputs + + def _substitution_mappings(self): + tpl_substitution_mapping = self._tpl_substitution_mappings() + # if tpl_substitution_mapping and self.sub_mapped_node_template: + if tpl_substitution_mapping: + return SubstitutionMappings(tpl_substitution_mapping, + self.nodetemplates, + self.inputs, + self.outputs, + self.sub_mapped_node_template, + self.custom_defs) + + def _policies(self): + policies = [] + for policy in self._tpl_policies(): + for policy_name, policy_tpl in policy.items(): + target_list = policy_tpl.get('targets') + if target_list and len(target_list) >= 1: + target_objects = [] + targets_type = "groups" + target_objects = self._get_policy_groups(target_list) + if not target_objects: + targets_type = "node_templates" + 
target_objects = self._get_group_members(target_list) + policyObj = Policy(policy_name, policy_tpl, + target_objects, targets_type, + self.custom_defs) + policies.append(policyObj) + return policies + + def _groups(self): + groups = [] + member_nodes = None + for group_name, group_tpl in self._tpl_groups().items(): + member_names = group_tpl.get('members') + if member_names is not None: + DataEntity.validate_datatype('list', member_names) + if len(member_names) < 1 or \ + len(member_names) != len(set(member_names)): + exception.ExceptionCollector.appendException( + exception.InvalidGroupTargetException( + message=_('Member nodes "%s" should be >= 1 ' + 'and not repeated') % member_names)) + else: + member_nodes = self._get_group_members(member_names) + group = Group(group_name, group_tpl, + member_nodes, + self.custom_defs) + groups.append(group) + return groups + + def _get_group_members(self, member_names): + member_nodes = [] + self._validate_group_members(member_names) + for member in member_names: + for node in self.nodetemplates: + if node.name == member: + member_nodes.append(node) + return member_nodes + + def _get_policy_groups(self, member_names): + member_groups = [] + for member in member_names: + for group in self.groups: + if group.name == member: + member_groups.append(group) + return member_groups + + def _validate_group_members(self, members): + node_names = [] + for node in self.nodetemplates: + node_names.append(node.name) + for member in members: + if member not in node_names: + exception.ExceptionCollector.appendException( + exception.InvalidGroupTargetException( + message=_('Target member "%s" is not found in ' + 'node_templates') % member)) + + # topology template can act like node template + # it is exposed by substitution_mappings. 
+ def nodetype(self): + return self.substitution_mappings.node_type \ + if self.substitution_mappings else None + + def capabilities(self): + return self.substitution_mappings.capabilities \ + if self.substitution_mappings else None + + def requirements(self): + return self.substitution_mappings.requirements \ + if self.substitution_mappings else None + + def _tpl_description(self): + description = self.tpl.get(DESCRIPTION) + if description: + return description.rstrip() + + def _tpl_inputs(self): + return self.tpl.get(INPUTS) or {} + + def _tpl_nodetemplates(self): + return self.tpl.get(NODE_TEMPLATES) + + def _tpl_relationship_templates(self): + return self.tpl.get(RELATIONSHIP_TEMPLATES) or {} + + def _tpl_outputs(self): + return self.tpl.get(OUTPUTS) or {} + + def _tpl_substitution_mappings(self): + return self.tpl.get(SUBSTITUION_MAPPINGS) or {} + + def _tpl_groups(self): + return self.tpl.get(GROUPS) or {} + + def _tpl_policies(self): + return self.tpl.get(POLICIES) or {} + + def _validate_field(self): + for name in self.tpl: + if name not in SECTIONS: + exception.ExceptionCollector.appendException( + exception.UnknownFieldError(what='Template', field=name)) + + def _process_intrinsic_functions(self): + """Process intrinsic functions + + Current implementation processes functions within node template + properties, requirements, interfaces inputs and template outputs. 
+ """ + if hasattr(self, 'nodetemplates'): + for node_template in self.nodetemplates: + for prop in node_template.get_properties_objects(): + prop.value = functions.get_function(self, + node_template, + prop.value) + for interface in node_template.interfaces: + if interface.inputs: + for name, value in interface.inputs.items(): + interface.inputs[name] = functions.get_function( + self, + node_template, + value) + if node_template.requirements and \ + isinstance(node_template.requirements, list): + for req in node_template.requirements: + rel = req + for req_name, req_item in req.items(): + if isinstance(req_item, dict): + rel = req_item.get('relationship') + break + if rel and 'properties' in rel: + for key, value in rel['properties'].items(): + rel['properties'][key] = \ + functions.get_function(self, + req, + value) + if node_template.get_capabilities_objects(): + for cap in node_template.get_capabilities_objects(): + if cap.get_properties_objects(): + for prop in cap.get_properties_objects(): + propvalue = functions.get_function( + self, + node_template, + prop.value) + if isinstance(propvalue, functions.GetInput): + propvalue = propvalue.result() + for p, v in cap._properties.items(): + if p == prop.name: + cap._properties[p] = propvalue + for rel, node in node_template.relationships.items(): + rel_tpls = node.relationship_tpl + if rel_tpls: + for rel_tpl in rel_tpls: + for interface in rel_tpl.interfaces: + if interface.inputs: + for name, value in \ + interface.inputs.items(): + interface.inputs[name] = \ + functions.get_function(self, + rel_tpl, + value) + for output in self.outputs: + func = functions.get_function(self, self.outputs, output.value) + if isinstance(func, functions.GetAttribute): + output.attrs[output.VALUE] = func + + @classmethod + def get_sub_mapping_node_type(cls, topology_tpl): + if topology_tpl and isinstance(topology_tpl, dict): + submap_tpl = topology_tpl.get(SUBSTITUION_MAPPINGS) + return SubstitutionMappings.get_node_type(submap_tpl) 
+*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/TopologyTemplate.java.orig b/src/main/java/org/openecomp/sdc/toscaparser/api/TopologyTemplate.java.orig new file mode 100644 index 0000000..3af4b34 --- /dev/null +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/TopologyTemplate.java.orig @@ -0,0 +1,857 @@ +package org.openecomp.sdc.toscaparser.api; + +import java.util.ArrayList; +import java.util.HashSet; +import java.util.LinkedHashMap; +import java.util.Map; + +import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; +import org.openecomp.sdc.toscaparser.api.elements.InterfacesDef; +import org.openecomp.sdc.toscaparser.api.elements.NodeType; +import org.openecomp.sdc.toscaparser.api.elements.RelationshipType; +import org.openecomp.sdc.toscaparser.api.functions.Function; +import org.openecomp.sdc.toscaparser.api.functions.GetAttribute; +import org.openecomp.sdc.toscaparser.api.functions.GetInput; +import org.openecomp.sdc.toscaparser.api.parameters.Input; +import org.openecomp.sdc.toscaparser.api.parameters.Output; +import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +public class TopologyTemplate { + + private static final String DESCRIPTION = "description"; + private static final String INPUTS = "inputs"; + private static final String NODE_TEMPLATES = "node_templates"; + private static final String RELATIONSHIP_TEMPLATES = "relationship_templates"; + private static final String OUTPUTS = "outputs"; + private static final String GROUPS = "groups"; + private static final String SUBSTITUTION_MAPPINGS = "substitution_mappings"; + private static final String POLICIES = "policies"; + private static final String METADATA = "metadata"; + + private static String SECTIONS[] = { + DESCRIPTION, INPUTS, NODE_TEMPLATES, RELATIONSHIP_TEMPLATES, + OUTPUTS, GROUPS, SUBSTITUTION_MAPPINGS, POLICIES, METADATA + }; + + private LinkedHashMap tpl; + LinkedHashMap metaData; + private ArrayList inputs; + private 
ArrayList outputs; + private ArrayList relationshipTemplates; + private ArrayList nodeTemplates; + private LinkedHashMap customDefs; + private LinkedHashMap relTypes;//TYPE + private NodeTemplate subMappedNodeTemplate; + private ArrayList groups; + private ArrayList policies; + private LinkedHashMap parsedParams = null;//TYPE + private String description; + private ToscaGraph graph; + private SubstitutionMappings substitutionMappings; + + public TopologyTemplate( + LinkedHashMap _template, + LinkedHashMap _customDefs, + LinkedHashMap _relTypes,//TYPE + LinkedHashMap _parsedParams, + NodeTemplate _subMappedNodeTemplate) { + + tpl = _template; + if(tpl != null) { + subMappedNodeTemplate = _subMappedNodeTemplate; + metaData = _metaData(); + customDefs = _customDefs; + relTypes = _relTypes; + parsedParams = _parsedParams; + _validateField(); + description = _tplDescription(); + inputs = _inputs(); + relationshipTemplates =_relationshipTemplates(); + nodeTemplates = _nodeTemplates(); + outputs = _outputs(); + if(nodeTemplates != null) { + graph = new ToscaGraph(nodeTemplates); + } + groups = _groups(); + policies = _policies(); + _processIntrinsicFunctions(); + substitutionMappings = _substitutionMappings(); + } + } + + @SuppressWarnings("unchecked") + private ArrayList _inputs() { + //DumpUtils.dumpYaml(customDefs,0); + ArrayList alInputs = new ArrayList<>(); + for(String name: _tplInputs().keySet()) { + Object attrs = _tplInputs().get(name); + Input input = new Input(name,(LinkedHashMap)attrs,customDefs); + if(parsedParams != null && parsedParams.get(name) != null) { + input.validate(parsedParams.get(name)); + } + else { + Object _default = input.getDefault(); + if(_default != null) { + input.validate(_default); + } + } + if((parsedParams != null && parsedParams.get(input.getName()) == null || parsedParams == null) + && input.isRequired() && input.getDefault() == null) { + System.out.format("Log warning: The required parameter \"%s\" is not 
provided\n",input.getName()); + } + alInputs.add(input); + } + return alInputs; + + } + + private LinkedHashMap _metaData() { + if(tpl.get(METADATA) != null) { + return (LinkedHashMap)tpl.get(METADATA); + } + else { + return new LinkedHashMap(); + } + + } + + private ArrayList _nodeTemplates() { + ArrayList alNodeTemplates = new ArrayList<>(); + LinkedHashMap tpls = _tplNodeTemplates(); + if(tpls != null) { + for(String name: tpls.keySet()) { + NodeTemplate tpl = new NodeTemplate(name, + tpls, + customDefs, + relationshipTemplates, + relTypes); + if(tpl.getTypeDefinition() != null) { + boolean b = NodeType.TOSCA_DEF.get(tpl.getType()) != null; + if(b || (tpl.getCustomDef() != null && !tpl.getCustomDef().isEmpty())) { + tpl.validate(); + alNodeTemplates.add(tpl); + } + } + } + } + return alNodeTemplates; + } + + @SuppressWarnings("unchecked") + private ArrayList _relationshipTemplates() { + ArrayList alRelationshipTemplates = new ArrayList<>(); + LinkedHashMap tpls = _tplRelationshipTemplates(); + if(tpls != null) { + for(String name: tpls.keySet()) { + RelationshipTemplate tpl = new RelationshipTemplate( + (LinkedHashMap)tpls.get(name),name,customDefs,null,null); + + alRelationshipTemplates.add(tpl); + } + } + return alRelationshipTemplates; + } + + private ArrayList _outputs() { + ArrayList alOutputs = new ArrayList<>(); + for(Map.Entry me: _tplOutputs().entrySet()) { + String oname = me.getKey(); + LinkedHashMap oattrs = (LinkedHashMap)me.getValue(); + Output o = new Output(oname,oattrs); + o.validate(); + alOutputs.add(o); + } + return alOutputs; + } + + private SubstitutionMappings _substitutionMappings() { + LinkedHashMap tplSubstitutionMapping = (LinkedHashMap) _tplSubstitutionMappings(); + + //*** the commenting-out below and the weaker condition are in the Python source + // #if tpl_substitution_mapping and self.sub_mapped_node_template: + if(tplSubstitutionMapping != null && tplSubstitutionMapping.size() > 0) { + return new 
SubstitutionMappings(tplSubstitutionMapping, + nodeTemplates, + inputs, + outputs, + groups, + subMappedNodeTemplate, + customDefs); + } + return null; + + } + + @SuppressWarnings("unchecked") + private ArrayList _policies() { + ArrayList alPolicies = new ArrayList<>(); + for(Object po: _tplPolicies()) { + LinkedHashMap policy = (LinkedHashMap)po; + for(Map.Entry me: policy.entrySet()) { + String policyName = me.getKey(); + LinkedHashMap policyTpl = (LinkedHashMap)me.getValue(); + ArrayList targetList = (ArrayList)policyTpl.get("targets"); + //ArrayList targetObjects = new ArrayList<>(); + ArrayList targetNodes = new ArrayList<>(); + ArrayList targetObjects = new ArrayList<>(); + ArrayList targetGroups = new ArrayList<>(); + String targetsType = "groups"; + if(targetList != null && targetList.size() >= 1) { + targetGroups = _getPolicyGroups(targetList); + if(targetGroups == null) { + targetsType = "node_templates"; + targetNodes = _getGroupMembers(targetList); + for(NodeTemplate nt: targetNodes) { + targetObjects.add(nt); + } + } + else { + for(Group gr: targetGroups) { + targetObjects.add(gr); + } + } + } + Policy policyObj = new Policy(policyName, + policyTpl, + targetObjects, + targetsType, + customDefs); + alPolicies.add(policyObj); + } + } + return alPolicies; + } + + private ArrayList _groups() { + ArrayList groups = new ArrayList<>(); + ArrayList memberNodes = null; + for(Map.Entry me: _tplGroups().entrySet()) { + String groupName = me.getKey(); + LinkedHashMap groupTpl = (LinkedHashMap)me.getValue(); + ArrayList memberNames = (ArrayList)groupTpl.get("members"); + if(memberNames != null) { + DataEntity.validateDatatype("list", memberNames,null,null,null); + if(memberNames.size() < 1 || + (new HashSet(memberNames)).size() != memberNames.size()) { + ThreadLocalsHolder.getCollector().appendException(String.format( + "InvalidGroupTargetException: Member nodes \"%s\" should be >= 1 and not repeated", + memberNames.toString())); + } + else { + memberNodes = 
_getGroupMembers(memberNames); + } + } + Group group = new Group(groupName, + groupTpl, + memberNodes, + customDefs); + groups.add(group); + } + return groups; + } + + private ArrayList _getGroupMembers(ArrayList memberNames) { + ArrayList memberNodes = new ArrayList<>(); + _validateGroupMembers(memberNames); + for(String member: memberNames) { + for(NodeTemplate node: nodeTemplates) { + if(member.equals(node.getName())) { + memberNodes.add(node); + } + } + } + return memberNodes; + } + + private ArrayList _getPolicyGroups(ArrayList memberNames) { + ArrayList memberGroups = new ArrayList<>(); + for(String member: memberNames) { + for(Group group: groups) { + if(member.equals(group.getName())) { + memberGroups.add(group); + } + } + } + return memberGroups; + } + + private void _validateGroupMembers(ArrayList members) { + ArrayList nodeNames = new ArrayList<>(); + for(NodeTemplate node: nodeTemplates) { + nodeNames.add(node.getName()); + } + for(String member: members) { + if(!nodeNames.contains(member)) { + ThreadLocalsHolder.getCollector().appendException(String.format( + "InvalidGroupTargetException: Target member \"%s\" is not found in \"nodeTemplates\"",member)); + } + } + } + + // topology template can act like node template + // it is exposed by substitution_mappings. 
+ + public String nodetype() { + return substitutionMappings.getNodeType(); + } + + public LinkedHashMap capabilities() { + return substitutionMappings.getCapabilities(); + } + + public LinkedHashMap requirements() { + return substitutionMappings.getRequirements(); + } + + private String _tplDescription() { + return (String)tpl.get(DESCRIPTION); + //if description: + // return description.rstrip() + } + + @SuppressWarnings("unchecked") + private LinkedHashMap _tplInputs() { + if(tpl.get(INPUTS) != null) { + return (LinkedHashMap)tpl.get(INPUTS); + } + else { + return new LinkedHashMap(); + } + } + + @SuppressWarnings("unchecked") + private LinkedHashMap _tplNodeTemplates() { + return (LinkedHashMap)tpl.get(NODE_TEMPLATES); + } + + @SuppressWarnings("unchecked") + private LinkedHashMap _tplRelationshipTemplates() { + if(tpl.get(RELATIONSHIP_TEMPLATES) != null) { + return (LinkedHashMap)tpl.get(RELATIONSHIP_TEMPLATES); + } + else { + return new LinkedHashMap(); + } + } + + @SuppressWarnings("unchecked") + private LinkedHashMap _tplOutputs() { + if(tpl.get(OUTPUTS) != null) { + return (LinkedHashMap)tpl.get(OUTPUTS); + } + else { + return new LinkedHashMap(); + } + } + + @SuppressWarnings("unchecked") + private LinkedHashMap _tplSubstitutionMappings() { + if(tpl.get(SUBSTITUTION_MAPPINGS) != null) { + return (LinkedHashMap)tpl.get(SUBSTITUTION_MAPPINGS); + } + else { + return new LinkedHashMap(); + } + } + + @SuppressWarnings("unchecked") + private LinkedHashMap _tplGroups() { + if(tpl.get(GROUPS) != null) { + return (LinkedHashMap)tpl.get(GROUPS); + } + else { + return new LinkedHashMap(); + } + } + + @SuppressWarnings("unchecked") + private ArrayList _tplPolicies() { + if(tpl.get(POLICIES) != null) { + return (ArrayList)tpl.get(POLICIES); + } + else { + return new ArrayList(); + } + } + + private void _validateField() { + for(String name: tpl.keySet()) { + boolean bFound = false; + for(String section: SECTIONS) { + if(name.equals(section)) { + bFound = true; + 
break; + } + } + if(!bFound) { + ThreadLocalsHolder.getCollector().appendException(String.format( + "UnknownFieldError: TopologyTemplate contains unknown field \"%s\"",name)); + } + } + } + + @SuppressWarnings("unchecked") + private void _processIntrinsicFunctions() { + // Process intrinsic functions + + // Current implementation processes functions within node template + // properties, requirements, interfaces inputs and template outputs. + + if(nodeTemplates != null) { + for(NodeTemplate nt: nodeTemplates) { + for(Property prop: nt.getPropertiesObjects()) { + prop.setValue(Function.getFunction(this,nt,prop.getValue())); + } + for(InterfacesDef ifd: nt.getInterfaces()) { + LinkedHashMap ifin = ifd.getInputs(); + if(ifin != null) { + for(Map.Entry me: ifin.entrySet()) { + String name = me.getKey(); + Object value = Function.getFunction(this,nt,me.getValue()); + ifd.setInput(name,value); + } + } + } + if(nt.getRequirements() != null && + nt.getRequirements() instanceof ArrayList) { + for(Object oreq: nt.getRequirements()) { + LinkedHashMap req = (LinkedHashMap)oreq; + LinkedHashMap rel = req; + for(String reqName: req.keySet()) { + Object reqItem = req.get(reqName); + if(reqItem instanceof LinkedHashMap) { + Object t = ((LinkedHashMap)reqItem).get("relationship"); + // it can be a string or a LHM... 
+ if(t instanceof LinkedHashMap) { + rel = (LinkedHashMap)t; + } + else { + // we set it to null to fail the next test + // and avoid the get("proprties") + rel = null; + } + break; + } + } + if(rel != null && rel.get("properties") != null) { + LinkedHashMap relprops = + (LinkedHashMap)rel.get("properties"); + for(String key: relprops.keySet()) { + Object value = relprops.get(key); + Object func = Function.getFunction(this,req,value); + relprops.put(key,func); + } + } + } + } + if(nt.getCapabilitiesObjects() != null) { + for(Capability cap: nt.getCapabilitiesObjects()) { + if(cap.getPropertiesObjects() != null) { + for(Property prop: cap.getPropertiesObjects()) { + Object propvalue = Function.getFunction(this,nt,prop.getValue()); + if(propvalue instanceof GetInput) { + propvalue = ((GetInput)propvalue).result(); + for(String p: cap.getProperties().keySet()) { + //Object v = cap.getProperties().get(p); + if(p.equals(prop.getName())) { + cap.setProperty(p,propvalue); + } + } + } + } + } + } + } + for(RelationshipType rel: nt.getRelationships().keySet()) { + NodeTemplate node = nt.getRelationships().get(rel); + ArrayList relTpls = node.getRelationshipTemplate(); + if(relTpls != null) { + for(RelationshipTemplate relTpl: relTpls) { + // TT 5 + for(InterfacesDef iface: relTpl.getInterfaces()) { + if(iface.getInputs() != null) { + for(String name: iface.getInputs().keySet()) { + Object value = iface.getInputs().get(name); + Object func = Function.getFunction( + this, + relTpl, + value); + iface.setInput(name,func); + } + } + } + } + } + } + } + } + for(Output output: outputs) { + Object func = Function.getFunction(this,outputs,output.getValue()); + if(func instanceof GetAttribute) { + output.setAttr(Output.VALUE,func); + } + } + } + + public static String getSubMappingNodeType(LinkedHashMap topologyTpl) { + if(topologyTpl != null && topologyTpl instanceof LinkedHashMap) { + Object submapTpl = topologyTpl.get(SUBSTITUTION_MAPPINGS); + return 
SubstitutionMappings.stGetNodeType((LinkedHashMap)submapTpl); + } + return null; + } + + // getters + + public LinkedHashMap getTpl() { + return tpl; + } + + public LinkedHashMap getMetadata() { + return metaData; + } + + public ArrayList getInputs() { + return inputs; + } + + public ArrayList getOutputs() { + return outputs; + } + + public ArrayList getPolicies() { + return policies; + } + + public ArrayList getRelationshipTemplates() { + return relationshipTemplates; + } + + public ArrayList getNodeTemplates() { + return nodeTemplates; + } + + public ArrayList getGroups() { + return groups; + } + + public SubstitutionMappings getSubstitutionMappings() { + return substitutionMappings; + } + + public LinkedHashMap getParsedParams() { + return parsedParams; + } +} + +/*python + +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ + +import logging + +from toscaparser.common import exception +from toscaparser.dataentity import DataEntity +from toscaparser import functions +from toscaparser.groups import Group +from toscaparser.nodetemplate import NodeTemplate +from toscaparser.parameters import Input +from toscaparser.parameters import Output +from toscaparser.policy import Policy +from toscaparser.relationship_template import RelationshipTemplate +from toscaparser.substitution_mappings import SubstitutionMappings +from toscaparser.tpl_relationship_graph import ToscaGraph +from toscaparser.utils.gettextutils import _ + + +# Topology template key names +SECTIONS = (DESCRIPTION, INPUTS, NODE_TEMPLATES, + RELATIONSHIP_TEMPLATES, OUTPUTS, GROUPS, + SUBSTITUION_MAPPINGS, POLICIES) = \ + ('description', 'inputs', 'node_templates', + 'relationship_templates', 'outputs', 'groups', + 'substitution_mappings', 'policies') + +log = logging.getLogger("tosca.model") + + +class TopologyTemplate(object): + + '''Load the template data.''' + def __init__(self, template, custom_defs, + rel_types=None, parsed_params=None, + sub_mapped_node_template=None): + self.tpl = template + self.sub_mapped_node_template = sub_mapped_node_template + if self.tpl: + self.custom_defs = custom_defs + self.rel_types = rel_types + self.parsed_params = parsed_params + self._validate_field() + self.description = self._tpl_description() + self.inputs = self._inputs() + self.relationship_templates = self._relationship_templates() + self.nodetemplates = self._nodetemplates() + self.outputs = self._outputs() + if hasattr(self, 'nodetemplates'): + self.graph = ToscaGraph(self.nodetemplates) + self.groups = self._groups() + self.policies = self._policies() + self._process_intrinsic_functions() + self.substitution_mappings = self._substitution_mappings() + + def _inputs(self): + inputs = [] + for name, attrs in self._tpl_inputs().items(): + input = Input(name, attrs) + if self.parsed_params and name in self.parsed_params: + 
input.validate(self.parsed_params[name]) + else: + default = input.default + if default: + input.validate(default) + if (self.parsed_params and input.name not in self.parsed_params + or self.parsed_params is None) and input.required \ + and input.default is None: + log.warning(_('The required parameter %s ' + 'is not provided') % input.name) + + inputs.append(input) + return inputs + + def _nodetemplates(self): + nodetemplates = [] + tpls = self._tpl_nodetemplates() + if tpls: + for name in tpls: + tpl = NodeTemplate(name, tpls, self.custom_defs, + self.relationship_templates, + self.rel_types) + if (tpl.type_definition and + (tpl.type in tpl.type_definition.TOSCA_DEF or + (tpl.type not in tpl.type_definition.TOSCA_DEF and + bool(tpl.custom_def)))): + tpl.validate(self) + nodetemplates.append(tpl) + return nodetemplates + + def _relationship_templates(self): + rel_templates = [] + tpls = self._tpl_relationship_templates() + for name in tpls: + tpl = RelationshipTemplate(tpls[name], name, self.custom_defs) + rel_templates.append(tpl) + return rel_templates + + def _outputs(self): + outputs = [] + for name, attrs in self._tpl_outputs().items(): + output = Output(name, attrs) + output.validate() + outputs.append(output) + return outputs + + def _substitution_mappings(self): + tpl_substitution_mapping = self._tpl_substitution_mappings() + # if tpl_substitution_mapping and self.sub_mapped_node_template: + if tpl_substitution_mapping: + return SubstitutionMappings(tpl_substitution_mapping, + self.nodetemplates, + self.inputs, + self.outputs, + self.sub_mapped_node_template, + self.custom_defs) + + def _policies(self): + policies = [] + for policy in self._tpl_policies(): + for policy_name, policy_tpl in policy.items(): + target_list = policy_tpl.get('targets') + if target_list and len(target_list) >= 1: + target_objects = [] + targets_type = "groups" + target_objects = self._get_policy_groups(target_list) + if not target_objects: + targets_type = "node_templates" + 
target_objects = self._get_group_members(target_list) + policyObj = Policy(policy_name, policy_tpl, + target_objects, targets_type, + self.custom_defs) + policies.append(policyObj) + return policies + + def _groups(self): + groups = [] + member_nodes = None + for group_name, group_tpl in self._tpl_groups().items(): + member_names = group_tpl.get('members') + if member_names is not None: + DataEntity.validate_datatype('list', member_names) + if len(member_names) < 1 or \ + len(member_names) != len(set(member_names)): + exception.ExceptionCollector.appendException( + exception.InvalidGroupTargetException( + message=_('Member nodes "%s" should be >= 1 ' + 'and not repeated') % member_names)) + else: + member_nodes = self._get_group_members(member_names) + group = Group(group_name, group_tpl, + member_nodes, + self.custom_defs) + groups.append(group) + return groups + + def _get_group_members(self, member_names): + member_nodes = [] + self._validate_group_members(member_names) + for member in member_names: + for node in self.nodetemplates: + if node.name == member: + member_nodes.append(node) + return member_nodes + + def _get_policy_groups(self, member_names): + member_groups = [] + for member in member_names: + for group in self.groups: + if group.name == member: + member_groups.append(group) + return member_groups + + def _validate_group_members(self, members): + node_names = [] + for node in self.nodetemplates: + node_names.append(node.name) + for member in members: + if member not in node_names: + exception.ExceptionCollector.appendException( + exception.InvalidGroupTargetException( + message=_('Target member "%s" is not found in ' + 'node_templates') % member)) + + # topology template can act like node template + # it is exposed by substitution_mappings. 
+ def nodetype(self): + return self.substitution_mappings.node_type \ + if self.substitution_mappings else None + + def capabilities(self): + return self.substitution_mappings.capabilities \ + if self.substitution_mappings else None + + def requirements(self): + return self.substitution_mappings.requirements \ + if self.substitution_mappings else None + + def _tpl_description(self): + description = self.tpl.get(DESCRIPTION) + if description: + return description.rstrip() + + def _tpl_inputs(self): + return self.tpl.get(INPUTS) or {} + + def _tpl_nodetemplates(self): + return self.tpl.get(NODE_TEMPLATES) + + def _tpl_relationship_templates(self): + return self.tpl.get(RELATIONSHIP_TEMPLATES) or {} + + def _tpl_outputs(self): + return self.tpl.get(OUTPUTS) or {} + + def _tpl_substitution_mappings(self): + return self.tpl.get(SUBSTITUION_MAPPINGS) or {} + + def _tpl_groups(self): + return self.tpl.get(GROUPS) or {} + + def _tpl_policies(self): + return self.tpl.get(POLICIES) or {} + + def _validate_field(self): + for name in self.tpl: + if name not in SECTIONS: + exception.ExceptionCollector.appendException( + exception.UnknownFieldError(what='Template', field=name)) + + def _process_intrinsic_functions(self): + """Process intrinsic functions + + Current implementation processes functions within node template + properties, requirements, interfaces inputs and template outputs. 
+ """ + if hasattr(self, 'nodetemplates'): + for node_template in self.nodetemplates: + for prop in node_template.get_properties_objects(): + prop.value = functions.get_function(self, + node_template, + prop.value) + for interface in node_template.interfaces: + if interface.inputs: + for name, value in interface.inputs.items(): + interface.inputs[name] = functions.get_function( + self, + node_template, + value) + if node_template.requirements and \ + isinstance(node_template.requirements, list): + for req in node_template.requirements: + rel = req + for req_name, req_item in req.items(): + if isinstance(req_item, dict): + rel = req_item.get('relationship') + break + if rel and 'properties' in rel: + for key, value in rel['properties'].items(): + rel['properties'][key] = \ + functions.get_function(self, + req, + value) + if node_template.get_capabilities_objects(): + for cap in node_template.get_capabilities_objects(): + if cap.get_properties_objects(): + for prop in cap.get_properties_objects(): + propvalue = functions.get_function( + self, + node_template, + prop.value) + if isinstance(propvalue, functions.GetInput): + propvalue = propvalue.result() + for p, v in cap._properties.items(): + if p == prop.name: + cap._properties[p] = propvalue + for rel, node in node_template.relationships.items(): + rel_tpls = node.relationship_tpl + if rel_tpls: + for rel_tpl in rel_tpls: + for interface in rel_tpl.interfaces: + if interface.inputs: + for name, value in \ + interface.inputs.items(): + interface.inputs[name] = \ + functions.get_function(self, + rel_tpl, + value) + for output in self.outputs: + func = functions.get_function(self, self.outputs, output.value) + if isinstance(func, functions.GetAttribute): + output.attrs[output.VALUE] = func + + @classmethod + def get_sub_mapping_node_type(cls, topology_tpl): + if topology_tpl and isinstance(topology_tpl, dict): + submap_tpl = topology_tpl.get(SUBSTITUION_MAPPINGS) + return SubstitutionMappings.get_node_type(submap_tpl) 
+*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/ToscaGraph.java b/src/main/java/org/openecomp/sdc/toscaparser/api/ToscaGraph.java new file mode 100644 index 0000000..2de3bb9 --- /dev/null +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/ToscaGraph.java @@ -0,0 +1,109 @@ +package org.openecomp.sdc.toscaparser.api; + +import java.util.ArrayList; +import java.util.LinkedHashMap; + +import org.openecomp.sdc.toscaparser.api.elements.RelationshipType; + +//import java.util.Iterator; + +public class ToscaGraph { + // Graph of Tosca Node Templates + + private ArrayList nodeTemplates; + private LinkedHashMap vertices; + + public ToscaGraph(ArrayList inodeTemplates) { + nodeTemplates = inodeTemplates; + vertices = new LinkedHashMap(); + _create(); + } + + private void _createVertex(NodeTemplate node) { + if(vertices.get(node.getName()) == null) { + vertices.put(node.getName(),node); + } + } + + private void _createEdge(NodeTemplate node1, + NodeTemplate node2, + RelationshipType relation) { + if(vertices.get(node1.getName()) == null) { + _createVertex(node1); + vertices.get(node1.name)._addNext(node2,relation); + } + } + + public NodeTemplate vertex(String name) { + if(vertices.get(name) != null) { + return vertices.get(name); + } + return null; + } + +// public Iterator getIter() { +// return vertices.values().iterator(); +// } + + private void _create() { + for(NodeTemplate node: nodeTemplates) { + LinkedHashMap relation = node.getRelationships(); + if(relation != null) { + for(RelationshipType rel: relation.keySet()) { + NodeTemplate nodeTpls = relation.get(rel); + for(NodeTemplate tpl: nodeTemplates) { + if(tpl.getName().equals(nodeTpls.getName())) { + _createEdge(node,tpl,rel); + } + } + } + } + _createVertex(node); + } + } + + @Override + public String toString() { + return "ToscaGraph{" + + "nodeTemplates=" + nodeTemplates + + ", vertices=" + vertices + + '}'; + } +} + +/*python + +class ToscaGraph(object): + '''Graph 
of Tosca Node Templates.''' + def __init__(self, nodetemplates): + self.nodetemplates = nodetemplates + self.vertices = {} + self._create() + + def _create_vertex(self, node): + if node not in self.vertices: + self.vertices[node.name] = node + + def _create_edge(self, node1, node2, relationship): + if node1 not in self.vertices: + self._create_vertex(node1) + self.vertices[node1.name]._add_next(node2, + relationship) + + def vertex(self, node): + if node in self.vertices: + return self.vertices[node] + + def __iter__(self): + return iter(self.vertices.values()) + + def _create(self): + for node in self.nodetemplates: + relation = node.relationships + if relation: + for rel, nodetpls in relation.items(): + for tpl in self.nodetemplates: + if tpl.name == nodetpls.name: + self._create_edge(node, tpl, rel) + self._create_vertex(node) +*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/ToscaTemplate.java b/src/main/java/org/openecomp/sdc/toscaparser/api/ToscaTemplate.java new file mode 100644 index 0000000..b13a2a5 --- /dev/null +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/ToscaTemplate.java @@ -0,0 +1,1002 @@ +package org.openecomp.sdc.toscaparser.api; + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.io.InputStream; +import java.util.*; + +import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; +import org.openecomp.sdc.toscaparser.api.common.JToscaException; +import org.openecomp.sdc.toscaparser.api.elements.EntityType; +import org.openecomp.sdc.toscaparser.api.elements.Metadata; +import org.openecomp.sdc.toscaparser.api.extensions.ExtTools; +import org.openecomp.sdc.toscaparser.api.parameters.Input; +import org.openecomp.sdc.toscaparser.api.parameters.Output; +import org.openecomp.sdc.toscaparser.api.prereq.CSAR; +import org.openecomp.sdc.toscaparser.api.utils.JToscaErrorCodes; +import 
org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.yaml.snakeyaml.Yaml; + +public class ToscaTemplate extends Object { + + private static Logger log = LoggerFactory.getLogger(ToscaTemplate.class.getName()); + + // TOSCA template key names + private static final String DEFINITION_VERSION = "tosca_definitions_version"; + private static final String DEFAULT_NAMESPACE = "tosca_default_namespace"; + private static final String TEMPLATE_NAME = "template_name"; + private static final String TOPOLOGY_TEMPLATE = "topology_template"; + private static final String TEMPLATE_AUTHOR = "template_author"; + private static final String TEMPLATE_VERSION = "template_version"; + private static final String DESCRIPTION = "description"; + private static final String IMPORTS = "imports"; + private static final String DSL_DEFINITIONS = "dsl_definitions"; + private static final String NODE_TYPES = "node_types"; + private static final String RELATIONSHIP_TYPES = "relationship_types"; + private static final String RELATIONSHIP_TEMPLATES = "relationship_templates"; + private static final String CAPABILITY_TYPES = "capability_types"; + private static final String ARTIFACT_TYPES = "artifact_types"; + private static final String DATA_TYPES = "data_types"; + private static final String INTERFACE_TYPES = "interface_types"; + private static final String POLICY_TYPES = "policy_types"; + private static final String GROUP_TYPES = "group_types"; + private static final String REPOSITORIES = "repositories"; + + private static String SECTIONS[] = { + DEFINITION_VERSION, DEFAULT_NAMESPACE, TEMPLATE_NAME, + TOPOLOGY_TEMPLATE, TEMPLATE_AUTHOR, TEMPLATE_VERSION, + DESCRIPTION, IMPORTS, DSL_DEFINITIONS, NODE_TYPES, + RELATIONSHIP_TYPES, RELATIONSHIP_TEMPLATES, + CAPABILITY_TYPES, ARTIFACT_TYPES, DATA_TYPES, + INTERFACE_TYPES, POLICY_TYPES, GROUP_TYPES, REPOSITORIES + }; + + // Sections that are specific to individual template 
definitions + private static final String METADATA = "metadata"; + private static ArrayList SPECIAL_SECTIONS; + + private ExtTools exttools = new ExtTools(); + + private ArrayList VALID_TEMPLATE_VERSIONS; + private LinkedHashMap> ADDITIONAL_SECTIONS; + + private boolean isFile; + private String path; + private String inputPath; + private LinkedHashMap parsedParams; + private LinkedHashMap tpl; + private String version; + private ArrayList imports; + private LinkedHashMap relationshipTypes; + private Metadata metaData; + private String description; + private TopologyTemplate topologyTemplate; + private ArrayList repositories; + private ArrayList inputs; + private ArrayList relationshipTemplates; + private ArrayList nodeTemplates; + private ArrayList outputs; + private ArrayList policies; + private LinkedHashMap nestedToscaTplsWithTopology; + private ArrayList nestedToscaTemplatesWithTopology; + private ToscaGraph graph; + private String csarTempDir; + private int nestingLoopCounter; + private LinkedHashMap> metaProperties; + + @SuppressWarnings("unchecked") + public ToscaTemplate(String _path, + LinkedHashMap _parsedParams, + boolean aFile, + LinkedHashMap yamlDictTpl) throws JToscaException { + + ThreadLocalsHolder.setCollector(new ExceptionCollector(_path)); + + VALID_TEMPLATE_VERSIONS = new ArrayList<>(); + VALID_TEMPLATE_VERSIONS.add("tosca_simple_yaml_1_0"); + VALID_TEMPLATE_VERSIONS.addAll(exttools.getVersions()); + ADDITIONAL_SECTIONS = new LinkedHashMap<>(); + SPECIAL_SECTIONS = new ArrayList<>(); + SPECIAL_SECTIONS.add(METADATA); + ADDITIONAL_SECTIONS.put("tosca_simple_yaml_1_0",SPECIAL_SECTIONS); + ADDITIONAL_SECTIONS.putAll(exttools.getSections()); + + //long startTime = System.nanoTime(); + + + isFile = aFile; + inputPath = null; + path = null; + tpl = null; + csarTempDir = null; + nestedToscaTplsWithTopology = new LinkedHashMap(); + nestedToscaTemplatesWithTopology = new ArrayList(); + + if(_path != null && !_path.isEmpty()) { + // save the original 
input path + inputPath = _path; + // get the actual path (will change with CSAR) + path = _getPath(_path); + // load the YAML template + if (path != null && !path.isEmpty()) { + try { + //System.out.println("Loading YAML file " + path); + log.debug("ToscaTemplate Loading YAMEL file {}", path); + InputStream input = new FileInputStream(new File(path)); + Yaml yaml = new Yaml(); + Object data = yaml.load(input); + this.tpl = (LinkedHashMap) data; + } + catch (FileNotFoundException e) { + log.error("ToscaTemplate - Exception loading yaml: {}", e.getMessage()); + return; + } + catch(Exception e) { + log.error("ToscaTemplate - Error loading yaml, aborting"); + return; + } + + if(yamlDictTpl != null) { + //msg = (_('Both path and yaml_dict_tpl arguments were ' + // 'provided. Using path and ignoring yaml_dict_tpl.')) + //log.info(msg) + log.debug("ToscaTemplate - Both path and yaml_dict_tpl arguments were provided. Using path and ignoring yaml_dict_tpl"); + } + } + else { + // no input to process... + _abort(); + } + } + else { + if(yamlDictTpl != null) { + tpl = yamlDictTpl; + } + else { + ThreadLocalsHolder.getCollector().appendException( + "ValueError: No path or yaml_dict_tpl was provided. There is nothing to parse"); + log.debug("ToscaTemplate ValueError: No path or yaml_dict_tpl was provided. 
There is nothing to parse"); + + } + } + + if(tpl != null) { + parsedParams = _parsedParams; + _validateField(); + this.version = _tplVersion(); + this.metaData = _tplMetaData(); + this.relationshipTypes = _tplRelationshipTypes(); + this.description = _tplDescription(); + this.topologyTemplate = _topologyTemplate(); + this.repositories = _tplRepositories(); + if(topologyTemplate.getTpl() != null) { + this.inputs = _inputs(); + this.relationshipTemplates = _relationshipTemplates(); + this.nodeTemplates = _nodeTemplates(); + this.outputs = _outputs(); + this.policies = _policies(); + _handleNestedToscaTemplatesWithTopology(); + graph = new ToscaGraph(nodeTemplates); + } + } + + if(csarTempDir != null) { + CSAR.deleteDir(new File(csarTempDir)); + csarTempDir = null; + } + + verifyTemplate(); + + } + + private void _abort() throws JToscaException { + // print out all exceptions caught + verifyTemplate(); + throw new JToscaException("jtosca aborting", JToscaErrorCodes.PATH_NOT_VALID.getValue()); + } + private TopologyTemplate _topologyTemplate() { + return new TopologyTemplate( + _tplTopologyTemplate(), + _getAllCustomDefs(imports), + relationshipTypes, + parsedParams, + null); + } + + private ArrayList _inputs() { + return topologyTemplate.getInputs(); + } + + private ArrayList _nodeTemplates() { + return topologyTemplate.getNodeTemplates(); + } + + private ArrayList _relationshipTemplates() { + return topologyTemplate.getRelationshipTemplates(); + } + + private ArrayList _outputs() { + return topologyTemplate.getOutputs(); + } + + private String _tplVersion() { + return (String)tpl.get(DEFINITION_VERSION); + } + + @SuppressWarnings("unchecked") + private Metadata _tplMetaData() { + Object mdo = tpl.get(METADATA); + if(mdo instanceof LinkedHashMap) { + return new Metadata((Map)mdo); + } + else { + return null; + } + } + + private String _tplDescription() { + return (String)tpl.get(DESCRIPTION); + } + + private ArrayList _tplImports() { + return 
(ArrayList)tpl.get(IMPORTS); + } + + private ArrayList _tplRepositories() { + LinkedHashMap repositories = + (LinkedHashMap)tpl.get(REPOSITORIES); + ArrayList reposit = new ArrayList<>(); + if(repositories != null) { + for(Map.Entry me: repositories.entrySet()) { + Repository reposits = new Repository(me.getKey(),me.getValue()); + reposit.add(reposits); + } + } + return reposit; + } + + private LinkedHashMap _tplRelationshipTypes() { + return (LinkedHashMap)_getCustomTypes(RELATIONSHIP_TYPES,null); + } + + @SuppressWarnings("unchecked") + private LinkedHashMap _tplRelationshipTemplates() { + return (LinkedHashMap)_tplTopologyTemplate().get(RELATIONSHIP_TEMPLATES); + } + + @SuppressWarnings("unchecked") + private LinkedHashMap _tplTopologyTemplate() { + return (LinkedHashMap)tpl.get(TOPOLOGY_TEMPLATE); + } + + private ArrayList _policies() { + return topologyTemplate.getPolicies(); + } + + private LinkedHashMap _getAllCustomDefs(ArrayList alImports) { + + String types[] = { + IMPORTS, NODE_TYPES, CAPABILITY_TYPES, RELATIONSHIP_TYPES, + DATA_TYPES, INTERFACE_TYPES, POLICY_TYPES, GROUP_TYPES + }; + LinkedHashMap customDefsFinal = new LinkedHashMap(); + LinkedHashMap customDefs = _getCustomTypes(types,alImports); + if(customDefs != null) { + customDefsFinal.putAll(customDefs); + if(customDefs.get(IMPORTS) != null) { + @SuppressWarnings("unchecked") + LinkedHashMap importDefs = _getAllCustomDefs((ArrayList)customDefs.get(IMPORTS)); + customDefsFinal.putAll(importDefs); + } + } + + // As imports are not custom_types, remove from the dict + customDefsFinal.remove(IMPORTS); + + return customDefsFinal; + } + + @SuppressWarnings("unchecked") + private LinkedHashMap _getCustomTypes(Object typeDefinitions,ArrayList alImports) { + + // Handle custom types defined in imported template files + // This method loads the custom type definitions referenced in "imports" + // section of the TOSCA YAML template. 
+ + LinkedHashMap customDefs = new LinkedHashMap(); + ArrayList typeDefs = new ArrayList(); + if(typeDefinitions instanceof String[]) { + for(String s: (String[])typeDefinitions) { + typeDefs.add(s); + } + } + else { + typeDefs.add((String)typeDefinitions); + } + + if(alImports == null) { + alImports = _tplImports(); + } + + if(alImports != null) { + ImportsLoader customService = new ImportsLoader(alImports,path,typeDefs,tpl); + ArrayList> nestedToscaTpls = customService.getNestedToscaTpls(); + _updateNestedToscaTplsWithTopology(nestedToscaTpls); + + customDefs = customService.getCustomDefs(); + if(customDefs == null) { + return null; + } + } + + //Handle custom types defined in current template file + for(String td: typeDefs) { + if(!td.equals(IMPORTS)) { + LinkedHashMap innerCustomTypes = (LinkedHashMap )tpl.get(td); + if(innerCustomTypes != null) { + customDefs.putAll(innerCustomTypes); + } + } + } + return customDefs; + } + + private void _updateNestedToscaTplsWithTopology(ArrayList> nestedToscaTpls) { + for(LinkedHashMap ntpl: nestedToscaTpls) { + // there is just one key:value pair in ntpl + for(Map.Entry me: ntpl.entrySet()) { + String fileName = me.getKey(); + @SuppressWarnings("unchecked") + LinkedHashMap toscaTpl = (LinkedHashMap)me.getValue(); + if(toscaTpl.get(TOPOLOGY_TEMPLATE) != null) { + if(nestedToscaTplsWithTopology.get(fileName) == null) { + nestedToscaTplsWithTopology.putAll(ntpl); + } + } + } + } + } + + // **experimental** (multi level nesting) RECURSIVE - BEWARE OF INIFINITE LOOPS... 
+ private void _handleNestedToscaTemplatesWithTopology2(TopologyTemplate tt) { + if(++nestingLoopCounter > 10) { + log.error("ToscaTemplate - _handleNestedToscaTemplatesWithTopology2 - Nested Topologies Loop: too many levels, aborting"); + return; + } + for(Map.Entry me: nestedToscaTplsWithTopology.entrySet()) { + String fname = me.getKey(); + LinkedHashMap toscaTpl = + (LinkedHashMap)me.getValue(); + for(NodeTemplate nt: tt.getNodeTemplates()) { + if(_isSubMappedNode2(nt,toscaTpl)) { + parsedParams = _getParamsForNestedTemplate(nt); + LinkedHashMap topologyTpl = + (LinkedHashMap)toscaTpl.get(TOPOLOGY_TEMPLATE); + TopologyTemplate topologyWithSubMapping = + new TopologyTemplate(topologyTpl, + _getAllCustomDefs(null), + relationshipTypes, + parsedParams, + nt); + if(topologyWithSubMapping.getSubstitutionMappings() != null) { + // Record nested topology templates in top level template + //nestedToscaTemplatesWithTopology.add(topologyWithSubMapping); + // Set substitution mapping object for mapped node + nt.setSubMappingToscaTemplate2( + topologyWithSubMapping.getSubstitutionMappings()); + _handleNestedToscaTemplatesWithTopology2(topologyWithSubMapping); + } + } + } + } + } + + private void _handleNestedToscaTemplatesWithTopology() { + for(Map.Entry me: nestedToscaTplsWithTopology.entrySet()) { + String fname = me.getKey(); + LinkedHashMap toscaTpl = + (LinkedHashMap)me.getValue(); + for(NodeTemplate nt: nodeTemplates) { + if(_isSubMappedNode(nt,toscaTpl)) { + parsedParams = _getParamsForNestedTemplate(nt); + ArrayList alim = (ArrayList)toscaTpl.get(IMPORTS); + LinkedHashMap topologyTpl = + (LinkedHashMap)toscaTpl.get(TOPOLOGY_TEMPLATE); + TopologyTemplate topologyWithSubMapping = + new TopologyTemplate(topologyTpl, + //_getAllCustomDefs(null), + _getAllCustomDefs(alim), + relationshipTypes, + parsedParams, + nt); + if(topologyWithSubMapping.getSubstitutionMappings() != null) { + // Record nested topology templates in top level template + 
nestedToscaTemplatesWithTopology.add(topologyWithSubMapping); + // Set substitution mapping object for mapped node + nt.setSubMappingToscaTemplate( + topologyWithSubMapping.getSubstitutionMappings()); + } + } + } + } + } + + private void _validateField() { + String sVersion = _tplVersion(); + if(sVersion == null) { + ThreadLocalsHolder.getCollector().appendException(String.format( + "MissingRequiredField: Template is missing required field \"%s\"",DEFINITION_VERSION)); + } + else { + _validateVersion(sVersion); + this.version = sVersion; + } + + for (String sKey : tpl.keySet()) { + boolean bFound = false; + for (String sSection: SECTIONS) { + if(sKey.equals(sSection)) { + bFound = true; + break; + } + } + // check ADDITIONAL_SECTIONS + if(!bFound) { + if(ADDITIONAL_SECTIONS.get(version) != null && + ADDITIONAL_SECTIONS.get(version).contains(sKey)) { + bFound = true; + } + } + if(!bFound) { + ThreadLocalsHolder.getCollector().appendException(String.format( + "UnknownFieldError: Template contains unknown field \"%s\"", + sKey)); + } + } + } + + private void _validateVersion(String sVersion) { + boolean bFound = false; + for(String vtv: VALID_TEMPLATE_VERSIONS) { + if(sVersion.equals(vtv)) { + bFound = true; + break; + } + } + if(!bFound) { + ThreadLocalsHolder.getCollector().appendException(String.format( + "InvalidTemplateVersion: \"%s\" is invalid. 
Valid versions are %s", + sVersion,VALID_TEMPLATE_VERSIONS.toString())); + } + else if(!sVersion.equals("tosca_simple_yaml_1_0")) { + EntityType.updateDefinitions(sVersion); + } + } + + private String _getPath(String _path) throws JToscaException { + if (_path.toLowerCase().endsWith(".yaml") || _path.toLowerCase().endsWith(".yml")) { + return _path; + } + else if (_path.toLowerCase().endsWith(".zip") || _path.toLowerCase().endsWith(".csar")) { + // a CSAR archive + CSAR csar = new CSAR(_path, isFile); + if (csar.validate()) { + try { + csar.decompress(); + metaProperties = csar.getMetaProperties(); + } + catch (IOException e) { + log.error("ToscaTemplate - _getPath - IOException trying to decompress {}", _path); + return null; + } + isFile = true; // the file has been decompressed locally + csar.cleanup(); + csarTempDir = csar.getTempDir(); + return csar.getTempDir() + File.separator + csar.getMainTemplate(); + } + } + else { + ThreadLocalsHolder.getCollector().appendException("ValueError: " + _path + " is not a valid file"); + return null; + } + return null; + } + + private void verifyTemplate() throws JToscaException { + ThreadLocalsHolder.getCollector().setWantTrace(false); + + //Warnings + int warningsCount = ThreadLocalsHolder.getCollector().warningsCaught(); + if (warningsCount > 0) { + List warningsStrings = ThreadLocalsHolder.getCollector().getWarningsReport(); + log.warn("####################################################################################################"); + log.warn("CSAR Warnings found! CSAR name - {}", inputPath); + log.warn("ToscaTemplate - verifyTemplate - {} Parsing Warning{} occurred...", warningsCount, (warningsCount > 1 ? "s" : "")); + for (String s : warningsStrings) { + log.warn("{}. 
CSAR name - {}", s, inputPath); + } + log.warn("####################################################################################################"); + } + + //Criticals + int criticalsCount = ThreadLocalsHolder.getCollector().criticalsCaught(); + if (criticalsCount > 0) { + List criticalStrings = ThreadLocalsHolder.getCollector().getCriticalsReport(); + log.error("####################################################################################################"); + log.error("ToscaTemplate - verifyTemplate - {} Parsing Critical{} occurred...", criticalsCount, (criticalsCount > 1 ? "s" : "")); + for (String s : criticalStrings) { + log.error("{}. CSAR name - {}", s, inputPath); + } + throw new JToscaException(String.format("CSAR Validation Failed. CSAR name - {}. Please check logs for details.", inputPath), JToscaErrorCodes.CSAR_TOSCA_VALIDATION_ERROR.getValue()); + } + } + + public String getPath() { + return path; + } + + public String getVersion() { + return version; + } + + public String getDescription() { + return description; + } + + public TopologyTemplate getTopologyTemplate() { + return topologyTemplate; + } + + public Metadata getMetaData() { + return metaData; + } + + public ArrayList getInputs() { + return inputs; + } + + public ArrayList getOutputs() { + return outputs; + } + + public ArrayList getPolicies() { + return policies; + } + + public ArrayList getNodeTemplates() { + return nodeTemplates; + } + + public LinkedHashMap getMetaProperties(String propertiesFile) { + return metaProperties.get(propertiesFile); + } + + private boolean _isSubMappedNode(NodeTemplate nt,LinkedHashMap toscaTpl) { + // Return True if the nodetemple is substituted + if(nt != null && nt.getSubMappingToscaTemplate() == null && + getSubMappingNodeType(toscaTpl).equals(nt.getType()) && + nt.getInterfaces().size() < 1) { + return true; + } + return false; + } + + private boolean _isSubMappedNode2(NodeTemplate nt,LinkedHashMap toscaTpl) { + // Return True if the nodetemple 
is substituted + if(nt != null && nt.getSubMappingToscaTemplate2() == null && + getSubMappingNodeType(toscaTpl).equals(nt.getType()) && + nt.getInterfaces().size() < 1) { + return true; + } + return false; + } + + private LinkedHashMap _getParamsForNestedTemplate(NodeTemplate nt) { + // Return total params for nested_template + LinkedHashMap pparams; + if(parsedParams != null) { + pparams = parsedParams; + } + else { + pparams = new LinkedHashMap(); + } + if(nt != null) { + for(String pname: nt.getProperties().keySet()) { + pparams.put(pname,nt.getPropertyValue(pname)); + } + } + return pparams; + } + + private String getSubMappingNodeType(LinkedHashMap toscaTpl) { + // Return substitution mappings node type + if(toscaTpl != null) { + return TopologyTemplate.getSubMappingNodeType( + (LinkedHashMap)toscaTpl.get(TOPOLOGY_TEMPLATE)); + } + return null; + } + + private boolean _hasSubstitutionMapping() { + // Return True if the template has valid substitution mappings + return topologyTemplate != null && + topologyTemplate.getSubstitutionMappings() != null; + } + + public boolean hasNestedTemplates() { + // Return True if the tosca template has nested templates + return nestedToscaTemplatesWithTopology != null && + nestedToscaTemplatesWithTopology.size() >= 1; + + } + + public ArrayList getNestedTemplates() { + return nestedToscaTemplatesWithTopology; + } + + @Override + public String toString() { + return "ToscaTemplate{" + + "exttools=" + exttools + + ", VALID_TEMPLATE_VERSIONS=" + VALID_TEMPLATE_VERSIONS + + ", ADDITIONAL_SECTIONS=" + ADDITIONAL_SECTIONS + + ", isFile=" + isFile + + ", path='" + path + '\'' + + ", inputPath='" + inputPath + '\'' + + ", parsedParams=" + parsedParams + + ", tpl=" + tpl + + ", version='" + version + '\'' + + ", imports=" + imports + + ", relationshipTypes=" + relationshipTypes + + ", metaData=" + metaData + + ", description='" + description + '\'' + + ", topologyTemplate=" + topologyTemplate + + ", repositories=" + repositories + + ", 
inputs=" + inputs + + ", relationshipTemplates=" + relationshipTemplates + + ", nodeTemplates=" + nodeTemplates + + ", outputs=" + outputs + + ", policies=" + policies + + ", nestedToscaTplsWithTopology=" + nestedToscaTplsWithTopology + + ", nestedToscaTemplatesWithTopology=" + nestedToscaTemplatesWithTopology + + ", graph=" + graph + + ", csarTempDir='" + csarTempDir + '\'' + + ", nestingLoopCounter=" + nestingLoopCounter + + '}'; + } +} + +/*python + +import logging +import os + +from copy import deepcopy +from toscaparser.common.exception import ExceptionCollector.collector +from toscaparser.common.exception import InvalidTemplateVersion +from toscaparser.common.exception import MissingRequiredFieldError +from toscaparser.common.exception import UnknownFieldError +from toscaparser.common.exception import ValidationError +from toscaparser.elements.entity_type import update_definitions +from toscaparser.extensions.exttools import ExtTools +import org.openecomp.sdc.toscaparser.api.imports +from toscaparser.prereq.csar import CSAR +from toscaparser.repositories import Repository +from toscaparser.topology_template import TopologyTemplate +from toscaparser.tpl_relationship_graph import ToscaGraph +from toscaparser.utils.gettextutils import _ +import org.openecomp.sdc.toscaparser.api.utils.yamlparser + + +# TOSCA template key names +SECTIONS = (DEFINITION_VERSION, DEFAULT_NAMESPACE, TEMPLATE_NAME, + TOPOLOGY_TEMPLATE, TEMPLATE_AUTHOR, TEMPLATE_VERSION, + DESCRIPTION, IMPORTS, DSL_DEFINITIONS, NODE_TYPES, + RELATIONSHIP_TYPES, RELATIONSHIP_TEMPLATES, + CAPABILITY_TYPES, ARTIFACT_TYPES, DATA_TYPES, INTERFACE_TYPES, + POLICY_TYPES, GROUP_TYPES, REPOSITORIES) = \ + ('tosca_definitions_version', 'tosca_default_namespace', + 'template_name', 'topology_template', 'template_author', + 'template_version', 'description', 'imports', 'dsl_definitions', + 'node_types', 'relationship_types', 'relationship_templates', + 'capability_types', 'artifact_types', 'data_types', + 
'interface_types', 'policy_types', 'group_types', 'repositories') +# Sections that are specific to individual template definitions +SPECIAL_SECTIONS = (METADATA) = ('metadata') + +log = logging.getLogger("tosca.model") + +YAML_LOADER = toscaparser.utils.yamlparser.load_yaml + + +class ToscaTemplate(object): + exttools = ExtTools() + + VALID_TEMPLATE_VERSIONS = ['tosca_simple_yaml_1_0'] + + VALID_TEMPLATE_VERSIONS.extend(exttools.get_versions()) + + ADDITIONAL_SECTIONS = {'tosca_simple_yaml_1_0': SPECIAL_SECTIONS} + + ADDITIONAL_SECTIONS.update(exttools.get_sections()) + + '''Load the template data.''' + def __init__(self, path=None, parsed_params=None, a_file=True, + yaml_dict_tpl=None): + + ExceptionCollector.collector.start() + self.a_file = a_file + self.input_path = None + self.path = None + self.tpl = None + self.nested_tosca_tpls_with_topology = {} + self.nested_tosca_templates_with_topology = [] + if path: + self.input_path = path + self.path = self._get_path(path) + if self.path: + self.tpl = YAML_LOADER(self.path, self.a_file) + if yaml_dict_tpl: + msg = (_('Both path and yaml_dict_tpl arguments were ' + 'provided. Using path and ignoring yaml_dict_tpl.')) + log.info(msg) + print(msg) + else: + if yaml_dict_tpl: + self.tpl = yaml_dict_tpl + else: + ExceptionCollector.collector.appendException( + ValueError(_('No path or yaml_dict_tpl was provided. 
' + 'There is nothing to parse.'))) + + if self.tpl: + self.parsed_params = parsed_params + self._validate_field() + self.version = self._tpl_version() + self.relationship_types = self._tpl_relationship_types() + self.description = self._tpl_description() + self.topology_template = self._topology_template() + self.repositories = self._tpl_repositories() + if self.topology_template.tpl: + self.inputs = self._inputs() + self.relationship_templates = self._relationship_templates() + self.nodetemplates = self._nodetemplates() + self.outputs = self._outputs() + self._handle_nested_tosca_templates_with_topology() + self.graph = ToscaGraph(self.nodetemplates) + + ExceptionCollector.collector.stop() + self.verify_template() + + def _topology_template(self): + return TopologyTemplate(self._tpl_topology_template(), + self._get_all_custom_defs(), + self.relationship_types, + self.parsed_params, + None) + + def _inputs(self): + return self.topology_template.inputs + + def _nodetemplates(self): + return self.topology_template.nodetemplates + + def _relationship_templates(self): + return self.topology_template.relationship_templates + + def _outputs(self): + return self.topology_template.outputs + + def _tpl_version(self): + return self.tpl.get(DEFINITION_VERSION) + + def _tpl_description(self): + desc = self.tpl.get(DESCRIPTION) + if desc: + return desc.rstrip() + + def _tpl_imports(self): + return self.tpl.get(IMPORTS) + + def _tpl_repositories(self): + repositories = self.tpl.get(REPOSITORIES) + reposit = [] + if repositories: + for name, val in repositories.items(): + reposits = Repository(name, val) + reposit.append(reposits) + return reposit + + def _tpl_relationship_types(self): + return self._get_custom_types(RELATIONSHIP_TYPES) + + def _tpl_relationship_templates(self): + topology_template = self._tpl_topology_template() + return topology_template.get(RELATIONSHIP_TEMPLATES) + + def _tpl_topology_template(self): + return self.tpl.get(TOPOLOGY_TEMPLATE) + + def 
_get_all_custom_defs(self, imports=None): + types = [IMPORTS, NODE_TYPES, CAPABILITY_TYPES, RELATIONSHIP_TYPES, + DATA_TYPES, INTERFACE_TYPES, POLICY_TYPES, GROUP_TYPES] + custom_defs_final = {} + custom_defs = self._get_custom_types(types, imports) + if custom_defs: + custom_defs_final.update(custom_defs) + if custom_defs.get(IMPORTS): + import_defs = self._get_all_custom_defs( + custom_defs.get(IMPORTS)) + custom_defs_final.update(import_defs) + + # As imports are not custom_types, removing from the dict + custom_defs_final.pop(IMPORTS, None) + return custom_defs_final + + def _get_custom_types(self, type_definitions, imports=None): + """Handle custom types defined in imported template files + + This method loads the custom type definitions referenced in "imports" + section of the TOSCA YAML template. + """ + custom_defs = {} + type_defs = [] + if not isinstance(type_definitions, list): + type_defs.append(type_definitions) + else: + type_defs = type_definitions + + if not imports: + imports = self._tpl_imports() + + if imports: + custom_service = toscaparser.imports.\ + ImportsLoader(imports, self.path, + type_defs, self.tpl) + + nested_tosca_tpls = custom_service.get_nested_tosca_tpls() + self._update_nested_tosca_tpls_with_topology(nested_tosca_tpls) + + custom_defs = custom_service.get_custom_defs() + if not custom_defs: + return + + # Handle custom types defined in current template file + for type_def in type_defs: + if type_def != IMPORTS: + inner_custom_types = self.tpl.get(type_def) or {} + if inner_custom_types: + custom_defs.update(inner_custom_types) + return custom_defs + + def _update_nested_tosca_tpls_with_topology(self, nested_tosca_tpls): + for tpl in nested_tosca_tpls: + filename, tosca_tpl = list(tpl.items())[0] + if (tosca_tpl.get(TOPOLOGY_TEMPLATE) and + filename not in list( + self.nested_tosca_tpls_with_topology.keys())): + self.nested_tosca_tpls_with_topology.update(tpl) + + def _handle_nested_tosca_templates_with_topology(self): + for 
fname, tosca_tpl in self.nested_tosca_tpls_with_topology.items(): + for nodetemplate in self.nodetemplates: + if self._is_sub_mapped_node(nodetemplate, tosca_tpl): + parsed_params = self._get_params_for_nested_template( + nodetemplate) + topology_tpl = tosca_tpl.get(TOPOLOGY_TEMPLATE) + topology_with_sub_mapping = TopologyTemplate( + topology_tpl, + self._get_all_custom_defs(), + self.relationship_types, + parsed_params, + nodetemplate) + if topology_with_sub_mapping.substitution_mappings: + # Record nested topo templates in top level template + self.nested_tosca_templates_with_topology.\ + append(topology_with_sub_mapping) + # Set substitution mapping object for mapped node + nodetemplate.sub_mapping_tosca_template = \ + topology_with_sub_mapping.substitution_mappings + + def _validate_field(self): + version = self._tpl_version() + if not version: + ExceptionCollector.collector.appendException( + MissingRequiredFieldError(what='Template', + required=DEFINITION_VERSION)) + else: + self._validate_version(version) + self.version = version + + for name in self.tpl: + if (name not in SECTIONS and + name not in self.ADDITIONAL_SECTIONS.get(version, ())): + ExceptionCollector.collector.appendException( + UnknownFieldError(what='Template', field=name)) + + def _validate_version(self, version): + if version not in self.VALID_TEMPLATE_VERSIONS: + ExceptionCollector.collector.appendException( + InvalidTemplateVersion( + what=version, + valid_versions=', '. 
join(self.VALID_TEMPLATE_VERSIONS))) + else: + if version != 'tosca_simple_yaml_1_0': + update_definitions(version) + + def _get_path(self, path): + if path.lower().endswith(('.yaml','.yml')): + return path + elif path.lower().endswith(('.zip', '.csar')): + # a CSAR archive + csar = CSAR(path, self.a_file) + if csar.validate(): + csar.decompress() + self.a_file = True # the file has been decompressed locally + return os.path.join(csar.temp_dir, csar.get_main_template()) + else: + ExceptionCollector.collector.appendException( + ValueError(_('"%(path)s" is not a valid file.') + % {'path': path})) + + def verify_template(self): + if ExceptionCollector.collector.exceptionsCaught(): + if self.input_path: + raise ValidationError( + message=(_('\nThe input "%(path)s" failed validation with ' + 'the following error(s): \n\n\t') + % {'path': self.input_path}) + + '\n\t'.join(ExceptionCollector.collector.getExceptionsReport())) + else: + raise ValidationError( + message=_('\nThe pre-parsed input failed validation with ' + 'the following error(s): \n\n\t') + + '\n\t'.join(ExceptionCollector.collector.getExceptionsReport())) + else: + if self.input_path: + msg = (_('The input "%(path)s" successfully passed ' + 'validation.') % {'path': self.input_path}) + else: + msg = _('The pre-parsed input successfully passed validation.') + + log.info(msg) + + def _is_sub_mapped_node(self, nodetemplate, tosca_tpl): + """Return True if the nodetemple is substituted.""" + if (nodetemplate and not nodetemplate.sub_mapping_tosca_template and + self.get_sub_mapping_node_type(tosca_tpl) == nodetemplate.type + and len(nodetemplate.interfaces) < 1): + return True + else: + return False + + def _get_params_for_nested_template(self, nodetemplate): + """Return total params for nested_template.""" + parsed_params = deepcopy(self.parsed_params) \ + if self.parsed_params else {} + if nodetemplate: + for pname in nodetemplate.get_properties(): + parsed_params.update({pname: + 
nodetemplate.get_property_value(pname)}) + return parsed_params + + def get_sub_mapping_node_type(self, tosca_tpl): + """Return substitution mappings node type.""" + if tosca_tpl: + return TopologyTemplate.get_sub_mapping_node_type( + tosca_tpl.get(TOPOLOGY_TEMPLATE)) + + def _has_substitution_mappings(self): + """Return True if the template has valid substitution mappings.""" + return self.topology_template is not None and \ + self.topology_template.substitution_mappings is not None + + def has_nested_templates(self): + """Return True if the tosca template has nested templates.""" + return self.nested_tosca_templates_with_topology is not None and \ + len(self.nested_tosca_templates_with_topology) >= 1 +*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/Triggers.java b/src/main/java/org/openecomp/sdc/toscaparser/api/Triggers.java new file mode 100644 index 0000000..0ec0b5a --- /dev/null +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/Triggers.java @@ -0,0 +1,183 @@ +package org.openecomp.sdc.toscaparser.api; + +import java.util.LinkedHashMap; + +import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; +import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; +import org.openecomp.sdc.toscaparser.api.utils.ValidateUtils; + +public class Triggers extends EntityTemplate { + + private static final String DESCRIPTION = "description"; + private static final String EVENT = "event_type"; + private static final String SCHEDULE = "schedule"; + private static final String TARGET_FILTER = "target_filter"; + private static final String CONDITION = "condition"; + private static final String ACTION = "action"; + + private static final String SECTIONS[] = { + DESCRIPTION, EVENT, SCHEDULE, TARGET_FILTER, CONDITION, ACTION + }; + + private static final String METER_NAME = "meter_name"; + private static final String CONSTRAINT = "constraint"; + private static final String PERIOD = "period"; + private static final 
String EVALUATIONS = "evaluations"; + private static final String METHOD = "method"; + private static final String THRESHOLD = "threshold"; + private static final String COMPARISON_OPERATOR = "comparison_operator"; + + private static final String CONDITION_KEYNAMES[] = { + METER_NAME, CONSTRAINT, PERIOD, EVALUATIONS, METHOD, THRESHOLD, COMPARISON_OPERATOR + }; + + private String name; + private LinkedHashMap triggerTpl; + + public Triggers(String _name,LinkedHashMap _triggerTpl) { + super(); // dummy. don't want super + name = _name; + triggerTpl = _triggerTpl; + _validateKeys(); + _validateCondition(); + _validateInput(); + } + + public String getDescription() { + return (String)triggerTpl.get("description"); + } + + public String getEvent() { + return (String)triggerTpl.get("event_type"); + } + + public LinkedHashMap getSchedule() { + return (LinkedHashMap)triggerTpl.get("schedule"); + } + + public LinkedHashMap getTargetFilter() { + return (LinkedHashMap)triggerTpl.get("target_filter"); + } + + public LinkedHashMap getCondition() { + return (LinkedHashMap)triggerTpl.get("condition"); + } + + public LinkedHashMap getAction() { + return (LinkedHashMap)triggerTpl.get("action"); + } + + private void _validateKeys() { + for(String key: triggerTpl.keySet()) { + boolean bFound = false; + for(int i=0; i notAnalyzedExceptions = new HashMap<>(); + private Map criticalExceptions = new HashMap<>(); + private Map warningExceptions = new HashMap<>(); + + private boolean bWantTrace = true; + private String filePath; + + public enum ReportType {WARNING, CRITICAL, NOT_ANALYZED} + + public ExceptionCollector(String filePath) { + this.filePath = filePath; + } + + public void appendException(String exception) { + + addException(exception, ReportType.NOT_ANALYZED); + } + + public void appendCriticalException(String exception) { + + addException(exception, ReportType.CRITICAL); + } + + public void appendWarning(String exception) { + + addException(exception, ReportType.WARNING); + } 
+ + private void addException(String exception, ReportType type) { + + Map exceptions = getExceptionCollection(type); + + if (!exceptions.containsKey(exception)) { + // get stack trace + StackTraceElement[] ste = Thread.currentThread().getStackTrace(); + StringBuilder sb = new StringBuilder(); + // skip the last 2 (getStackTrace and this) + for (int i = 2; i < ste.length; i++) { + sb.append(String.format(" %s(%s:%d)%s", ste[i].getClassName(), ste[i].getFileName(), + ste[i].getLineNumber(), i == ste.length - 1 ? " " : "\n")); + } + exceptions.put(exception, sb.toString()); + } + } + + public List getCriticalsReport() { + + return getReport(ReportType.CRITICAL); + } + + public List getNotAnalyzedExceptionsReport() { + + return getReport(ReportType.NOT_ANALYZED); + } + + public List getWarningsReport() { + + return getReport(ReportType.WARNING); + } + + private List getReport(ReportType type) { + Map collectedExceptions = getExceptionCollection(type); + + List report = new ArrayList<>(); + if (collectedExceptions.size() > 0) { + for (Map.Entry exception : collectedExceptions.entrySet()) { + report.add(exception.getKey()); + if (bWantTrace) { + report.add(exception.getValue()); + } + } + } + + return report; + } + + private Map getExceptionCollection(ReportType type) { + switch (type) { + case WARNING: + return warningExceptions; + case CRITICAL: + return criticalExceptions; + case NOT_ANALYZED: + return notAnalyzedExceptions; + default: + return notAnalyzedExceptions; + } + } + + public int errorsNotAnalyzedCaught() { + return notAnalyzedExceptions.size(); + } + + public int criticalsCaught() { + return criticalExceptions.size(); + } + + public int warningsCaught() { + return warningExceptions.size(); + } + + public void setWantTrace(boolean b) { + bWantTrace = b; + } + +} diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/common/JToscaException.java b/src/main/java/org/openecomp/sdc/toscaparser/api/common/JToscaException.java new file mode 100644 index 
// ===== file: src/main/java/org/openecomp/sdc/toscaparser/api/common/JToscaException.java =====
package org.openecomp.sdc.toscaparser.api.common;

/**
 * Checked exception raised when JTosca parsing or CSAR validation fails.
 * Carries a string error code (JT1001..JT1006, see table below) in addition
 * to the human-readable message.
 */
public class JToscaException extends Exception {

	private static final long serialVersionUID = 1L;

	// Error code identifying the failure category; values documented below.
	private String code;

	/**
	 * @param message human-readable description of the failure
	 * @param code    JTosca error code string (e.g. a JToscaErrorCodes value)
	 */
	public JToscaException(String message, String code) {
		super(message);
		this.code = code;
	}

	public String getCode() {
		return code;
	}

	public void setCode(String code) {
		this.code = code;
	}

	//JT1001 - Meta file missing
	//JT1002 - Invalid yaml content
	//JT1003 - Entry-Definition not defined in meta file
	//JT1004 - Entry-Definition file missing
	//JT1005 - General Error
	//JT1006 - General Error/Path not valid
}

// ===== file: src/main/java/org/openecomp/sdc/toscaparser/api/common/TOSCAException.java =====
package org.openecomp.sdc.toscaparser.api.common;

import java.util.IllegalFormatException;

/**
 * Base TOSCA exception that formats its message from a format string
 * (msgFmt) and the constructor's varargs.
 *
 * NOTE(review): as written, msgFmt is an instance field initialized to null
 * and is never assigned before the constructor runs, so the formatted path
 * can only be reached if a subclass mechanism sets it — confirm intended use.
 */
public class TOSCAException extends Exception {

	private static final long serialVersionUID = 1L;

	private String message = "An unknown exception has occurred";
	private static boolean FATAL_EXCEPTION_FORMAT_ERRORS = false;
	private String msgFmt = null;

	public TOSCAException(String... strings) {
		// BUG FIX: the original called String.format(msgFmt, ...) with msgFmt
		// always null, which throws NullPointerException (NOT
		// IllegalFormatException), so construction always blew up past the
		// catch block. Guard the null format string and keep the default
		// message instead.
		if (msgFmt != null) {
			try {
				message = String.format(msgFmt, (Object[]) strings);
			} catch (IllegalFormatException e) {
				// TODO log
				if (FATAL_EXCEPTION_FORMAT_ERRORS) {
					throw e;
				}
			}
		}
	}

	/** Python-style accessor kept for parity with the ported code. */
	public String __str__() {
		return message;
	}

	public static void generate_inv_schema_property_error(String name, String attr, String value, String valid_values) {
		//TODO
	}

	/** When true, bad format strings are rethrown instead of swallowed. */
	public static void setFatalFormatException(boolean flag) {
		FATAL_EXCEPTION_FORMAT_ERRORS = flag;
	}
}
a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/ArtifactTypeDef.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/ArtifactTypeDef.java new file mode 100644 index 0000000..8a13d99 --- /dev/null +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/ArtifactTypeDef.java @@ -0,0 +1,105 @@ +package org.openecomp.sdc.toscaparser.api.elements; + +import java.util.LinkedHashMap; + +public class ArtifactTypeDef extends StatefulEntityType { + + private String type; + private LinkedHashMap customDef; + private LinkedHashMap properties; + private LinkedHashMap parentArtifacts; + + + + public ArtifactTypeDef(String atype,LinkedHashMap _customDef) { + super(atype,ARTIFACT_PREFIX,_customDef); + + type = atype; + customDef = _customDef; + properties = null; + if(defs != null) { + properties = (LinkedHashMap)defs.get(PROPERTIES); + } + parentArtifacts = _getParentArtifacts(); + } + + private LinkedHashMap _getParentArtifacts() { + LinkedHashMap artifacts = new LinkedHashMap<>(); + String parentArtif = null; + if(getParentType() != null) { + parentArtif = getParentType().getType(); + } + if(parentArtif != null && !parentArtif.isEmpty()) { + while(!parentArtif.equals("tosca.artifacts.Root")) { + Object ob = TOSCA_DEF.get(parentArtif); + artifacts.put(parentArtif,ob); + parentArtif = + (String)((LinkedHashMap)ob).get("derived_from"); + } + } + return artifacts; + } + + public ArtifactTypeDef getParentType() { + // Return a artifact entity from which this entity is derived + if(defs == null) { + return null; + } + String partifactEntity = derivedFrom(defs); + if(partifactEntity != null) { + return new ArtifactTypeDef(partifactEntity,customDef); + } + return null; + } + + public Object getArtifact(String name) { + // Return the definition of an artifact field by name + if(defs != null) { + return defs.get(name); + } + return null; + } + + public String getType() { + return type; + } + +} + +/*python +class ArtifactTypeDef(StatefulEntityType): + '''TOSCA 
built-in artifacts type.''' + + def __init__(self, atype, custom_def=None): + super(ArtifactTypeDef, self).__init__(atype, self.ARTIFACT_PREFIX, + custom_def) + self.type = atype + self.custom_def = custom_def + self.properties = None + if self.PROPERTIES in self.defs: + self.properties = self.defs[self.PROPERTIES] + self.parent_artifacts = self._get_parent_artifacts() + + def _get_parent_artifacts(self): + artifacts = {} + parent_artif = self.parent_type.type if self.parent_type else None + if parent_artif: + while parent_artif != 'tosca.artifacts.Root': + artifacts[parent_artif] = self.TOSCA_DEF[parent_artif] + parent_artif = artifacts[parent_artif]['derived_from'] + return artifacts + + @property + def parent_type(self): + '''Return a artifact entity from which this entity is derived.''' + if not hasattr(self, 'defs'): + return None + partifact_entity = self.derived_from(self.defs) + if partifact_entity: + return ArtifactTypeDef(partifact_entity, self.custom_def) + + def get_artifact(self, name): + '''Return the definition of an artifact field by name.''' + if name in self.defs: + return self.defs[name] +*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/AttributeDef.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/AttributeDef.java new file mode 100644 index 0000000..5551908 --- /dev/null +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/AttributeDef.java @@ -0,0 +1,40 @@ +package org.openecomp.sdc.toscaparser.api.elements; + +import java.util.LinkedHashMap; + +public class AttributeDef { + // TOSCA built-in Attribute type + + private String name; + private Object value; + private LinkedHashMap schema; + + public AttributeDef(String adName, Object adValue, LinkedHashMap adSchema) { + name = adName; + value = adValue; + schema = adSchema; + } + + public String getName() { + return name; + } + + public Object getValue() { + return value; + } + + public LinkedHashMap getSchema() { + 
return schema; + } +} + +/*python + +class AttributeDef(object): + '''TOSCA built-in Attribute type.''' + + def __init__(self, name, value=None, schema=None): + self.name = name + self.value = value + self.schema = schema +*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/CapabilityTypeDef.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/CapabilityTypeDef.java new file mode 100644 index 0000000..03e2c45 --- /dev/null +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/CapabilityTypeDef.java @@ -0,0 +1,222 @@ +package org.openecomp.sdc.toscaparser.api.elements; + +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.Map; + +import org.openecomp.sdc.toscaparser.api.elements.PropertyDef; + +public class CapabilityTypeDef extends StatefulEntityType { + // TOSCA built-in capabilities type + + private static final String TOSCA_TYPEURI_CAPABILITY_ROOT = "tosca.capabilities.Root"; + + private String name; + private String nodetype; + private LinkedHashMap customDef; + private LinkedHashMap properties; + private LinkedHashMap parentCapabilities; + + @SuppressWarnings("unchecked") + public CapabilityTypeDef(String cname,String ctype,String ntype,LinkedHashMap ccustomDef) { + super(ctype,CAPABILITY_PREFIX,ccustomDef); + + name = cname; + nodetype = ntype; + properties = null; + customDef = ccustomDef; + if(defs != null) { + properties = (LinkedHashMap)defs.get(PROPERTIES); + } + parentCapabilities = _getParentCapabilities(customDef); + } + + @SuppressWarnings("unchecked") + public ArrayList getPropertiesDefObjects () { + // Return a list of property definition objects + ArrayList propsdefs = new ArrayList<>(); + LinkedHashMap parentProperties = new LinkedHashMap<>(); + if(parentCapabilities != null) { + for(Map.Entry me: parentCapabilities.entrySet()) { + parentProperties.put(me.getKey(),((LinkedHashMap)me.getValue()).get("properties")); + } + } + if(properties != null) { + 
for(Map.Entry me: properties.entrySet()) { + propsdefs.add(new PropertyDef(me.getKey(),null,(LinkedHashMap)me.getValue())); + } + } + if(parentProperties != null) { + for(Map.Entry me: parentProperties.entrySet()) { + LinkedHashMap props = (LinkedHashMap)me.getValue(); + for(Map.Entry pe: props.entrySet()) { + String prop = pe.getKey(); + LinkedHashMap schema = (LinkedHashMap)pe.getValue(); + // add parent property if not overridden by children type + if(properties == null || properties.get(prop) == null) { + propsdefs.add(new PropertyDef(prop, null, schema)); + } + } + } + } + return propsdefs; + } + + public LinkedHashMap getPropertiesDef() { + LinkedHashMap pds = new LinkedHashMap<>(); + for(PropertyDef pd: getPropertiesDefObjects()) { + pds.put(pd.getName(),pd); + } + return pds; + } + + public PropertyDef getPropertyDefValue(String pdname) { + // Return the definition of a given property name + LinkedHashMap propsDef = getPropertiesDef(); + if(propsDef != null && propsDef.get(pdname) != null) { + return (PropertyDef)propsDef.get(pdname).getPDValue(); + } + return null; + } + + @SuppressWarnings("unchecked") + private LinkedHashMap _getParentCapabilities(LinkedHashMap customDef) { + LinkedHashMap capabilities = new LinkedHashMap<>(); + CapabilityTypeDef parentCap = getParentType(); + if(parentCap != null) { + String sParentCap = parentCap.getType(); + while(!sParentCap.equals(TOSCA_TYPEURI_CAPABILITY_ROOT)) { + if(TOSCA_DEF.get(sParentCap) != null) { + capabilities.put(sParentCap,TOSCA_DEF.get(sParentCap)); + } + else if(customDef != null && customDef.get(sParentCap) != null) { + capabilities.put(sParentCap,customDef.get(sParentCap)); + } + sParentCap = (String)((LinkedHashMap)capabilities.get(sParentCap)).get("derived_from"); + } + } + return capabilities; + } + + public CapabilityTypeDef getParentType() { + // Return a capability this capability is derived from + if(defs == null) { + return null; + } + String pnode = derivedFrom(defs); + if(pnode != null && 
!pnode.isEmpty()) { + return new CapabilityTypeDef(name, pnode, nodetype, customDef); + } + return null; + } + + public boolean inheritsFrom(ArrayList typeNames) { + // Check this capability is in type_names + + // Check if this capability or some of its parent types + // are in the list of types: type_names + if(typeNames.contains(getType())) { + return true; + } + else if(getParentType() != null) { + return getParentType().inheritsFrom(typeNames); + } + return false; + } + + // getters/setters + + public LinkedHashMap getProperties() { + return properties; + } + + public String getName() { + return name; + } +} + +/*python +from toscaparser.elements.property_definition import PropertyDef +from toscaparser.elements.statefulentitytype import StatefulEntityType + + +class CapabilityTypeDef(StatefulEntityType): + '''TOSCA built-in capabilities type.''' + TOSCA_TYPEURI_CAPABILITY_ROOT = 'tosca.capabilities.Root' + + def __init__(self, name, ctype, ntype, custom_def=None): + self.name = name + super(CapabilityTypeDef, self).__init__(ctype, self.CAPABILITY_PREFIX, + custom_def) + self.nodetype = ntype + self.properties = None + self.custom_def = custom_def + if self.PROPERTIES in self.defs: + self.properties = self.defs[self.PROPERTIES] + self.parent_capabilities = self._get_parent_capabilities(custom_def) + + def get_properties_def_objects(self): + '''Return a list of property definition objects.''' + properties = [] + parent_properties = {} + if self.parent_capabilities: + for type, value in self.parent_capabilities.items(): + parent_properties[type] = value.get('properties') + if self.properties: + for prop, schema in self.properties.items(): + properties.append(PropertyDef(prop, None, schema)) + if parent_properties: + for parent, props in parent_properties.items(): + for prop, schema in props.items(): + # add parent property if not overridden by children type + if not self.properties or \ + prop not in self.properties.keys(): + properties.append(PropertyDef(prop, 
None, schema)) + return properties + + def get_properties_def(self): + '''Return a dictionary of property definition name-object pairs.''' + return {prop.name: prop + for prop in self.get_properties_def_objects()} + + def get_property_def_value(self, name): + '''Return the definition of a given property name.''' + props_def = self.get_properties_def() + if props_def and name in props_def: + return props_def[name].value + + def _get_parent_capabilities(self, custom_def=None): + capabilities = {} + parent_cap = self.parent_type + if parent_cap: + parent_cap = parent_cap.type + while parent_cap != self.TOSCA_TYPEURI_CAPABILITY_ROOT: + if parent_cap in self.TOSCA_DEF.keys(): + capabilities[parent_cap] = self.TOSCA_DEF[parent_cap] + elif custom_def and parent_cap in custom_def.keys(): + capabilities[parent_cap] = custom_def[parent_cap] + parent_cap = capabilities[parent_cap]['derived_from'] + return capabilities + + @property + def parent_type(self): + '''Return a capability this capability is derived from.''' + if not hasattr(self, 'defs'): + return None + pnode = self.derived_from(self.defs) + if pnode: + return CapabilityTypeDef(self.name, pnode, + self.nodetype, self.custom_def) + + def inherits_from(self, type_names): + '''Check this capability is in type_names + + Check if this capability or some of its parent types + are in the list of types: type_names + ''' + if self.type in type_names: + return True + elif self.parent_type: + return self.parent_type.inherits_from(type_names) + else: + return False*/ diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/DataType.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/DataType.java new file mode 100644 index 0000000..d5d770b --- /dev/null +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/DataType.java @@ -0,0 +1,116 @@ +package org.openecomp.sdc.toscaparser.api.elements; + +import java.util.ArrayList; +import java.util.LinkedHashMap; + +public class DataType extends 
StatefulEntityType { + + LinkedHashMap customDef; + + public DataType(String _dataTypeName,LinkedHashMap _customDef) { + super(_dataTypeName,DATATYPE_NETWORK_PREFIX,_customDef); + + customDef = _customDef; + } + + public DataType getParentType() { + // Return a datatype this datatype is derived from + if(defs != null) { + String ptype = derivedFrom(defs); + if(ptype != null) { + return new DataType(ptype,customDef); + } + } + return null; + } + + public String getValueType() { + // Return 'type' section in the datatype schema + if(defs != null) { + return (String)entityValue(defs,"type"); + } + return null; + } + + public ArrayList getAllPropertiesObjects() { + //Return all properties objects defined in type and parent type + ArrayList propsDef = getPropertiesDefObjects(); + DataType ptype = getParentType(); + while(ptype != null) { + propsDef.addAll(ptype.getPropertiesDefObjects()); + ptype = ptype.getParentType(); + } + return propsDef; + } + + public LinkedHashMap getAllProperties() { + // Return a dictionary of all property definition name-object pairs + LinkedHashMap pno = new LinkedHashMap<>(); + for(PropertyDef pd: getAllPropertiesObjects()) { + pno.put(pd.getName(),pd); + } + return pno; + } + + public Object getAllPropertyValue(String name) { + // Return the value of a given property name + LinkedHashMap propsDef = getAllProperties(); + if(propsDef != null && propsDef.get(name) != null) { + return propsDef.get(name).getPDValue(); + } + return null; + } + + public LinkedHashMap getDefs() { + return defs; + } + +} + +/*python + +from toscaparser.elements.statefulentitytype import StatefulEntityType + + +class DataType(StatefulEntityType): + '''TOSCA built-in and user defined complex data type.''' + + def __init__(self, datatypename, custom_def=None): + super(DataType, self).__init__(datatypename, + self.DATATYPE_NETWORK_PREFIX, + custom_def) + self.custom_def = custom_def + + @property + def parent_type(self): + '''Return a datatype this datatype is derived 
from.''' + ptype = self.derived_from(self.defs) + if ptype: + return DataType(ptype, self.custom_def) + return None + + @property + def value_type(self): + '''Return 'type' section in the datatype schema.''' + return self.entity_value(self.defs, 'type') + + def get_all_properties_objects(self): + '''Return all properties objects defined in type and parent type.''' + props_def = self.get_properties_def_objects() + ptype = self.parent_type + while ptype: + props_def.extend(ptype.get_properties_def_objects()) + ptype = ptype.parent_type + return props_def + + def get_all_properties(self): + '''Return a dictionary of all property definition name-object pairs.''' + return {prop.name: prop + for prop in self.get_all_properties_objects()} + + def get_all_property_value(self, name): + '''Return the value of a given property name.''' + props_def = self.get_all_properties() + if props_def and name in props_def.key(): + return props_def[name].value +*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/EntityType.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/EntityType.java new file mode 100644 index 0000000..650166d --- /dev/null +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/EntityType.java @@ -0,0 +1,418 @@ +package org.openecomp.sdc.toscaparser.api.elements; + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileNotFoundException; +import java.io.InputStream; +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.Map; + +import org.openecomp.sdc.toscaparser.api.extensions.ExtTools; +import org.openecomp.sdc.toscaparser.api.utils.CopyUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.yaml.snakeyaml.Yaml; + +public class EntityType { + + private static Logger log = LoggerFactory.getLogger(EntityType.class.getName()); + + private static final String TOSCA_DEFINITION_1_0_YAML = "TOSCA_definition_1_0.yaml"; + protected 
static final String DERIVED_FROM = "derived_from"; + protected static final String PROPERTIES = "properties"; + protected static final String ATTRIBUTES = "attributes"; + protected static final String REQUIREMENTS = "requirements"; + protected static final String INTERFACES = "interfaces"; + protected static final String CAPABILITIES = "capabilities"; + protected static final String TYPE = "type"; + protected static final String ARTIFACTS = "artifacts"; + + @SuppressWarnings("unused") + private static final String SECTIONS[] = { + DERIVED_FROM, PROPERTIES, ATTRIBUTES, REQUIREMENTS, + INTERFACES, CAPABILITIES, TYPE, ARTIFACTS + }; + + public static final String TOSCA_DEF_SECTIONS[] = { + "node_types", "data_types", "artifact_types", + "group_types", "relationship_types", + "capability_types", "interface_types", + "policy_types"}; + + + // TOSCA definition file + //private final static String path = EntityType.class.getProtectionDomain().getCodeSource().getLocation().getPath(); + + //private final static String path = EntityType.class.getClassLoader().getResource("TOSCA_definition_1_0.yaml").getFile(); + //private final static String TOSCA_DEF_FILE = EntityType.class.getClassLoader().getResourceAsStream("TOSCA_definition_1_0.yaml"); + + private static LinkedHashMap TOSCA_DEF_LOAD_AS_IS = loadTdf(); + + //EntityType.class.getClassLoader().getResourceAsStream("TOSCA_definition_1_0.yaml"); + + @SuppressWarnings("unchecked") + private static LinkedHashMap loadTdf() { + String toscaDefLocation = EntityType.class.getClassLoader().getResource(TOSCA_DEFINITION_1_0_YAML).getFile(); + InputStream input = EntityType.class.getClassLoader().getResourceAsStream(TOSCA_DEFINITION_1_0_YAML); + if (input == null){ + log.error("EntityType - loadTdf - Couldn't load TOSCA_DEF_FILE {}", toscaDefLocation); + } + Yaml yaml = new Yaml(); + Object loaded = yaml.load(input); + //@SuppressWarnings("unchecked") + return (LinkedHashMap) loaded; + } + + // Map of definition with pre-loaded values 
of TOSCA_DEF_FILE_SECTIONS + public static LinkedHashMap TOSCA_DEF; + static { + TOSCA_DEF = new LinkedHashMap(); + for(String section: TOSCA_DEF_SECTIONS) { + @SuppressWarnings("unchecked") + LinkedHashMap value = (LinkedHashMap)TOSCA_DEF_LOAD_AS_IS.get(section); + if(value != null) { + for(String key: value.keySet()) { + TOSCA_DEF.put(key, value.get(key)); + } + } + } + } + + public static final String DEPENDSON = "tosca.relationships.DependsOn"; + public static final String HOSTEDON = "tosca.relationships.HostedOn"; + public static final String CONNECTSTO = "tosca.relationships.ConnectsTo"; + public static final String ATTACHESTO = "tosca.relationships.AttachesTo"; + public static final String LINKSTO = "tosca.relationships.network.LinksTo"; + public static final String BINDSTO = "tosca.relationships.network.BindsTo"; + + public static final String RELATIONSHIP_TYPE[] = { + "tosca.relationships.DependsOn", + "tosca.relationships.HostedOn", + "tosca.relationships.ConnectsTo", + "tosca.relationships.AttachesTo", + "tosca.relationships.network.LinksTo", + "tosca.relationships.network.BindsTo"}; + + public static final String NODE_PREFIX = "tosca.nodes."; + public static final String RELATIONSHIP_PREFIX = "tosca.relationships."; + public static final String CAPABILITY_PREFIX = "tosca.capabilities."; + public static final String INTERFACE_PREFIX = "tosca.interfaces."; + public static final String ARTIFACT_PREFIX = "tosca.artifacts."; + public static final String POLICY_PREFIX = "tosca.policies."; + public static final String GROUP_PREFIX = "tosca.groups."; + //currently the data types are defined only for network + // but may have changes in the future. 
+ public static final String DATATYPE_PREFIX = "tosca.datatypes."; + public static final String DATATYPE_NETWORK_PREFIX = DATATYPE_PREFIX + "network."; + public static final String TOSCA = "tosca"; + + protected String type; + protected LinkedHashMap defs = null; + public Object getParentType() { return null; } + + public String derivedFrom(LinkedHashMap defs) { + // Return a type this type is derived from + return (String)entityValue(defs, "derived_from"); + } + + public boolean isDerivedFrom(String type_str) { + // Check if object inherits from the given type + // Returns true if this object is derived from 'type_str' + // False otherwise. + if(type == null || this.type.isEmpty()) { + return false; + } + else if(type == type_str) { + return true; + } + else if(getParentType() != null) { + return ((EntityType)getParentType()).isDerivedFrom(type_str); + } + else { + return false; + } + } + + public Object entityValue(LinkedHashMap defs, String key) { + if(defs != null) { + return defs.get(key); + } + return null; + } + + @SuppressWarnings("unchecked") + public Object getValue(String ndtype, LinkedHashMap _defs, boolean parent) { + Object value = null; + if(_defs == null) { + if(defs == null) { + return null; + } + _defs = this.defs; + } + Object defndt = _defs.get(ndtype); + if(defndt != null) { + // copy the value to avoid that next operations add items in the + // item definitions + //value = copy.copy(defs[ndtype]) + value = CopyUtils.copyLhmOrAl(defndt); + } + + if(parent) { + EntityType p = this; + if(p != null) { + while(p != null) { + if(p.defs != null && p.defs.get(ndtype) != null) { + // get the parent value + Object parentValue = p.defs.get(ndtype); + if(value != null) { + if(value instanceof LinkedHashMap) { + for(Map.Entry me: ((LinkedHashMap)parentValue).entrySet()) { + String k = me.getKey(); + if(((LinkedHashMap)value).get(k) == null) { + ((LinkedHashMap)value).put(k,me.getValue()); + } + } + } + if(value instanceof ArrayList) { + for(Object pValue: 
(ArrayList)parentValue) { + if(!((ArrayList)value).contains(pValue)) { + ((ArrayList)value).add(pValue); + } + } + } + } + else { + // value = copy.copy(parent_value) + value = CopyUtils.copyLhmOrAl(parentValue); + } + } + p = (EntityType)p.getParentType(); + } + } + } + + return value; + } + + @SuppressWarnings("unchecked") + public Object getDefinition(String ndtype) { + Object value = null; + LinkedHashMap _defs; + // no point in hasattr, because we have it, and it + // doesn't do anything except emit an exception anyway + //if not hasattr(self, 'defs'): + // defs = None + // ExceptionCollector.appendException( + // ValidationError(message="defs is " + str(defs))) + //else: + // defs = self.defs + _defs = this.defs; + + + if(_defs != null && _defs.get(ndtype) != null) { + value = _defs.get(ndtype); + } + + Object p = getParentType(); + if(p != null) { + Object inherited = ((EntityType)p).getDefinition(ndtype); + if(inherited != null) { + // inherited = dict(inherited) WTF?!? + if(value == null) { + value = inherited; + } + else { + //????? 
+ //inherited.update(value) + //value.update(inherited) + for(Map.Entry me: ((LinkedHashMap)inherited).entrySet()) { + ((LinkedHashMap)value).put(me.getKey(),me.getValue()); + } + } + } + } + return value; + } + + public static void updateDefinitions(String version) { + ExtTools exttools = new ExtTools(); + String extensionDefsFile = exttools.getDefsFile(version); + + InputStream input = null; + try { + input = new FileInputStream(new File(extensionDefsFile)); + } + catch (FileNotFoundException e) { + log.error("EntityType - updateDefinitions - Failed to open extension defs file ", extensionDefsFile); + return; + } + Yaml yaml = new Yaml(); + LinkedHashMap nfvDefFile = (LinkedHashMap)yaml.load(input); + LinkedHashMap nfvDef = new LinkedHashMap<>(); + for(String section: TOSCA_DEF_SECTIONS) { + if(nfvDefFile.get(section) != null) { + LinkedHashMap value = + (LinkedHashMap)nfvDefFile.get(section); + for(String key: value.keySet()) { + nfvDef.put(key, value.get(key)); + } + } + } + TOSCA_DEF.putAll(nfvDef); + } + +} + +/*python + +from toscaparser.common.exception import ExceptionCollector +from toscaparser.common.exception import ValidationError +from toscaparser.extensions.exttools import ExtTools +import org.openecomp.sdc.toscaparser.api.utils.yamlparser + +log = logging.getLogger('tosca') + + +class EntityType(object): + '''Base class for TOSCA elements.''' + + SECTIONS = (DERIVED_FROM, PROPERTIES, ATTRIBUTES, REQUIREMENTS, + INTERFACES, CAPABILITIES, TYPE, ARTIFACTS) = \ + ('derived_from', 'properties', 'attributes', 'requirements', + 'interfaces', 'capabilities', 'type', 'artifacts') + + TOSCA_DEF_SECTIONS = ['node_types', 'data_types', 'artifact_types', + 'group_types', 'relationship_types', + 'capability_types', 'interface_types', + 'policy_types'] + + '''TOSCA definition file.''' + TOSCA_DEF_FILE = os.path.join( + os.path.dirname(os.path.abspath(__file__)), + "TOSCA_definition_1_0.yaml") + + loader = toscaparser.utils.yamlparser.load_yaml + + 
TOSCA_DEF_LOAD_AS_IS = loader(TOSCA_DEF_FILE) + + # Map of definition with pre-loaded values of TOSCA_DEF_FILE_SECTIONS + TOSCA_DEF = {} + for section in TOSCA_DEF_SECTIONS: + if section in TOSCA_DEF_LOAD_AS_IS.keys(): + value = TOSCA_DEF_LOAD_AS_IS[section] + for key in value.keys(): + TOSCA_DEF[key] = value[key] + + RELATIONSHIP_TYPE = (DEPENDSON, HOSTEDON, CONNECTSTO, ATTACHESTO, + LINKSTO, BINDSTO) = \ + ('tosca.relationships.DependsOn', + 'tosca.relationships.HostedOn', + 'tosca.relationships.ConnectsTo', + 'tosca.relationships.AttachesTo', + 'tosca.relationships.network.LinksTo', + 'tosca.relationships.network.BindsTo') + + NODE_PREFIX = 'tosca.nodes.' + RELATIONSHIP_PREFIX = 'tosca.relationships.' + CAPABILITY_PREFIX = 'tosca.capabilities.' + INTERFACE_PREFIX = 'tosca.interfaces.' + ARTIFACT_PREFIX = 'tosca.artifacts.' + POLICY_PREFIX = 'tosca.policies.' + GROUP_PREFIX = 'tosca.groups.' + # currently the data types are defined only for network + # but may have changes in the future. + DATATYPE_PREFIX = 'tosca.datatypes.' + DATATYPE_NETWORK_PREFIX = DATATYPE_PREFIX + 'network.' + TOSCA = 'tosca' + + def derived_from(self, defs): + '''Return a type this type is derived from.''' + return self.entity_value(defs, 'derived_from') + + def is_derived_from(self, type_str): + '''Check if object inherits from the given type. + + Returns true if this object is derived from 'type_str'. + False otherwise. 
+ ''' + if not self.type: + return False + elif self.type == type_str: + return True + elif self.parent_type: + return self.parent_type.is_derived_from(type_str) + else: + return False + + def entity_value(self, defs, key): + if key in defs: + return defs[key] + + def get_value(self, ndtype, defs=None, parent=None): + value = None + if defs is None: + if not hasattr(self, 'defs'): + return None + defs = self.defs + if ndtype in defs: + # copy the value to avoid that next operations add items in the + # item definitions + value = copy.copy(defs[ndtype]) + if parent: + p = self + if p: + while p: + if ndtype in p.defs: + # get the parent value + parent_value = p.defs[ndtype] + if value: + if isinstance(value, dict): + for k, v in parent_value.items(): + if k not in value.keys(): + value[k] = v + if isinstance(value, list): + for p_value in parent_value: + if p_value not in value: + value.append(p_value) + else: + value = copy.copy(parent_value) + p = p.parent_type + return value + + def get_definition(self, ndtype): + value = None + if not hasattr(self, 'defs'): + defs = None + ExceptionCollector.appendException( + ValidationError(message="defs is " + str(defs))) + else: + defs = self.defs + if defs is not None and ndtype in defs: + value = defs[ndtype] + p = self.parent_type + if p: + inherited = p.get_definition(ndtype) + if inherited: + inherited = dict(inherited) + if not value: + value = inherited + else: + inherited.update(value) + value.update(inherited) + return value + + +def update_definitions(version): + exttools = ExtTools() + extension_defs_file = exttools.get_defs_file(version) + loader = toscaparser.utils.yamlparser.load_yaml + nfv_def_file = loader(extension_defs_file) + nfv_def = {} + for section in EntityType.TOSCA_DEF_SECTIONS: + if section in nfv_def_file.keys(): + value = nfv_def_file[section] + for key in value.keys(): + nfv_def[key] = value[key] + EntityType.TOSCA_DEF.update(nfv_def) +*/ diff --git 
a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/GroupType.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/GroupType.java new file mode 100644 index 0000000..d226b78 --- /dev/null +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/GroupType.java @@ -0,0 +1,215 @@ +package org.openecomp.sdc.toscaparser.api.elements; + +import java.util.LinkedHashMap; + +import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; +import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +public class GroupType extends StatefulEntityType { + + private static final String DERIVED_FROM = "derived_from"; + private static final String VERSION = "version"; + private static final String METADATA = "metadata"; + private static final String DESCRIPTION = "description"; + private static final String PROPERTIES = "properties"; + private static final String MEMBERS = "members"; + private static final String INTERFACES = "interfaces"; + + private static final String SECTIONS[] = { + DERIVED_FROM, VERSION, METADATA, DESCRIPTION, PROPERTIES, MEMBERS, INTERFACES}; + + private String groupType; + private LinkedHashMap customDef; + private String groupDescription; + private String groupVersion; + //private LinkedHashMap groupProperties; + //private ArrayList groupMembers; + private LinkedHashMap metaData; + + @SuppressWarnings("unchecked") + public GroupType(String _grouptype,LinkedHashMap _customDef) { + super(_grouptype,GROUP_PREFIX,_customDef); + + groupType = _grouptype; + customDef = _customDef; + _validateFields(); + if(defs != null) { + groupDescription = (String)defs.get(DESCRIPTION); + groupVersion = (String)defs.get(VERSION); + //groupProperties = (LinkedHashMap)defs.get(PROPERTIES); + //groupMembers = (ArrayList)defs.get(MEMBERS); + Object mdo = defs.get(METADATA); + if(mdo instanceof LinkedHashMap) { + metaData = (LinkedHashMap)mdo; + } + else { + metaData = null; + } + + if(metaData != null) { + _validateMetadata(metaData); + } + } 
+ } + + public GroupType getParentType() { + // Return a group statefulentity of this entity is derived from. + if(defs == null) { + return null; + } + String pgroupEntity = derivedFrom(defs); + if(pgroupEntity != null) { + return new GroupType(pgroupEntity,customDef); + } + return null; + } + + public String getDescription() { + return groupDescription; + } + + public String getVersion() { + return groupVersion; + } + + @SuppressWarnings("unchecked") + public LinkedHashMap getInterfaces() { + Object ifo = getValue(INTERFACES,null,false); + if(ifo instanceof LinkedHashMap) { + return (LinkedHashMap)ifo; + } + return new LinkedHashMap(); + } + + private void _validateFields() { + if(defs != null) { + for(String name: defs.keySet()) { + boolean bFound = false; + for(String sect: SECTIONS) { + if(name.equals(sect)) { + bFound = true; + break; + } + } + if(!bFound) { + ThreadLocalsHolder.getCollector().appendException(String.format( + "UnknownFieldError: Group Type \"%s\" contains unknown field \"%s\"", + groupType,name)); + } + } + } + } + + @SuppressWarnings("unchecked") + private void _validateMetadata(LinkedHashMap metadata) { + String mtt = (String) metadata.get("type"); + if(mtt != null && !mtt.equals("map") && !mtt.equals("tosca:map")) { + ThreadLocalsHolder.getCollector().appendException(String.format( + "InvalidTypeError: \"%s\" defined in group for metadata is invalid", + mtt)); + } + for(String entrySchema: metadata.keySet()) { + Object estob = metadata.get(entrySchema); + if(estob instanceof LinkedHashMap) { + String est = (String)((LinkedHashMap)estob).get("type"); + if(!est.equals("string")) { + ThreadLocalsHolder.getCollector().appendException(String.format( + "InvalidTypeError: \"%s\" defined in group for metadata \"%s\" is invalid", + est,entrySchema)); + } + } + } + } + + public String getType() { + return groupType; + } + + +} + +/*python + +from toscaparser.common.exception import ExceptionCollector +from toscaparser.common.exception import 
InvalidTypeError +from toscaparser.common.exception import UnknownFieldError +from toscaparser.elements.statefulentitytype import StatefulEntityType + + +class GroupType(StatefulEntityType): + '''TOSCA built-in group type.''' + + SECTIONS = (DERIVED_FROM, VERSION, METADATA, DESCRIPTION, PROPERTIES, + MEMBERS, INTERFACES) = \ + ("derived_from", "version", "metadata", "description", + "properties", "members", "interfaces") + + def __init__(self, grouptype, custom_def=None): + super(GroupType, self).__init__(grouptype, self.GROUP_PREFIX, + custom_def) + self.custom_def = custom_def + self.grouptype = grouptype + self._validate_fields() + self.group_description = None + if self.DESCRIPTION in self.defs: + self.group_description = self.defs[self.DESCRIPTION] + + self.group_version = None + if self.VERSION in self.defs: + self.group_version = self.defs[self.VERSION] + + self.group_properties = None + if self.PROPERTIES in self.defs: + self.group_properties = self.defs[self.PROPERTIES] + + self.group_members = None + if self.MEMBERS in self.defs: + self.group_members = self.defs[self.MEMBERS] + + if self.METADATA in self.defs: + self.meta_data = self.defs[self.METADATA] + self._validate_metadata(self.meta_data) + + @property + def parent_type(self): + '''Return a group statefulentity of this entity is derived from.''' + if not hasattr(self, 'defs'): + return None + pgroup_entity = self.derived_from(self.defs) + if pgroup_entity: + return GroupType(pgroup_entity, self.custom_def) + + @property + def description(self): + return self.group_description + + @property + def version(self): + return self.group_version + + @property + def interfaces(self): + return self.get_value(self.INTERFACES) + + def _validate_fields(self): + if self.defs: + for name in self.defs.keys(): + if name not in self.SECTIONS: + ExceptionCollector.appendException( + UnknownFieldError(what='Group Type %s' + % self.grouptype, field=name)) + + def _validate_metadata(self, meta_data): + if not 
meta_data.get('type') in ['map', 'tosca:map']: + ExceptionCollector.appendException( + InvalidTypeError(what='"%s" defined in group for ' + 'metadata' % (meta_data.get('type')))) + for entry_schema, entry_schema_type in meta_data.items(): + if isinstance(entry_schema_type, dict) and not \ + entry_schema_type.get('type') == 'string': + ExceptionCollector.appendException( + InvalidTypeError(what='"%s" defined in group for ' + 'metadata "%s"' + % (entry_schema_type.get('type'), + entry_schema))) +*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/InterfacesDef.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/InterfacesDef.java new file mode 100644 index 0000000..8a2b4dd --- /dev/null +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/InterfacesDef.java @@ -0,0 +1,228 @@ +package org.openecomp.sdc.toscaparser.api.elements; + +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.Map; + +import org.openecomp.sdc.toscaparser.api.EntityTemplate; +import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; +import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +public class InterfacesDef extends StatefulEntityType { + + public static final String LIFECYCLE = "tosca.interfaces.node.lifecycle.Standard"; + public static final String CONFIGURE = "tosca.interfaces.relationship.Configure"; + public static final String LIFECYCLE_SHORTNAME = "Standard"; + public static final String CONFIGURE_SHORTNAME = "Configure"; + + public static final String SECTIONS[] = { + LIFECYCLE, CONFIGURE, LIFECYCLE_SHORTNAME,CONFIGURE_SHORTNAME + }; + + public static final String IMPLEMENTATION = "implementation"; + public static final String INPUTS = "inputs"; + + public static final String INTERFACEVALUE[] = {IMPLEMENTATION, INPUTS}; + + public static final String INTERFACE_DEF_RESERVED_WORDS[] = { + "type", "inputs", "derived_from", "version", "description"}; + + private 
EntityType ntype; + private EntityTemplate nodeTemplate; + private String name; + private Object value; + private String implementation; + private LinkedHashMap inputs; + + + @SuppressWarnings("unchecked") + public InterfacesDef(EntityType inodeType, + String interfaceType, + EntityTemplate inodeTemplate, + String iname, + Object ivalue) { + // void + super(); + + ntype = inodeType; + nodeTemplate = inodeTemplate; + type = interfaceType; + name = iname; + value = ivalue; + implementation = null; + inputs = null; + defs = new LinkedHashMap(); + + if(interfaceType.equals(LIFECYCLE_SHORTNAME)) { + interfaceType = LIFECYCLE; + } + if(interfaceType.equals(CONFIGURE_SHORTNAME)) { + interfaceType = CONFIGURE; + } + + // only NodeType has getInterfaces "hasattr(ntype,interfaces)" + // while RelationshipType does not + if(ntype instanceof NodeType) { + if(((NodeType)ntype).getInterfaces() != null && + ((NodeType)ntype).getInterfaces().values().contains(interfaceType)) { + LinkedHashMap nii = (LinkedHashMap) + ((NodeType)ntype).getInterfaces().get(interfaceType); + interfaceType = (String)nii.get("type"); + } + } + if(inodeType != null) { + if(nodeTemplate != null && nodeTemplate.getCustomDef() != null && + nodeTemplate.getCustomDef().values().contains(interfaceType)) { + defs = (LinkedHashMap) + nodeTemplate.getCustomDef().get(interfaceType); + } + else { + defs = (LinkedHashMap)TOSCA_DEF.get(interfaceType); + } + } + + if(ivalue != null) { + if(ivalue instanceof LinkedHashMap) { + for(Map.Entry me: ((LinkedHashMap)ivalue).entrySet()) { + if(me.getKey().equals("implementation")) { + implementation = (String)me.getValue(); + } + else if(me.getKey().equals("inputs")) { + inputs = (LinkedHashMap)me.getValue(); + } + else { + ThreadLocalsHolder.getCollector().appendException(String.format( + "UnknownFieldError: \"interfaces\" of template \"%s\" contain unknown field \"%s\"", + nodeTemplate.getName(),me.getKey())); + } + } + } + else { + implementation = (String)ivalue; + } + } 
+ } + + public ArrayList getLifecycleOps() { + if(defs != null) { + if(type.equals(LIFECYCLE)) { + return _ops(); + } + } + return null; + } + + public ArrayList getConfigureOps() { + if(defs != null) { + if(type.equals(CONFIGURE)) { + return _ops(); + } + } + return null; + } + + private ArrayList _ops() { + return new ArrayList(defs.keySet()); + } + + // getters/setters + + public LinkedHashMap getInputs() { + return inputs; + } + + public void setInput(String name,Object value) { + inputs.put(name, value); + } +} + +/*python + +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +from toscaparser.common.exception import ExceptionCollector +from toscaparser.common.exception import UnknownFieldError +from toscaparser.elements.statefulentitytype import StatefulEntityType + +SECTIONS = (LIFECYCLE, CONFIGURE, LIFECYCLE_SHORTNAME, + CONFIGURE_SHORTNAME) = \ + ('tosca.interfaces.node.lifecycle.Standard', + 'tosca.interfaces.relationship.Configure', + 'Standard', 'Configure') + +INTERFACEVALUE = (IMPLEMENTATION, INPUTS) = ('implementation', 'inputs') + +INTERFACE_DEF_RESERVED_WORDS = ['type', 'inputs', 'derived_from', 'version', + 'description'] + + +class InterfacesDef(StatefulEntityType): + '''TOSCA built-in interfaces type.''' + + def __init__(self, node_type, interfacetype, + node_template=None, name=None, value=None): + self.ntype = node_type + self.node_template = node_template + self.type = interfacetype + self.name = name + self.value = value + self.implementation = None + self.inputs = None + self.defs = {} + if interfacetype == LIFECYCLE_SHORTNAME: + interfacetype = LIFECYCLE + if interfacetype == CONFIGURE_SHORTNAME: + interfacetype = CONFIGURE + if hasattr(self.ntype, 'interfaces') \ + and self.ntype.interfaces \ + and interfacetype in self.ntype.interfaces: + interfacetype = self.ntype.interfaces[interfacetype]['type'] + if node_type: + if self.node_template and self.node_template.custom_def \ + and interfacetype in self.node_template.custom_def: + self.defs = self.node_template.custom_def[interfacetype] + else: + self.defs = self.TOSCA_DEF[interfacetype] + if value: + if isinstance(self.value, dict): + for i, j in self.value.items(): + if i == IMPLEMENTATION: + self.implementation = j + elif i == INPUTS: + self.inputs = j + else: + what = ('"interfaces" of template "%s"' % + self.node_template.name) + ExceptionCollector.appendException( + UnknownFieldError(what=what, field=i)) + else: + self.implementation = value + + @property + def lifecycle_ops(self): + if self.defs: + if self.type == LIFECYCLE: + return self._ops() + + 
@property + def configure_ops(self): + if self.defs: + if self.type == CONFIGURE: + return self._ops() + + def _ops(self): + ops = [] + for name in list(self.defs.keys()): + ops.append(name) + return ops +*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/Metadata.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/Metadata.java new file mode 100644 index 0000000..4f7bdd0 --- /dev/null +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/Metadata.java @@ -0,0 +1,35 @@ +package org.openecomp.sdc.toscaparser.api.elements; + +import java.util.Map; + +public class Metadata { + + private final Map metadataMap; + + public Metadata(Map metadataMap) { + this.metadataMap = metadataMap; + } + + public String getValue(String key) { + return !isEmpty() ? String.valueOf(this.metadataMap.get(key)) : null; + } + + public void setValue(String key, Object value) { + if (!isEmpty()) { + this.metadataMap.put(key, value); + } + } + + + private boolean isEmpty() { + return this.metadataMap == null || this.metadataMap.size() == 0; + } + + @Override + public String toString() { + return "Metadata{" + + "metadataMap=" + metadataMap + + '}'; + } + +} diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/NodeType.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/NodeType.java new file mode 100644 index 0000000..d5f1a18 --- /dev/null +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/NodeType.java @@ -0,0 +1,523 @@ +package org.openecomp.sdc.toscaparser.api.elements; + +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.Map; + +import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; +import org.openecomp.sdc.toscaparser.api.elements.InterfacesDef; +import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +public class NodeType extends StatefulEntityType { + // TOSCA built-in node type + + private static final String DERIVED_FROM 
= "derived_from"; + private static final String METADATA = "metadata"; + private static final String PROPERTIES = "properties"; + private static final String VERSION = "version"; + private static final String DESCRIPTION = "description"; + private static final String ATTRIBUTES = "attributes"; + private static final String REQUIREMENTS = "requirements"; + private static final String CAPABILITIES = "capabilities"; + private static final String INTERFACES = "interfaces"; + private static final String ARTIFACTS = "artifacts"; + + private static final String SECTIONS[] = { + DERIVED_FROM, METADATA, PROPERTIES, VERSION, DESCRIPTION, ATTRIBUTES, REQUIREMENTS, CAPABILITIES, INTERFACES, ARTIFACTS + }; + + private String ntype; + public LinkedHashMap customDef; + + public NodeType(String nttype,LinkedHashMap ntcustomDef) { + super(nttype,NODE_PREFIX, ntcustomDef); + ntype = nttype; + customDef = ntcustomDef; + _validateKeys(); + } + + public Object getParentType() { + // Return a node this node is derived from + if(defs == null) { + return null; + } + String pnode = derivedFrom(defs); + if(pnode != null && !pnode.isEmpty()) { + return new NodeType(pnode,customDef); + } + return null; + } + + @SuppressWarnings("unchecked") + public LinkedHashMap getRelationship() { + // Return a dictionary of relationships to other node types + + // This method returns a dictionary of named relationships that nodes + // of the current node type (self) can have to other nodes (of specific + // types) in a TOSCA template. + + LinkedHashMap relationship = new LinkedHashMap<>(); + ArrayList> requires; + Object treq = getAllRequirements(); + if(treq != null) { + // NOTE(sdmonov): Check if requires is a dict. + // If it is a dict convert it to a list of dicts. + // This is needed because currently the code below supports only + // lists as requirements definition. 
The following check will + // make sure if a map (dict) was provided it will be converted to + // a list before proceeding to the parsing. + if(treq instanceof LinkedHashMap) { + requires = new ArrayList<>(); + for(Map.Entry me: ((LinkedHashMap)treq).entrySet()) { + LinkedHashMap tl = new LinkedHashMap<>(); + tl.put(me.getKey(),me.getValue()); + requires.add(tl); + } + } + else { + requires = (ArrayList>)treq; + } + + String keyword = null; + String nodeType = null; + for(LinkedHashMap require: requires) { + String relation = null; + for(Map.Entry re: require.entrySet()) { + String key = re.getKey(); + LinkedHashMap req = (LinkedHashMap)re.getValue(); + if(req.get("relationship") != null) { + Object trelation = req.get("relationship"); + // trelation is a string or a dict with "type" mapped to the string we want + if(trelation instanceof String) { + relation = (String)trelation; + } + else { + if(((LinkedHashMap)trelation).get("type") != null) { + relation = (String)((LinkedHashMap)trelation).get("type"); + } + } + nodeType = (String)req.get("node"); + //BUG meaningless?? LinkedHashMap value = req; + if(nodeType != null) { + keyword = "node"; + } + else { + // If value is a dict and has a type key + // we need to lookup the node type using + // the capability type + String captype = (String)req.get("capability"); + String value = _getNodeTypeByCap(captype); + String getRelation = _getRelation(key,value); + if (getRelation != null) { + relation = getRelation; + } + keyword = key; + nodeType = value; + } + } + + } + RelationshipType rtype = new RelationshipType(relation, keyword, customDef); + NodeType relatednode = new NodeType(nodeType, customDef); + relationship.put(rtype, relatednode); + } + } + return relationship; + + } + + @SuppressWarnings("unchecked") + private String _getNodeTypeByCap(String cap) { + // Find the node type that has the provided capability + + // This method will lookup all node types if they have the + // provided capability. 
+ + // Filter the node types + ArrayList nodeTypes = new ArrayList<>(); + for(String nt: TOSCA_DEF.keySet()) { + if(nt.startsWith(NODE_PREFIX) && !nt.equals("tosca.nodes.Root")) { + nodeTypes.add(nt); + } + } + for(String nt: nodeTypes) { + LinkedHashMap nodeDef = (LinkedHashMap)TOSCA_DEF.get(nt); + if(nodeDef instanceof LinkedHashMap && nodeDef.get("capabilities") != null) { + LinkedHashMap nodeCaps = (LinkedHashMap)nodeDef.get("capabilities"); + if(nodeCaps != null) { + for(Object val: nodeCaps.values()) { + if(val instanceof LinkedHashMap) { + String tp = (String)((LinkedHashMap)val).get("type"); + if(tp != null && tp.equals(cap)) { + return nt; + } + } + } + } + } + } + return null; + } + + @SuppressWarnings("unchecked") + private String _getRelation(String key,String ndtype) { + String relation = null; + NodeType ntype = new NodeType(ndtype,null); + LinkedHashMap caps = ntype.getCapabilities(); + if(caps != null && caps.get(key) != null) { + CapabilityTypeDef c = caps.get(key); + for(int i=0; i< RELATIONSHIP_TYPE.length; i++) { + String r = RELATIONSHIP_TYPE[i]; + LinkedHashMap rtypedef = (LinkedHashMap)TOSCA_DEF.get(r); + for(Object o: rtypedef.values()) { + LinkedHashMap properties = (LinkedHashMap)o; + if(properties.get(c.getType()) != null) { + relation = r; + break; + } + } + if(relation != null) { + break; + } + else { + for(Object o: rtypedef.values()) { + LinkedHashMap properties = (LinkedHashMap)o; + if(properties.get(c.getParentType()) != null) { + relation = r; + break; + } + } + } + } + } + return relation; + } + + @SuppressWarnings("unchecked") + public ArrayList getCapabilitiesObjects() { + // Return a list of capability objects + ArrayList typecapabilities = new ArrayList<>(); + LinkedHashMap caps = (LinkedHashMap)getValue(CAPABILITIES, null, true); + if(caps != null) { + // 'cname' is symbolic name of the capability + // 'cvalue' is a dict { 'type': } + for(Map.Entry me: caps.entrySet()) { + String cname = me.getKey(); + LinkedHashMap cvalue = 
(LinkedHashMap)me.getValue(); + String ctype = cvalue.get("type"); + CapabilityTypeDef cap = new CapabilityTypeDef(cname,ctype,type,customDef); + typecapabilities.add(cap); + } + } + return typecapabilities; + } + + public LinkedHashMap getCapabilities() { + // Return a dictionary of capability name-objects pairs + LinkedHashMap caps = new LinkedHashMap<>(); + for(CapabilityTypeDef ctd: getCapabilitiesObjects()) { + caps.put(ctd.getName(),ctd); + } + return caps; + } + + @SuppressWarnings("unchecked") + public ArrayList getRequirements() { + return (ArrayList)getValue(REQUIREMENTS,null,true); + } + + public ArrayList getAllRequirements() { + return getRequirements(); + } + + @SuppressWarnings("unchecked") + public LinkedHashMap getInterfaces() { + return (LinkedHashMap)getValue(INTERFACES,null,false); + } + + + @SuppressWarnings("unchecked") + public ArrayList getLifecycleInputs() + { + // Return inputs to life cycle operations if found + ArrayList inputs = new ArrayList<>(); + LinkedHashMap interfaces = getInterfaces(); + if(interfaces != null) { + for(Map.Entry me: interfaces.entrySet()) { + String iname = me.getKey(); + LinkedHashMap ivalue = (LinkedHashMap)me.getValue(); + if(iname.equals(InterfacesDef.LIFECYCLE)) { + for(Map.Entry ie: ivalue.entrySet()) { + if(ie.getKey().equals("input")) { + LinkedHashMap y = (LinkedHashMap)ie.getValue(); + for(String i: y.keySet()) { + inputs.add(i); + } + } + } + } + } + } + return inputs; + } + + public ArrayList getLifecycleOperations() { + // Return available life cycle operations if found + ArrayList ops = null; + LinkedHashMap interfaces = getInterfaces(); + if(interfaces != null) { + InterfacesDef i = new InterfacesDef(this,InterfacesDef.LIFECYCLE,null,null,null); + ops = i.getLifecycleOps(); + } + return ops; + } + + public CapabilityTypeDef getCapability(String name) { + //BUG?? the python code has to be wrong + // it refers to a bad attribute 'value'... 
+ LinkedHashMap caps = getCapabilities(); + if(caps != null) { + return caps.get(name); + } + return null; + /* + def get_capability(self, name): + caps = self.get_capabilities() + if caps and name in caps.keys(): + return caps[name].value + */ + } + + public String getCapabilityType(String name) { + //BUG?? the python code has to be wrong + // it refers to a bad attribute 'value'... + CapabilityTypeDef captype = getCapability(name); + if(captype != null) { + return captype.getType(); + } + return null; + /* + def get_capability_type(self, name): + captype = self.get_capability(name) + if captype and name in captype.keys(): + return captype[name].value + */ + } + + private void _validateKeys() { + if(defs != null) { + for(String key: defs.keySet()) { + boolean bFound = false; + for(int i=0; i< SECTIONS.length; i++) { + if(key.equals(SECTIONS[i])) { + bFound = true; + break; + } + } + if(!bFound) { + ThreadLocalsHolder.getCollector().appendException(String.format( + "UnknownFieldError: Nodetype \"%s\" has unknown field \"%s\"",ntype,key)); + } + } + } + } + +} + +/*python + +from toscaparser.common.exception import ExceptionCollector +from toscaparser.common.exception import UnknownFieldError +from toscaparser.elements.capabilitytype import CapabilityTypeDef +import org.openecomp.sdc.toscaparser.api.elements.interfaces as ifaces +from toscaparser.elements.interfaces import InterfacesDef +from toscaparser.elements.relationshiptype import RelationshipType +from toscaparser.elements.statefulentitytype import StatefulEntityType + + +class NodeType(StatefulEntityType): + '''TOSCA built-in node type.''' + SECTIONS = (DERIVED_FROM, METADATA, PROPERTIES, VERSION, DESCRIPTION, ATTRIBUTES, REQUIREMENTS, CAPABILITIES, INTERFACES, ARTIFACTS) = \ + ('derived_from', 'metadata', 'properties', 'version', + 'description', 'attributes', 'requirements', 'capabilities', + 'interfaces', 'artifacts') + + def __init__(self, ntype, custom_def=None): + super(NodeType, self).__init__(ntype, 
self.NODE_PREFIX, custom_def) + self.ntype = ntype + self.custom_def = custom_def + self._validate_keys() + + @property + def parent_type(self): + '''Return a node this node is derived from.''' + if not hasattr(self, 'defs'): + return None + pnode = self.derived_from(self.defs) + if pnode: + return NodeType(pnode, self.custom_def) + + @property + def relationship(self): + '''Return a dictionary of relationships to other node types. + + This method returns a dictionary of named relationships that nodes + of the current node type (self) can have to other nodes (of specific + types) in a TOSCA template. + + ''' + relationship = {} + requires = self.get_all_requirements() + if requires: + # NOTE(sdmonov): Check if requires is a dict. + # If it is a dict convert it to a list of dicts. + # This is needed because currently the code below supports only + # lists as requirements definition. The following check will + # make sure if a map (dict) was provided it will be converted to + # a list before proceeding to the parsing. + if isinstance(requires, dict): + requires = [{key: value} for key, value in requires.items()] + + keyword = None + node_type = None + for require in requires: + for key, req in require.items(): + if 'relationship' in req: + relation = req.get('relationship') + if 'type' in relation: + relation = relation.get('type') + node_type = req.get('node') + value = req + if node_type: + keyword = 'node' + else: + # If value is a dict and has a type key + # we need to lookup the node type using + # the capability type + value = req + if isinstance(value, dict): + captype = value['capability'] + value = (self. 
+ _get_node_type_by_cap(key, captype)) + relation = self._get_relation(key, value) + keyword = key + node_type = value + rtype = RelationshipType(relation, keyword, self.custom_def) + relatednode = NodeType(node_type, self.custom_def) + relationship[rtype] = relatednode + return relationship + + def _get_node_type_by_cap(self, key, cap): + '''Find the node type that has the provided capability + + This method will lookup all node types if they have the + provided capability. + ''' + + # Filter the node types + node_types = [node_type for node_type in self.TOSCA_DEF.keys() + if node_type.startswith(self.NODE_PREFIX) and + node_type != 'tosca.nodes.Root'] + + for node_type in node_types: + node_def = self.TOSCA_DEF[node_type] + if isinstance(node_def, dict) and 'capabilities' in node_def: + node_caps = node_def['capabilities'] + for value in node_caps.values(): + if isinstance(value, dict) and \ + 'type' in value and value['type'] == cap: + return node_type + + def _get_relation(self, key, ndtype): + relation = None + ntype = NodeType(ndtype) + caps = ntype.get_capabilities() + if caps and key in caps.keys(): + c = caps[key] + for r in self.RELATIONSHIP_TYPE: + rtypedef = ntype.TOSCA_DEF[r] + for properties in rtypedef.values(): + if c.type in properties: + relation = r + break + if relation: + break + else: + for properties in rtypedef.values(): + if c.parent_type in properties: + relation = r + break + return relation + + def get_capabilities_objects(self): + '''Return a list of capability objects.''' + typecapabilities = [] + caps = self.get_value(self.CAPABILITIES, None, True) + if caps: + # 'name' is symbolic name of the capability + # 'value' is a dict { 'type': } + for name, value in caps.items(): + ctype = value.get('type') + cap = CapabilityTypeDef(name, ctype, self.type, + self.custom_def) + typecapabilities.append(cap) + return typecapabilities + + def get_capabilities(self): + '''Return a dictionary of capability name-objects pairs.''' + return {cap.name: 
cap + for cap in self.get_capabilities_objects()} + + @property + def requirements(self): + return self.get_value(self.REQUIREMENTS, None, True) + + def get_all_requirements(self): + return self.requirements + + @property + def interfaces(self): + return self.get_value(self.INTERFACES) + + @property + def lifecycle_inputs(self): + '''Return inputs to life cycle operations if found.''' + inputs = [] + interfaces = self.interfaces + if interfaces: + for name, value in interfaces.items(): + if name == ifaces.LIFECYCLE: + for x, y in value.items(): + if x == 'inputs': + for i in y.iterkeys(): + inputs.append(i) + return inputs + + @property + def lifecycle_operations(self): + '''Return available life cycle operations if found.''' + ops = None + interfaces = self.interfaces + if interfaces: + i = InterfacesDef(self.type, ifaces.LIFECYCLE) + ops = i.lifecycle_ops + return ops + + def get_capability(self, name): + caps = self.get_capabilities() + if caps and name in caps.keys(): + return caps[name].value + + def get_capability_type(self, name): + captype = self.get_capability(name) + if captype and name in captype.keys(): + return captype[name].value + + def _validate_keys(self): + if self.defs: + for key in self.defs.keys(): + if key not in self.SECTIONS: + ExceptionCollector.appendException( + UnknownFieldError(what='Nodetype"%s"' % self.ntype, + field=key)) +*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/PolicyType.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/PolicyType.java new file mode 100644 index 0000000..c60bed1 --- /dev/null +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/PolicyType.java @@ -0,0 +1,290 @@ +package org.openecomp.sdc.toscaparser.api.elements; + +import java.util.ArrayList; +import java.util.LinkedHashMap; + +import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; +import org.openecomp.sdc.toscaparser.api.utils.TOSCAVersionProperty; +import 
org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +public class PolicyType extends StatefulEntityType { + + private static final String DERIVED_FROM = "derived_from"; + private static final String METADATA = "metadata"; + private static final String PROPERTIES = "properties"; + private static final String VERSION = "version"; + private static final String DESCRIPTION = "description"; + private static final String TARGETS = "targets"; + private static final String TRIGGERS = "triggers"; + private static final String TYPE = "type"; + + private static final String SECTIONS[] = { + DERIVED_FROM, METADATA, PROPERTIES, VERSION, DESCRIPTION, TARGETS, TRIGGERS, TYPE + }; + + private LinkedHashMap customDef; + private String policyDescription; + private Object policyVersion; + private LinkedHashMap properties; + private LinkedHashMap parentPolicies; + private LinkedHashMap metaData; + private ArrayList targetsList; + + + public PolicyType(String _type, LinkedHashMap _customDef) { + super(_type,POLICY_PREFIX,_customDef); + + type = _type; + customDef = _customDef; + _validateKeys(); + + metaData = null; + if(defs != null && defs.get(METADATA) != null) { + metaData = (LinkedHashMap)defs.get(METADATA); + _validateMetadata(metaData); + } + + properties = null; + if(defs != null && defs.get(PROPERTIES) != null) { + properties = (LinkedHashMap)defs.get(PROPERTIES); + } + parentPolicies = _getParentPolicies(); + + policyVersion = null; + if(defs != null && defs.get(VERSION) != null) { + policyVersion = (new TOSCAVersionProperty( + defs.get(VERSION))).getVersion(); + } + + policyDescription = null; + if(defs != null && defs.get(DESCRIPTION) != null) { + policyDescription = (String)defs.get(DESCRIPTION); + } + + targetsList = null; + if(defs != null && defs.get(TARGETS) != null) { + targetsList = (ArrayList)defs.get(TARGETS); + _validateTargets(targetsList,customDef); + } + + } + + private LinkedHashMap _getParentPolicies() { + LinkedHashMap policies = new 
LinkedHashMap<>(); + String parentPolicy; + if(getParentType() != null) { + parentPolicy = getParentType().getType(); + } + else { + parentPolicy = null; + } + if(parentPolicy != null) { + while(parentPolicy != null && !parentPolicy.equals("tosca.policies.Root")) { + policies.put(parentPolicy, TOSCA_DEF.get(parentPolicy)); + parentPolicy = (String) + ((LinkedHashMap)policies.get(parentPolicy)).get("derived_from);"); + } + } + return policies; + } + + public String getType() { + return type; + } + + public PolicyType getParentType() { + // Return a policy statefulentity of this node is derived from + if(defs == null) { + return null; + } + String ppolicyEntity = derivedFrom(defs); + if(ppolicyEntity != null) { + return new PolicyType(ppolicyEntity,customDef); + } + return null; + } + + public Object getPolicy(String name) { + // Return the definition of a policy field by name + if(defs != null && defs.get(name) != null) { + return defs.get(name); + } + return null; + } + + public ArrayList getTargets() { + // Return targets + return targetsList; + } + + public String getDescription() { + return policyDescription; + } + + public Object getVersion() { + return policyVersion; + } + + private void _validateKeys() { + for(String key: defs.keySet()) { + boolean bFound = false; + for(String sect: SECTIONS) { + if(key.equals(sect)) { + bFound = true; + break; + } + } + if(!bFound) { + ThreadLocalsHolder.getCollector().appendException(String.format( + "UnknownFieldError: Policy \"%s\" contains unknown field \"%s\"", + type,key)); + } + } + } + + private void _validateTargets(ArrayList _targetsList, + LinkedHashMap _customDef) { + for(String nodetype: _targetsList) { + if(_customDef.get(nodetype) == null) { + ThreadLocalsHolder.getCollector().appendException(String.format( + "InvalidTypeError: \"%s\" defined in targets for policy \"%s\"", + nodetype,type)); + + } + } + } + + private void _validateMetadata(LinkedHashMap _metaData) { + String mtype = 
(String)_metaData.get("type"); + if(mtype != null && !mtype.equals("map") && !mtype.equals("tosca:map")) { + ThreadLocalsHolder.getCollector().appendException(String.format( + "InvalidTypeError: \"%s\" defined in policy for metadata", + mtype)); + } + for(String entrySchema: metaData.keySet()) { + Object estob = metaData.get(entrySchema); + if(estob instanceof LinkedHashMap) { + String est = (String) + ((LinkedHashMap)estob).get("type"); + if(!est.equals("string")) { + ThreadLocalsHolder.getCollector().appendException(String.format( + "InvalidTypeError: \"%s\" defined in policy for metadata \"%s\"", + est,entrySchema)); + } + } + } + } + +} + +/*python + +from toscaparser.common.exception import ExceptionCollector +from toscaparser.common.exception import InvalidTypeError +from toscaparser.common.exception import UnknownFieldError +from toscaparser.elements.statefulentitytype import StatefulEntityType +from toscaparser.utils.validateutils import TOSCAVersionProperty + + +class PolicyType(StatefulEntityType): + + '''TOSCA built-in policies type.''' + SECTIONS = (DERIVED_FROM, METADATA, PROPERTIES, VERSION, DESCRIPTION, TARGETS) = \ + ('derived_from', 'metadata', 'properties', 'version', + 'description', 'targets') + + def __init__(self, ptype, custom_def=None): + super(PolicyType, self).__init__(ptype, self.POLICY_PREFIX, + custom_def) + self.type = ptype + self.custom_def = custom_def + self._validate_keys() + + self.meta_data = None + if self.METADATA in self.defs: + self.meta_data = self.defs[self.METADATA] + self._validate_metadata(self.meta_data) + + self.properties = None + if self.PROPERTIES in self.defs: + self.properties = self.defs[self.PROPERTIES] + self.parent_policies = self._get_parent_policies() + + self.policy_version = None + if self.VERSION in self.defs: + self.policy_version = TOSCAVersionProperty( + self.defs[self.VERSION]).get_version() + + self.policy_description = self.defs[self.DESCRIPTION] \ + if self.DESCRIPTION in self.defs else None + + 
self.targets_list = None + if self.TARGETS in self.defs: + self.targets_list = self.defs[self.TARGETS] + self._validate_targets(self.targets_list, custom_def) + + def _get_parent_policies(self): + policies = {} + parent_policy = self.parent_type.type if self.parent_type else None + if parent_policy: + while parent_policy != 'tosca.policies.Root': + policies[parent_policy] = self.TOSCA_DEF[parent_policy] + parent_policy = policies[parent_policy]['derived_from'] + return policies + + @property + def parent_type(self): + '''Return a policy statefulentity of this node is derived from.''' + if not hasattr(self, 'defs'): + return None + ppolicy_entity = self.derived_from(self.defs) + if ppolicy_entity: + return PolicyType(ppolicy_entity, self.custom_def) + + def get_policy(self, name): + '''Return the definition of a policy field by name.''' + if name in self.defs: + return self.defs[name] + + @property + def targets(self): + '''Return targets.''' + return self.targets_list + + @property + def description(self): + return self.policy_description + + @property + def version(self): + return self.policy_version + + def _validate_keys(self): + for key in self.defs.keys(): + if key not in self.SECTIONS: + ExceptionCollector.appendException( + UnknownFieldError(what='Policy "%s"' % self.type, + field=key)) + + def _validate_targets(self, targets_list, custom_def): + for nodetype in targets_list: + if nodetype not in custom_def: + ExceptionCollector.appendException( + InvalidTypeError(what='"%s" defined in targets for ' + 'policy "%s"' % (nodetype, self.type))) + + def _validate_metadata(self, meta_data): + if not meta_data.get('type') in ['map', 'tosca:map']: + ExceptionCollector.appendException( + InvalidTypeError(what='"%s" defined in policy for ' + 'metadata' % (meta_data.get('type')))) + + for entry_schema, entry_schema_type in meta_data.items(): + if isinstance(entry_schema_type, dict) and not \ + entry_schema_type.get('type') == 'string': + 
ExceptionCollector.appendException( + InvalidTypeError(what='"%s" defined in policy for ' + 'metadata "%s"' + % (entry_schema_type.get('type'), + entry_schema))) +*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/PortSpec.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/PortSpec.java new file mode 100644 index 0000000..8d490ee --- /dev/null +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/PortSpec.java @@ -0,0 +1,160 @@ +package org.openecomp.sdc.toscaparser.api.elements; + +import java.util.LinkedHashMap; + +import org.openecomp.sdc.toscaparser.api.DataEntity; +import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; +import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; +import org.openecomp.sdc.toscaparser.api.utils.ValidateUtils; + +public class PortSpec { + // Parent class for tosca.datatypes.network.PortSpec type + + private static final String SHORTNAME = "PortSpec"; + private static final String TYPE_URI = "tosca.datatypes.network." + SHORTNAME; + + private static final String PROTOCOL = "protocol"; + private static final String SOURCE = "source"; + private static final String SOURCE_RANGE = "source_range"; + private static final String TARGET = "target"; + private static final String TARGET_RANGE = "target_range"; + + private static final String PROPERTY_NAMES[] = { + PROTOCOL, SOURCE, SOURCE_RANGE, + TARGET, TARGET_RANGE + }; + + // todo(TBD) May want to make this a subclass of DataType + // and change init method to set PortSpec's properties + public PortSpec() { + + } + + // The following additional requirements MUST be tested: + // 1) A valid PortSpec MUST have at least one of the following properties: + // target, target_range, source or source_range. + // 2) A valid PortSpec MUST have a value for the source property that + // is within the numeric range specified by the property source_range + // when source_range is specified. 
+ // 3) A valid PortSpec MUST have a value for the target property that is + // within the numeric range specified by the property target_range + // when target_range is specified. + public static void validateAdditionalReq(Object _properties, + String propName, + LinkedHashMap custom_def) { + + try { + LinkedHashMap properties = (LinkedHashMap)_properties; + Object source = properties.get(PortSpec.SOURCE); + Object sourceRange = properties.get(PortSpec.SOURCE_RANGE); + Object target = properties.get(PortSpec.TARGET); + Object targetRange = properties.get(PortSpec.TARGET_RANGE); + + // verify one of the specified values is set + if(source == null && sourceRange == null && + target == null && targetRange == null) { + ThreadLocalsHolder.getCollector().appendException(String.format( + "InvalidTypeAdditionalRequirementsError: Additional requirements for type \"%s\" not met", + TYPE_URI)); + } + // Validate source value is in specified range + if(source != null && sourceRange != null) { + ValidateUtils.validateValueInRange(source,sourceRange,SOURCE); + } + else { + DataEntity portdef = new DataEntity("PortDef", source, null, SOURCE); + portdef.validate(); + } + // Validate target value is in specified range + if(target != null && targetRange != null) { + ValidateUtils.validateValueInRange(target,targetRange,SOURCE); + } + else { + DataEntity portdef = new DataEntity("PortDef", source, null, TARGET); + portdef.validate(); + } + } + catch(Exception e) { + ThreadLocalsHolder.getCollector().appendException(String.format( + "ValueError: \"%s\" do not meet requirements for type \"%s\"", + _properties.toString(),SHORTNAME)); + } + } + +} + +/*python + +from toscaparser.common.exception import ExceptionCollector +from toscaparser.common.exception import InvalidTypeAdditionalRequirementsError +from toscaparser.utils.gettextutils import _ +import org.openecomp.sdc.toscaparser.api.utils.validateutils as validateutils + +log = logging.getLogger('tosca') + + +class PortSpec(object): 
+ '''Parent class for tosca.datatypes.network.PortSpec type.''' + + SHORTNAME = 'PortSpec' + TYPE_URI = 'tosca.datatypes.network.' + SHORTNAME + + PROPERTY_NAMES = ( + PROTOCOL, SOURCE, SOURCE_RANGE, + TARGET, TARGET_RANGE + ) = ( + 'protocol', 'source', 'source_range', + 'target', 'target_range' + ) + + # TODO(TBD) May want to make this a subclass of DataType + # and change init method to set PortSpec's properties + def __init__(self): + pass + + # The following additional requirements MUST be tested: + # 1) A valid PortSpec MUST have at least one of the following properties: + # target, target_range, source or source_range. + # 2) A valid PortSpec MUST have a value for the source property that + # is within the numeric range specified by the property source_range + # when source_range is specified. + # 3) A valid PortSpec MUST have a value for the target property that is + # within the numeric range specified by the property target_range + # when target_range is specified. + @staticmethod + def validate_additional_req(properties, prop_name, custom_def=None, ): + try: + source = properties.get(PortSpec.SOURCE) + source_range = properties.get(PortSpec.SOURCE_RANGE) + target = properties.get(PortSpec.TARGET) + target_range = properties.get(PortSpec.TARGET_RANGE) + + # verify one of the specified values is set + if source is None and source_range is None and \ + target is None and target_range is None: + ExceptionCollector.appendException( + InvalidTypeAdditionalRequirementsError( + type=PortSpec.TYPE_URI)) + # Validate source value is in specified range + if source and source_range: + validateutils.validate_value_in_range(source, source_range, + PortSpec.SOURCE) + else: + from toscaparser.dataentity import DataEntity + portdef = DataEntity('PortDef', source, None, PortSpec.SOURCE) + portdef.validate() + # Validate target value is in specified range + if target and target_range: + validateutils.validate_value_in_range(target, target_range, + PortSpec.TARGET) + else: 
+ from toscaparser.dataentity import DataEntity + portdef = DataEntity('PortDef', source, None, PortSpec.TARGET) + portdef.validate() + except Exception: + msg = _('"%(value)s" do not meet requirements ' + 'for type "%(type)s".') \ + % {'value': properties, 'type': PortSpec.SHORTNAME} + ExceptionCollector.appendException( + ValueError(msg)) +*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/PropertyDef.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/PropertyDef.java new file mode 100644 index 0000000..c139eb6 --- /dev/null +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/PropertyDef.java @@ -0,0 +1,231 @@ +package org.openecomp.sdc.toscaparser.api.elements; + +import java.util.LinkedHashMap; +import java.util.Map; + +import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; +import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +public class PropertyDef { + + private static final String PROPERTY_KEYNAME_DEFAULT = "default"; + private static final String PROPERTY_KEYNAME_REQUIRED = "required"; + private static final String PROPERTY_KEYNAME_STATUS = "status"; + private static final String VALID_PROPERTY_KEYNAMES[] = { + PROPERTY_KEYNAME_DEFAULT, + PROPERTY_KEYNAME_REQUIRED, + PROPERTY_KEYNAME_STATUS}; + + private static final boolean PROPERTY_REQUIRED_DEFAULT = true; + + private static final String VALID_REQUIRED_VALUES[] = {"true", "false"}; + + private static final String PROPERTY_STATUS_SUPPORTED = "supported"; + private static final String PROPERTY_STATUS_EXPERIMENTAL = "experimental"; + private static final String VALID_STATUS_VALUES[] = { + PROPERTY_STATUS_SUPPORTED, PROPERTY_STATUS_EXPERIMENTAL}; + + private static final String PROPERTY_STATUS_DEFAULT = PROPERTY_STATUS_SUPPORTED; + + private String name; + private Object value; + private LinkedHashMap schema; + private String _status; + private boolean _required; + + public PropertyDef(String pdName, 
Object pdValue, + LinkedHashMap pdSchema) { + name = pdName; + value = pdValue; + schema = pdSchema; + _status = PROPERTY_STATUS_DEFAULT; + _required = PROPERTY_REQUIRED_DEFAULT; + + if(schema != null) { + // Validate required 'type' property exists + if(schema.get("type") == null) { + //msg = (_('Schema definition of "%(pname)s" must have a "type" ' + // 'attribute.') % dict(pname=self.name)) + ThreadLocalsHolder.getCollector().appendException(String.format( + "InvalidSchemaError: Schema definition of \"%s\" must have a \"type\" attribute",name)); + } + _loadRequiredAttrFromSchema(); + _loadStatusAttrFromSchema(); + } + } + + public Object getDefault() { + if(schema != null) { + for(Map.Entry me: schema.entrySet()) { + if(me.getKey().equals(PROPERTY_KEYNAME_DEFAULT)) { + return me.getValue(); + } + } + } + return null; + } + + public boolean isRequired() { + return _required; + } + + private void _loadRequiredAttrFromSchema() { + // IF 'required' keyname exists verify it's a boolean, + // if so override default + Object val = schema.get(PROPERTY_KEYNAME_REQUIRED); + if(val != null) { + if(val instanceof Boolean) { + _required = (boolean)val; + } + else { + //valid_values = ', '.join(self.VALID_REQUIRED_VALUES) + //attr = self.PROPERTY_KEYNAME_REQUIRED + //TOSCAException.generate_inv_schema_property_error(self, + // attr, + // value, + // valid_values) + ThreadLocalsHolder.getCollector().appendException(String.format( + "Schema definition of \"%s\" has \"required\" attribute with an invalid value", + name)); + } + } + } + + public String getStatus() { + return _status; + } + + private void _loadStatusAttrFromSchema() { + // IF 'status' keyname exists verify it's a boolean, + // if so override default + String sts = (String)schema.get(PROPERTY_KEYNAME_STATUS); + if(sts != null) { + boolean bFound = false; + for(String vsv: VALID_STATUS_VALUES) { + if(vsv.equals(sts)) { + bFound = true; + break; + } + } + if(bFound) { + _status = sts; + } + else { + //valid_values = 
', '.join(self.VALID_STATUS_VALUES) + //attr = self.PROPERTY_KEYNAME_STATUS + //TOSCAException.generate_inv_schema_property_error(self, + // attr, + // value, + // valid_values) + ThreadLocalsHolder.getCollector().appendWarning(String.format( + "Schema definition of \"%s\" has \"status\" attribute with an invalid value", + name)); + } + } + } + + public String getName() { + return name; + } + + public LinkedHashMap getSchema() { + return schema; + } + + public Object getPDValue() { + // there's getValue in EntityType... + return value; + } + +} +/*python + +from toscaparser.common.exception import ExceptionCollector +from toscaparser.common.exception import InvalidSchemaError +from toscaparser.common.exception import TOSCAException +from toscaparser.utils.gettextutils import _ + + +class PropertyDef(object): + '''TOSCA built-in Property type.''' + + VALID_PROPERTY_KEYNAMES = (PROPERTY_KEYNAME_DEFAULT, + PROPERTY_KEYNAME_REQUIRED, + PROPERTY_KEYNAME_STATUS) = \ + ('default', 'required', 'status') + + PROPERTY_REQUIRED_DEFAULT = True + + VALID_REQUIRED_VALUES = ['true', 'false'] + VALID_STATUS_VALUES = (PROPERTY_STATUS_SUPPORTED, + PROPERTY_STATUS_EXPERIMENTAL) = \ + ('supported', 'experimental') + + PROPERTY_STATUS_DEFAULT = PROPERTY_STATUS_SUPPORTED + + def __init__(self, name, value=None, schema=None): + self.name = name + self.value = value + self.schema = schema + self._status = self.PROPERTY_STATUS_DEFAULT + self._required = self.PROPERTY_REQUIRED_DEFAULT + + # Validate required 'type' property exists + try: + self.schema['type'] + except KeyError: + msg = (_('Schema definition of "%(pname)s" must have a "type" ' + 'attribute.') % dict(pname=self.name)) + ExceptionCollector.appendException( + InvalidSchemaError(message=msg)) + + if self.schema: + self._load_required_attr_from_schema() + self._load_status_attr_from_schema() + + @property + def default(self): + if self.schema: + for prop_key, prop_value in self.schema.items(): + if prop_key == 
self.PROPERTY_KEYNAME_DEFAULT: + return prop_value + return None + + @property + def required(self): + return self._required + + def _load_required_attr_from_schema(self): + # IF 'required' keyname exists verify it's a boolean, + # if so override default + if self.PROPERTY_KEYNAME_REQUIRED in self.schema: + value = self.schema[self.PROPERTY_KEYNAME_REQUIRED] + if isinstance(value, bool): + self._required = value + else: + valid_values = ', '.join(self.VALID_REQUIRED_VALUES) + attr = self.PROPERTY_KEYNAME_REQUIRED + TOSCAException.generate_inv_schema_property_error(self, + attr, + value, + valid_values) + + @property + def status(self): + return self._status + + def _load_status_attr_from_schema(self): + # IF 'status' keyname exists verify it's a valid value, + # if so override default + if self.PROPERTY_KEYNAME_STATUS in self.schema: + value = self.schema[self.PROPERTY_KEYNAME_STATUS] + if value in self.VALID_STATUS_VALUES: + self._status = value + else: + valid_values = ', '.join(self.VALID_STATUS_VALUES) + attr = self.PROPERTY_KEYNAME_STATUS + TOSCAException.generate_inv_schema_property_error(self, + attr, + value, + valid_values) +*/ diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/RelationshipType.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/RelationshipType.java new file mode 100644 index 0000000..3903941 --- /dev/null +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/RelationshipType.java @@ -0,0 +1,103 @@ +package org.openecomp.sdc.toscaparser.api.elements; + +import java.util.LinkedHashMap; + +import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; +import org.openecomp.sdc.toscaparser.api.elements.EntityType; +import org.openecomp.sdc.toscaparser.api.elements.StatefulEntityType; +import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +public class RelationshipType extends StatefulEntityType { + + private static final String DERIVED_FROM = "derived_from"; + private static final 
String VALID_TARGET_TYPES = "valid_target_types"; + private static final String INTERFACES = "interfaces"; + private static final String ATTRIBUTES = "attributes"; + private static final String PROPERTIES = "properties"; + private static final String DESCRIPTION = "description"; + private static final String VERSION = "version"; + private static final String CREDENTIAL = "credential"; + + private static final String SECTIONS[] = { + DERIVED_FROM, VALID_TARGET_TYPES, INTERFACES, + ATTRIBUTES, PROPERTIES, DESCRIPTION, VERSION, CREDENTIAL}; + + private String capabilityName; + private LinkedHashMap customDef; + + public RelationshipType(String _type, String _capabilityName, LinkedHashMap _customDef) { + super(_type,RELATIONSHIP_PREFIX,_customDef); + capabilityName = _capabilityName; + customDef = _customDef; + } + + public RelationshipType getParentType() { + // Return a relationship this reletionship is derived from.''' + String prel = derivedFrom(defs); + if(prel != null) { + return new RelationshipType(prel,null,customDef); + } + return null; + } + + public Object getValidTargetTypes() { + return entityValue(defs,"valid_target_types"); + } + + private void _validateKeys() { + for(String key: defs.keySet()) { + boolean bFound = false; + for(int i=0; i< SECTIONS.length; i++) { + if(key.equals(SECTIONS[i])) { + bFound = true; + break; + } + } + if(!bFound) { + ThreadLocalsHolder.getCollector().appendException(String.format( + "UnknownFieldError: Relationshiptype \"%s\" has unknown field \"%s\"",type,key)); + } + } + } +} + +/*python + +from toscaparser.common.exception import ExceptionCollector +from toscaparser.common.exception import UnknownFieldError +from toscaparser.elements.statefulentitytype import StatefulEntityType + + +class RelationshipType(StatefulEntityType): + '''TOSCA built-in relationship type.''' + SECTIONS = (DERIVED_FROM, VALID_TARGET_TYPES, INTERFACES, + ATTRIBUTES, PROPERTIES, DESCRIPTION, VERSION, + CREDENTIAL) = ('derived_from', 
'valid_target_types', + 'interfaces', 'attributes', 'properties', + 'description', 'version', 'credential') + + def __init__(self, type, capability_name=None, custom_def=None): + super(RelationshipType, self).__init__(type, self.RELATIONSHIP_PREFIX, + custom_def) + self.capability_name = capability_name + self.custom_def = custom_def + self._validate_keys() + + @property + def parent_type(self): + '''Return a relationship this reletionship is derived from.''' + prel = self.derived_from(self.defs) + if prel: + return RelationshipType(prel, self.custom_def) + + @property + def valid_target_types(self): + return self.entity_value(self.defs, 'valid_target_types') + + def _validate_keys(self): + for key in self.defs.keys(): + if key not in self.SECTIONS: + ExceptionCollector.appendException( + UnknownFieldError(what='Relationshiptype "%s"' % self.type, + field=key)) +*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/ScalarUnit.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/ScalarUnit.java new file mode 100644 index 0000000..de18cd6 --- /dev/null +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/ScalarUnit.java @@ -0,0 +1,262 @@ +package org.openecomp.sdc.toscaparser.api.elements; + +import java.util.HashMap; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; +import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; +import org.openecomp.sdc.toscaparser.api.utils.ValidateUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public abstract class ScalarUnit { + + private static Logger log = LoggerFactory.getLogger(ScalarUnit.class.getName()); + + private static final String SCALAR_UNIT_SIZE = "scalar-unit.size"; + private static final String SCALAR_UNIT_FREQUENCY = "scalar-unit.frequency"; + private static final String SCALAR_UNIT_TIME = "scalar-unit.time"; + + public static final 
String SCALAR_UNIT_TYPES[] = { + SCALAR_UNIT_SIZE, SCALAR_UNIT_FREQUENCY, SCALAR_UNIT_TIME + }; + + private Object value; + protected HashMap SCALAR_UNIT_DICT; + protected String SCALAR_UNIT_DEFAULT; + + public ScalarUnit(Object _value) { + value = _value; + SCALAR_UNIT_DICT = new HashMap<>(); + SCALAR_UNIT_DEFAULT = ""; + } + + + private String _checkUnitInScalarStandardUnits(String inputUnit) { + // Check whether the input unit is following specified standard + + // If unit is not following specified standard, convert it to standard + // unit after displaying a warning message. + + if(SCALAR_UNIT_DICT.get(inputUnit) != null) { + return inputUnit; + } + else { + for(String key: SCALAR_UNIT_DICT.keySet()) { + if(key.toUpperCase().equals(inputUnit.toUpperCase())) { + log.debug("ScalarUnit - _checkUnitInScalarStandardUnits - \n" + + "The unit {} does not follow scalar unit standards\n" + + "using {} instead", + inputUnit, key); + return key; + } + } + ThreadLocalsHolder.getCollector().appendWarning(String.format( + "'The unit \"%s\" is not valid. 
Valid units are \n%s", + inputUnit,SCALAR_UNIT_DICT.keySet().toString())); + return inputUnit; + } + } + + public Object validateScalarUnit() { + Pattern pattern = Pattern.compile("([0-9.]+)\\s*(\\w+)"); + Matcher matcher = pattern.matcher(value.toString()); + if(matcher.find()) { + ValidateUtils.strToNum(matcher.group(1)); + String scalarUnit = _checkUnitInScalarStandardUnits(matcher.group(2)); + value = matcher.group(1) + " " + scalarUnit; + } + else { + ThreadLocalsHolder.getCollector().appendException(String.format( + "ValueError: \"%s\" is not a valid scalar-unit",value.toString())); + } + return value; + } + + public double getNumFromScalarUnit(String unit) { + if(unit != null) { + unit = _checkUnitInScalarStandardUnits(unit); + } + else { + unit = SCALAR_UNIT_DEFAULT; + } + Pattern pattern = Pattern.compile("([0-9.]+)\\s*(\\w+)"); + Matcher matcher = pattern.matcher(value.toString()); + if(matcher.find()) { + ValidateUtils.strToNum(matcher.group(1)); + String scalarUnit = _checkUnitInScalarStandardUnits(matcher.group(2)); + value = matcher.group(1) + " " + scalarUnit; + Object on1 = ValidateUtils.strToNum(matcher.group(1)) != null ? ValidateUtils.strToNum(matcher.group(1)) : 0; + Object on2 = SCALAR_UNIT_DICT.get(matcher.group(2)) != null ? SCALAR_UNIT_DICT.get(matcher.group(2)) : 0; + Object on3 = SCALAR_UNIT_DICT.get(unit) != null ? SCALAR_UNIT_DICT.get(unit) : 0; + + Double n1 = new Double(on1.toString()); + Double n2 = new Double(on2.toString()); + Double n3 = new Double(on3.toString()); + double converted = n1 * n2 / n3; + if(Math.abs(converted - Math.round(converted)) < 0.0000000000001 ) { + converted = Math.round(converted); + } + return converted; + } + return 0l; //??? 
+ } + + protected static HashMap scalarunitMapping = _getScalarunitMappings(); + + private static HashMap _getScalarunitMappings() { + HashMap map = new HashMap<>(); + map.put(SCALAR_UNIT_FREQUENCY,"ScalarUnitFrequency"); + map.put(SCALAR_UNIT_SIZE, "ScalarUnitSize"); + map.put(SCALAR_UNIT_TIME, "ScalarUnit_Time"); + return map; + } + + public static ScalarUnit getScalarunitClass(String type,Object val) { + if(type.equals(SCALAR_UNIT_SIZE)) { + return new ScalarUnitSize(val); + } + else if(type.equals(SCALAR_UNIT_TIME)) { + return new ScalarUnitTime(val); + } + else if(type.equals(SCALAR_UNIT_FREQUENCY)) { + return new ScalarUnitFrequency(val); + } + return null; + } + + public static double getScalarunitValue(String type, Object value, String unit) { + if(type.equals(SCALAR_UNIT_SIZE)) { + return (new ScalarUnitSize(value)).getNumFromScalarUnit(unit); + } + if(type.equals(SCALAR_UNIT_TIME)) { + return (new ScalarUnitTime(value)).getNumFromScalarUnit(unit); + } + if(type.equals(SCALAR_UNIT_FREQUENCY)) { + return (new ScalarUnitFrequency(value)).getNumFromScalarUnit(unit); + } + ThreadLocalsHolder.getCollector().appendException(String.format( + "TypeError: \"%s\" is not a valid scalar-unit type",type)); + return 0.0; + } + +} + +/*python + +from toscaparser.common.exception import ExceptionCollector +from toscaparser.utils.gettextutils import _ +from toscaparser.utils import validateutils + +log = logging.getLogger('tosca') + + +class ScalarUnit(object): + '''Parent class for scalar-unit type.''' + + SCALAR_UNIT_TYPES = ( + SCALAR_UNIT_SIZE, SCALAR_UNIT_FREQUENCY, SCALAR_UNIT_TIME + ) = ( + 'scalar-unit.size', 'scalar-unit.frequency', 'scalar-unit.time' + ) + + def __init__(self, value): + self.value = value + + def _check_unit_in_scalar_standard_units(self, input_unit): + """Check whether the input unit is following specified standard + + If unit is not following specified standard, convert it to standard + unit after displaying a warning message. 
+ """ + if input_unit in self.SCALAR_UNIT_DICT.keys(): + return input_unit + else: + for key in self.SCALAR_UNIT_DICT.keys(): + if key.upper() == input_unit.upper(): + log.warning(_('The unit "%(unit)s" does not follow ' + 'scalar unit standards; using "%(key)s" ' + 'instead.') % {'unit': input_unit, + 'key': key}) + return key + msg = (_('The unit "%(unit)s" is not valid. Valid units are ' + '"%(valid_units)s".') % + {'unit': input_unit, + 'valid_units': sorted(self.SCALAR_UNIT_DICT.keys())}) + ExceptionCollector.appendException(ValueError(msg)) + + def validate_scalar_unit(self): + regex = re.compile('([0-9.]+)\s*(\w+)') + try: + result = regex.match(str(self.value)).groups() + validateutils.str_to_num(result[0]) + scalar_unit = self._check_unit_in_scalar_standard_units(result[1]) + self.value = ' '.join([result[0], scalar_unit]) + return self.value + + except Exception: + ExceptionCollector.appendException( + ValueError(_('"%s" is not a valid scalar-unit.') + % self.value)) + + def get_num_from_scalar_unit(self, unit=None): + if unit: + unit = self._check_unit_in_scalar_standard_units(unit) + else: + unit = self.SCALAR_UNIT_DEFAULT + self.validate_scalar_unit() + + regex = re.compile('([0-9.]+)\s*(\w+)') + result = regex.match(str(self.value)).groups() + converted = (float(validateutils.str_to_num(result[0])) + * self.SCALAR_UNIT_DICT[result[1]] + / self.SCALAR_UNIT_DICT[unit]) + if converted - int(converted) < 0.0000000000001: + converted = int(converted) + return converted + + +class ScalarUnit_Size(ScalarUnit): + + SCALAR_UNIT_DEFAULT = 'B' + SCALAR_UNIT_DICT = {'B': 1, 'kB': 1000, 'KiB': 1024, 'MB': 1000000, + 'MiB': 1048576, 'GB': 1000000000, + 'GiB': 1073741824, 'TB': 1000000000000, + 'TiB': 1099511627776} + + +class ScalarUnit_Time(ScalarUnit): + + SCALAR_UNIT_DEFAULT = 'ms' + SCALAR_UNIT_DICT = {'d': 86400, 'h': 3600, 'm': 60, 's': 1, + 'ms': 0.001, 'us': 0.000001, 'ns': 0.000000001} + + +class ScalarUnit_Frequency(ScalarUnit): + + SCALAR_UNIT_DEFAULT = 
'GHz' + SCALAR_UNIT_DICT = {'Hz': 1, 'kHz': 1000, + 'MHz': 1000000, 'GHz': 1000000000} + + +scalarunit_mapping = { + ScalarUnit.SCALAR_UNIT_FREQUENCY: ScalarUnit_Frequency, + ScalarUnit.SCALAR_UNIT_SIZE: ScalarUnit_Size, + ScalarUnit.SCALAR_UNIT_TIME: ScalarUnit_Time, + } + + +def get_scalarunit_class(type): + return scalarunit_mapping.get(type) + + +def get_scalarunit_value(type, value, unit=None): + if type in ScalarUnit.SCALAR_UNIT_TYPES: + ScalarUnit_Class = get_scalarunit_class(type) + return (ScalarUnit_Class(value). + get_num_from_scalar_unit(unit)) + else: + ExceptionCollector.appendException( + TypeError(_('"%s" is not a valid scalar-unit type.') % type)) +*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/ScalarUnitFrequency.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/ScalarUnitFrequency.java new file mode 100644 index 0000000..57a111e --- /dev/null +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/ScalarUnitFrequency.java @@ -0,0 +1,14 @@ +package org.openecomp.sdc.toscaparser.api.elements; + +public class ScalarUnitFrequency extends ScalarUnit { + + public ScalarUnitFrequency(Object value) { + super(value); + SCALAR_UNIT_DEFAULT = "GHz"; + SCALAR_UNIT_DICT.put("Hz",1L); + SCALAR_UNIT_DICT.put("kHz",1000L); + SCALAR_UNIT_DICT.put("MHz",1000000L); + SCALAR_UNIT_DICT.put("GHz",1000000000L); + } + +} diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/ScalarUnitSize.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/ScalarUnitSize.java new file mode 100644 index 0000000..72e7c33 --- /dev/null +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/ScalarUnitSize.java @@ -0,0 +1,19 @@ +package org.openecomp.sdc.toscaparser.api.elements; + +public class ScalarUnitSize extends ScalarUnit { + + public ScalarUnitSize(Object value) { + super(value); + + SCALAR_UNIT_DEFAULT = "B"; + SCALAR_UNIT_DICT.put("B",1L); + 
SCALAR_UNIT_DICT.put("kB",1000L); + SCALAR_UNIT_DICT.put("kiB",1024L); + SCALAR_UNIT_DICT.put("MB",1000000L); + SCALAR_UNIT_DICT.put("MiB",1048576L); + SCALAR_UNIT_DICT.put("GB",1000000000L); + SCALAR_UNIT_DICT.put("GiB",1073741824L); + SCALAR_UNIT_DICT.put("TB",1000000000000L); + SCALAR_UNIT_DICT.put("TiB",1099511627776L); + } +} diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/ScalarUnitTime.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/ScalarUnitTime.java new file mode 100644 index 0000000..5cde10a --- /dev/null +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/ScalarUnitTime.java @@ -0,0 +1,17 @@ +package org.openecomp.sdc.toscaparser.api.elements; + +public class ScalarUnitTime extends ScalarUnit { + + public ScalarUnitTime(Object value) { + super(value); + SCALAR_UNIT_DEFAULT = "ms"; + SCALAR_UNIT_DICT.put("d",86400L); + SCALAR_UNIT_DICT.put("h",3600L); + SCALAR_UNIT_DICT.put("m",60L); + SCALAR_UNIT_DICT.put("s",1L); + SCALAR_UNIT_DICT.put("ms",0.001); + SCALAR_UNIT_DICT.put("us",0.000001); + SCALAR_UNIT_DICT.put("ns",0.000000001); + } + +} diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/StatefulEntityType.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/StatefulEntityType.java new file mode 100644 index 0000000..5ab816f --- /dev/null +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/StatefulEntityType.java @@ -0,0 +1,220 @@ +package org.openecomp.sdc.toscaparser.api.elements; + +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.Map; + +import org.openecomp.sdc.toscaparser.api.UnsupportedType; +import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; +import org.openecomp.sdc.toscaparser.api.elements.AttributeDef; +import org.openecomp.sdc.toscaparser.api.elements.EntityType; +import org.openecomp.sdc.toscaparser.api.elements.PropertyDef; +import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; + + +public 
class StatefulEntityType extends EntityType { + // Class representing TOSCA states + + public static final String interfacesNodeLifecycleOperations[] = { + "create", "configure", "start", "stop", "delete"}; + + public static final String interfacesRelationshipConfigureOperations[] = { + "post_configure_source", "post_configure_target", "add_target", "remove_target"}; + + public StatefulEntityType() { + // void constructor for subclasses that don't want super + } + + @SuppressWarnings("unchecked") + public StatefulEntityType(String entityType, String prefix, LinkedHashMap customDef) { + + String entireEntityType = entityType; + if(UnsupportedType.validateType(entireEntityType)) { + defs = null; + } + else { + if(entityType.startsWith(TOSCA + ":")) { + entityType = entityType.substring(TOSCA.length()+1); + entireEntityType = prefix + entityType; + } + if(!entityType.startsWith(TOSCA)) { + entireEntityType = prefix + entityType; + } + if(TOSCA_DEF.get(entireEntityType) != null) { + defs = (LinkedHashMap )TOSCA_DEF.get(entireEntityType); + entityType = entireEntityType; + } + else if(customDef != null && customDef.get(entityType) != null) { + defs = (LinkedHashMap )customDef.get(entityType); + } + else{ + defs = null; + ThreadLocalsHolder.getCollector().appendException(String.format( + "InvalidTypeError: \"%s\" is not a valid type",entityType)); + } + } + type = entityType; + } + + @SuppressWarnings("unchecked") + public ArrayList getPropertiesDefObjects() { + // Return a list of property definition objects + ArrayList properties = new ArrayList(); + LinkedHashMap props = (LinkedHashMap)getDefinition(PROPERTIES); + if(props != null) { + for(Map.Entry me: props.entrySet()) { + String pdname = me.getKey(); + Object to = me.getValue(); + if(to == null || !(to instanceof LinkedHashMap)) { + String s = to == null ? 
"null" : to.getClass().getSimpleName(); + ThreadLocalsHolder.getCollector().appendException(String.format( + "Unexpected type error: property \"%s\" has type \"%s\" (expected dict)",pdname,s)); + continue; + } + LinkedHashMap pdschema = (LinkedHashMap)to; + properties.add(new PropertyDef(pdname,null,pdschema)); + } + } + return properties; + } + + public LinkedHashMap getPropertiesDef() { + LinkedHashMap pds = new LinkedHashMap(); + for(PropertyDef pd: getPropertiesDefObjects()) { + pds.put(pd.getName(),pd); + } + return pds; + } + + public PropertyDef getPropertyDefValue(String name) { + // Return the property definition associated with a given name + PropertyDef pd = null; + LinkedHashMap propsDef = getPropertiesDef(); + if(propsDef != null) { + pd = propsDef.get(name); + } + return pd; + } + + public ArrayList getAttributesDefObjects() { + // Return a list of attribute definition objects + @SuppressWarnings("unchecked") + LinkedHashMap attrs = (LinkedHashMap)getValue(ATTRIBUTES,null,true); + ArrayList ads = new ArrayList<>(); + if(attrs != null) { + for(Map.Entry me: attrs.entrySet()) { + String attr = me.getKey(); + @SuppressWarnings("unchecked") + LinkedHashMap adschema = (LinkedHashMap)me.getValue(); + ads.add(new AttributeDef(attr,null,adschema)); + } + } + return ads; + } + + public LinkedHashMap getAttributesDef() { + // Return a dictionary of attribute definition name-object pairs + + LinkedHashMap ads = new LinkedHashMap<>(); + for(AttributeDef ado: getAttributesDefObjects()) { + ads.put(((AttributeDef)ado).getName(),ado); + } + return ads; + } + + public AttributeDef getAttributeDefValue(String name) { + // Return the attribute definition associated with a given name + AttributeDef ad = null; + LinkedHashMap attrsDef = getAttributesDef(); + if(attrsDef != null) { + ad = attrsDef.get(name); + } + return ad; + } + + public String getType() { + return type; + } + } + +/*python + +from toscaparser.common.exception import InvalidTypeError +from 
toscaparser.elements.attribute_definition import AttributeDef +from toscaparser.elements.entity_type import EntityType +from toscaparser.elements.property_definition import PropertyDef +from toscaparser.unsupportedtype import UnsupportedType + + +class StatefulEntityType(EntityType): + '''Class representing TOSCA states.''' + + interfaces_node_lifecycle_operations = ['create', + 'configure', 'start', + 'stop', 'delete'] + + interfaces_relationship_configure_operations = ['post_configure_source', + 'post_configure_target', + 'add_target', + 'remove_target'] + + def __init__(self, entitytype, prefix, custom_def=None): + entire_entitytype = entitytype + if UnsupportedType.validate_type(entire_entitytype): + self.defs = None + else: + if entitytype.startswith(self.TOSCA + ":"): + entitytype = entitytype[(len(self.TOSCA) + 1):] + entire_entitytype = prefix + entitytype + if not entitytype.startswith(self.TOSCA): + entire_entitytype = prefix + entitytype + if entire_entitytype in list(self.TOSCA_DEF.keys()): + self.defs = self.TOSCA_DEF[entire_entitytype] + entitytype = entire_entitytype + elif custom_def and entitytype in list(custom_def.keys()): + self.defs = custom_def[entitytype] + else: + self.defs = None + ExceptionCollector.appendException( + InvalidTypeError(what=entitytype)) + self.type = entitytype + + def get_properties_def_objects(self): + '''Return a list of property definition objects.''' + properties = [] + props = self.get_definition(self.PROPERTIES) + if props: + for prop, schema in props.items(): + properties.append(PropertyDef(prop, None, schema)) + return properties + + def get_properties_def(self): + '''Return a dictionary of property definition name-object pairs.''' + return {prop.name: prop + for prop in self.get_properties_def_objects()} + + def get_property_def_value(self, name): + '''Return the property definition associated with a given name.''' + props_def = self.get_properties_def() + if props_def and name in props_def.keys(): + return 
props_def[name].value + + def get_attributes_def_objects(self): + '''Return a list of attribute definition objects.''' + attrs = self.get_value(self.ATTRIBUTES, parent=True) + if attrs: + return [AttributeDef(attr, None, schema) + for attr, schema in attrs.items()] + return [] + + def get_attributes_def(self): + '''Return a dictionary of attribute definition name-object pairs.''' + return {attr.name: attr + for attr in self.get_attributes_def_objects()} + + def get_attribute_def_value(self, name): + '''Return the attribute definition associated with a given name.''' + attrs_def = self.get_attributes_def() + if attrs_def and name in attrs_def.keys(): + return attrs_def[name].value +*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/TypeValidation.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/TypeValidation.java new file mode 100644 index 0000000..2caf5c4 --- /dev/null +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/TypeValidation.java @@ -0,0 +1,151 @@ +package org.openecomp.sdc.toscaparser.api.elements; + +import java.util.ArrayList; +import java.util.LinkedHashMap; + +import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; +import org.openecomp.sdc.toscaparser.api.extensions.ExtTools; +import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +public class TypeValidation { + + private static final String DEFINITION_VERSION = "tosca_definitions_version"; + private static final String DESCRIPTION = "description"; + private static final String IMPORTS = "imports"; + private static final String DSL_DEFINITIONS = "dsl_definitions"; + private static final String NODE_TYPES = "node_types"; + private static final String REPOSITORIES = "repositories"; + private static final String DATA_TYPES = "data_types"; + private static final String ARTIFACT_TYPES = "artifact_types"; + private static final String GROUP_TYPES = "group_types"; + private static final String 
RELATIONSHIP_TYPES = "relationship_types"; + private static final String CAPABILITY_TYPES = "capability_types"; + private static final String INTERFACE_TYPES = "interface_types"; + private static final String POLICY_TYPES = "policy_types"; + private static final String TOPOLOGY_TEMPLATE = "topology_template"; + //Pavel + private static final String METADATA = "metadata"; + + private String ALLOWED_TYPE_SECTIONS[] = { + DEFINITION_VERSION, DESCRIPTION, IMPORTS, + DSL_DEFINITIONS, NODE_TYPES, REPOSITORIES, + DATA_TYPES, ARTIFACT_TYPES, GROUP_TYPES, + RELATIONSHIP_TYPES, CAPABILITY_TYPES, + INTERFACE_TYPES, POLICY_TYPES, + TOPOLOGY_TEMPLATE, METADATA + }; + + private static ArrayList VALID_TEMPLATE_VERSIONS = _getVTV(); + + private static ArrayList _getVTV() { + ArrayList vtv = new ArrayList<>(); + vtv.add("tosca_simple_yaml_1_0"); + ExtTools exttools = new ExtTools(); + vtv.addAll(exttools.getVersions()); + return vtv; + } + + //private LinkedHashMap customTypes; + private Object importDef; + //private String version; + + public TypeValidation(LinkedHashMap _customTypes, + Object _importDef) { + importDef = _importDef; + _validateTypeKeys(_customTypes); + } + + private void _validateTypeKeys(LinkedHashMap customTypes) { + + String sVersion = (String)customTypes.get(DEFINITION_VERSION); + if(sVersion != null) { + _validateTypeVersion(sVersion); + //version = sVersion; + } + for(String name: customTypes.keySet()) { + boolean bFound = false; + for(String ats: ALLOWED_TYPE_SECTIONS) { + if(name.equals(ats)) { + bFound = true; + break; + } + } + if(!bFound) { + ThreadLocalsHolder.getCollector().appendException(String.format( + "UnknownFieldError: Template \"%s\" contains unknown field \"%s\"", + importDef.toString(),name)); + } + } + } + + private void _validateTypeVersion(String sVersion) { + boolean bFound = false; + String allowed = ""; + for(String atv: VALID_TEMPLATE_VERSIONS) { + allowed += "\"" + atv + "\" "; + if(sVersion.equals(atv)) { + bFound = true; + break; + 
} + } + if(!bFound) { + ThreadLocalsHolder.getCollector().appendException(String.format( + "InvalidTemplateVersion: version \"%s\" in \"%s\" is not supported\n" + + "Allowed versions: [%s]", + sVersion,importDef.toString(),allowed)); + } + } +} + +/*python + +from toscaparser.common.exception import ExceptionCollector +from toscaparser.common.exception import InvalidTemplateVersion +from toscaparser.common.exception import UnknownFieldError +from toscaparser.extensions.exttools import ExtTools + + +class TypeValidation(object): + + ALLOWED_TYPE_SECTIONS = (DEFINITION_VERSION, DESCRIPTION, IMPORTS, + DSL_DEFINITIONS, NODE_TYPES, REPOSITORIES, + DATA_TYPES, ARTIFACT_TYPES, GROUP_TYPES, + RELATIONSHIP_TYPES, CAPABILITY_TYPES, + INTERFACE_TYPES, POLICY_TYPES, + TOPOLOGY_TEMPLATE) = \ + ('tosca_definitions_version', 'description', 'imports', + 'dsl_definitions', 'node_types', 'repositories', + 'data_types', 'artifact_types', 'group_types', + 'relationship_types', 'capability_types', + 'interface_types', 'policy_types', 'topology_template') + VALID_TEMPLATE_VERSIONS = ['tosca_simple_yaml_1_0'] + exttools = ExtTools() + VALID_TEMPLATE_VERSIONS.extend(exttools.get_versions()) + + def __init__(self, custom_types, import_def): + self.import_def = import_def + self._validate_type_keys(custom_types) + + def _validate_type_keys(self, custom_type): + version = custom_type[self.DEFINITION_VERSION] \ + if self.DEFINITION_VERSION in custom_type \ + else None + if version: + self._validate_type_version(version) + self.version = version + + for name in custom_type: + if name not in self.ALLOWED_TYPE_SECTIONS: + ExceptionCollector.appendException( +# UnknownFieldError(what='Template ' + (self.import_def), + UnknownFieldError(what= (self.import_def), + field=name)) + + def _validate_type_version(self, version): + if version not in self.VALID_TEMPLATE_VERSIONS: + ExceptionCollector.appendException( + InvalidTemplateVersion( +# what=version + ' in ' + self.import_def, + 
what=self.import_def, + valid_versions=', '. join(self.VALID_TEMPLATE_VERSIONS))) +*/ diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Constraint.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Constraint.java new file mode 100644 index 0000000..3c60a66 --- /dev/null +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Constraint.java @@ -0,0 +1,237 @@ +package org.openecomp.sdc.toscaparser.api.elements.constraints; + +import java.util.ArrayList; +import java.util.LinkedHashMap; + +import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; +import org.openecomp.sdc.toscaparser.api.elements.ScalarUnit; +import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +public abstract class Constraint { + + // Parent class for constraints for a Property or Input + + protected static final String EQUAL = "equal"; + protected static final String GREATER_THAN = "greater_than"; + protected static final String GREATER_OR_EQUAL = "greater_or_equal"; + protected static final String LESS_THAN = "less_than"; + protected static final String LESS_OR_EQUAL = "less_or_equal"; + protected static final String IN_RANGE = "in_range"; + protected static final String VALID_VALUES = "valid_values"; + protected static final String LENGTH = "length"; + protected static final String MIN_LENGTH = "min_length"; + protected static final String MAX_LENGTH = "max_length"; + protected static final String PATTERN = "pattern"; + + protected static final String CONSTRAINTS[] = { + EQUAL, GREATER_THAN,GREATER_OR_EQUAL, LESS_THAN, LESS_OR_EQUAL, + IN_RANGE, VALID_VALUES, LENGTH, MIN_LENGTH, MAX_LENGTH, PATTERN}; + + @SuppressWarnings("unchecked") + public static Constraint factory(String constraintClass,String propname,String proptype,Object constraint) { + + // a factory for the different Constraint classes + // replaces Python's __new__() usage + + if(!(constraint instanceof LinkedHashMap) || + 
((LinkedHashMap)constraint).size() != 1) { + ThreadLocalsHolder.getCollector().appendException( + "InvalidSchemaError: Invalid constraint schema " + constraint.toString()); + } + + if(constraintClass.equals(EQUAL)) { + return new Equal(propname,proptype,constraint); + } + else if(constraintClass.equals(GREATER_THAN)) { + return new GreaterThan(propname,proptype,constraint); + } + else if(constraintClass.equals(GREATER_OR_EQUAL)) { + return new GreaterOrEqual(propname,proptype,constraint); + } + else if(constraintClass.equals(LESS_THAN)) { + return new LessThan(propname,proptype,constraint); + } + else if(constraintClass.equals(LESS_OR_EQUAL)) { + return new LessOrEqual(propname,proptype,constraint); + } + else if(constraintClass.equals(IN_RANGE)) { + return new InRange(propname,proptype,constraint); + } + else if(constraintClass.equals(VALID_VALUES)) { + return new ValidValues(propname,proptype,constraint); + } + else if(constraintClass.equals(LENGTH)) { + return new Length(propname,proptype,constraint); + } + else if(constraintClass.equals(MIN_LENGTH)) { + return new MinLength(propname,proptype,constraint); + } + else if(constraintClass.equals(MAX_LENGTH)) { + return new MaxLength(propname,proptype,constraint); + } + else if(constraintClass.equals(PATTERN)) { + return new Pattern(propname,proptype,constraint); + } + else { + ThreadLocalsHolder.getCollector().appendException(String.format( + "InvalidSchemaError: Invalid property \"%s\"",constraintClass)); + return null; + } + } + + protected String constraintKey = "TBD"; + protected ArrayList validTypes = new ArrayList<>(); + protected ArrayList validPropTypes = new ArrayList<>(); + + protected String propertyName; + protected String propertyType; + protected Object constraintValue; + protected Object constraintValueMsg; + protected Object valueMsg; + + @SuppressWarnings("unchecked") + public Constraint(String propname,String proptype,Object constraint) { + + _setValues(); + + propertyName = propname; + 
propertyType = proptype; + constraintValue = ((LinkedHashMap)constraint).get(constraintKey); + constraintValueMsg = constraintValue; + boolean bFound = false; + for(String s: ScalarUnit.SCALAR_UNIT_TYPES) { + if(s.equals(propertyType)) { + bFound = true; + break; + } + } + if(bFound) { + constraintValue = _getScalarUnitConstraintValue(); + } + // check if constraint is valid for property type + bFound = false; + for(String s: validPropTypes) { + if(s.equals(propertyType)) { + bFound = true; + break; + } + } + if(!bFound) { + ThreadLocalsHolder.getCollector().appendException(String.format( + "InvalidSchemaError: Property \"%s\" is not valid for data type \"%s\"", + constraintKey,propertyType)); + } + } + + @SuppressWarnings("unchecked") + private Object _getScalarUnitConstraintValue() { + // code differs from Python because of class creation + if(constraintValue instanceof ArrayList) { + ArrayList ret = new ArrayList<>(); + for(Object v: (ArrayList)constraintValue) { + ScalarUnit su = ScalarUnit.getScalarunitClass(propertyType,v); + ret.add(su.getNumFromScalarUnit(null)); + } + return ret; + } + else { + ScalarUnit su = ScalarUnit.getScalarunitClass(propertyType,constraintValue); + return su.getNumFromScalarUnit(null); + } + } + + public void validate(Object value) { + valueMsg = value; + boolean bFound = false; + for(String s: ScalarUnit.SCALAR_UNIT_TYPES) { + if(s.equals(propertyType)) { + bFound = true; + break; + } + } + if(bFound) { + value = ScalarUnit.getScalarunitValue(propertyType,value,null); + } + if(!_isValid(value)) { + ThreadLocalsHolder.getCollector().appendWarning("ValidationError: " + _errMsg(value)); + } + } + + protected abstract boolean _isValid(Object value); + + protected abstract void _setValues(); + + protected abstract String _errMsg(Object value); + +} + +/*python + +class Constraint(object): + '''Parent class for constraints for a Property or Input.''' + + CONSTRAINTS = (EQUAL, GREATER_THAN, + GREATER_OR_EQUAL, LESS_THAN, LESS_OR_EQUAL, 
IN_RANGE, + VALID_VALUES, LENGTH, MIN_LENGTH, MAX_LENGTH, PATTERN) = \ + ('equal', 'greater_than', 'greater_or_equal', 'less_than', + 'less_or_equal', 'in_range', 'valid_values', 'length', + 'min_length', 'max_length', 'pattern') + + def __new__(cls, property_name, property_type, constraint): + if cls is not Constraint: + return super(Constraint, cls).__new__(cls) + + if(not isinstance(constraint, collections.Mapping) or + len(constraint) != 1): + ExceptionCollector.appendException( + InvalidSchemaError(message=_('Invalid constraint schema.'))) + + for type in constraint.keys(): + ConstraintClass = get_constraint_class(type) + if not ConstraintClass: + msg = _('Invalid property "%s".') % type + ExceptionCollector.appendException( + InvalidSchemaError(message=msg)) + + return ConstraintClass(property_name, property_type, constraint) + + def __init__(self, property_name, property_type, constraint): + self.property_name = property_name + self.property_type = property_type + self.constraint_value = constraint[self.constraint_key] + self.constraint_value_msg = self.constraint_value + if self.property_type in scalarunit.ScalarUnit.SCALAR_UNIT_TYPES: + self.constraint_value = self._get_scalarunit_constraint_value() + # check if constraint is valid for property type + if property_type not in self.valid_prop_types: + msg = _('Property "%(ctype)s" is not valid for data type ' + '"%(dtype)s".') % dict( + ctype=self.constraint_key, + dtype=property_type) + ExceptionCollector.appendException(InvalidSchemaError(message=msg)) + + def _get_scalarunit_constraint_value(self): + if self.property_type in scalarunit.ScalarUnit.SCALAR_UNIT_TYPES: + ScalarUnit_Class = (scalarunit. + get_scalarunit_class(self.property_type)) + if isinstance(self.constraint_value, list): + return [ScalarUnit_Class(v).get_num_from_scalar_unit() + for v in self.constraint_value] + else: + return (ScalarUnit_Class(self.constraint_value). 
+ get_num_from_scalar_unit()) + + def _err_msg(self, value): + return _('Property "%s" could not be validated.') % self.property_name + + def validate(self, value): + self.value_msg = value + if self.property_type in scalarunit.ScalarUnit.SCALAR_UNIT_TYPES: + value = scalarunit.get_scalarunit_value(self.property_type, value) + if not self._is_valid(value): + err_msg = self._err_msg(value) + ExceptionCollector.appendException( + ValidationError(message=err_msg)) + + +*/ diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Equal.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Equal.java new file mode 100644 index 0000000..e16cac3 --- /dev/null +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Equal.java @@ -0,0 +1,61 @@ +package org.openecomp.sdc.toscaparser.api.elements.constraints; + +public class Equal extends Constraint { + + protected void _setValues() { + + constraintKey = EQUAL; + + for(String s: Schema.PROPERTY_TYPES) { + validPropTypes.add(s); + } + + } + + public Equal(String name,String type,Object c) { + super(name,type,c); + + } + + protected boolean _isValid(Object val) { + // equality of objects is tricky so we're comparing + // the toString() representation + if(val.toString().equals(constraintValue.toString())) { + return true; + } + return false; + } + + protected String _errMsg(Object value) { + return String.format("The value \"%s\" of property \"%s\" is not equal to \"%s\"", + valueMsg,propertyName,constraintValueMsg); + } + +} + +/*python + +class Equal(Constraint): +"""Constraint class for "equal" + +Constrains a property or parameter to a value equal to ('=') +the value declared. 
+""" + +constraint_key = Constraint.EQUAL + +valid_prop_types = Schema.PROPERTY_TYPES + +def _is_valid(self, value): + if value == self.constraint_value: + return True + + return False + +def _err_msg(self, value): + return (_('The value "%(pvalue)s" of property "%(pname)s" is not ' + 'equal to "%(cvalue)s".') % + dict(pname=self.property_name, + pvalue=self.value_msg, + cvalue=self.constraint_value_msg)) +*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/GreaterOrEqual.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/GreaterOrEqual.java new file mode 100644 index 0000000..021bed3 --- /dev/null +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/GreaterOrEqual.java @@ -0,0 +1,113 @@ +package org.openecomp.sdc.toscaparser.api.elements.constraints; + +import java.util.Date; + +import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; +import org.openecomp.sdc.toscaparser.api.functions.Function; +import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +public class GreaterOrEqual extends Constraint { + // Constraint class for "greater_or_equal" + + // Constrains a property or parameter to a value greater than or equal + // to ('>=') the value declared. 
+ + protected void _setValues() { + + constraintKey = GREATER_OR_EQUAL; + + validTypes.add("Integer"); + validTypes.add("Double"); + validTypes.add("Float"); + // timestamps are loaded as Date objects + validTypes.add("Date"); + //validTypes.add("datetime.date"); + //validTypes.add("datetime.time"); + //validTypes.add("datetime.datetime"); + + validPropTypes.add(Schema.INTEGER); + validPropTypes.add(Schema.FLOAT); + validPropTypes.add(Schema.TIMESTAMP); + validPropTypes.add(Schema.SCALAR_UNIT_SIZE); + validPropTypes.add(Schema.SCALAR_UNIT_FREQUENCY); + validPropTypes.add(Schema.SCALAR_UNIT_TIME); + + } + + public GreaterOrEqual(String name,String type,Object c) { + super(name,type,c); + + if(!validTypes.contains(constraintValue.getClass().getSimpleName())) { + ThreadLocalsHolder.getCollector().appendException("InvalidSchemaError: The property \"greater_or_equal\" expects comparable values"); + } + } + + + + @Override + protected boolean _isValid(Object value) { + if(Function.isFunction(value)) { + return true; + } + + // timestamps + if(value instanceof Date) { + if(constraintValue instanceof Date) { + return !((Date)value).before((Date)constraintValue); + } + return false; + } + // all others + Double n1 = new Double(value.toString()); + Double n2 = new Double(constraintValue.toString()); + return n1 >= n2; + } + + protected String _errMsg(Object value) { + return String.format("The value \"%s\" of property \"%s\" must be greater or equal to \"%s\"", + valueMsg,propertyName,constraintValueMsg); + } +} + +/*python + +class GreaterOrEqual(Constraint): +"""Constraint class for "greater_or_equal" + +Constrains a property or parameter to a value greater than or equal +to ('>=') the value declared. 
+""" + +constraint_key = Constraint.GREATER_OR_EQUAL + +valid_types = (int, float, datetime.date, + datetime.time, datetime.datetime) + +valid_prop_types = (Schema.INTEGER, Schema.FLOAT, Schema.TIMESTAMP, + Schema.SCALAR_UNIT_SIZE, Schema.SCALAR_UNIT_FREQUENCY, + Schema.SCALAR_UNIT_TIME) + +def __init__(self, property_name, property_type, constraint): + super(GreaterOrEqual, self).__init__(property_name, property_type, + constraint) + if not isinstance(self.constraint_value, self.valid_types): + ThreadLocalsHolder.getCollector().appendException( + InvalidSchemaError(message=_('The property ' + '"greater_or_equal" expects ' + 'comparable values.'))) + +def _is_valid(self, value): + if toscaparser.functions.is_function(value) or \ + value >= self.constraint_value: + return True + return False + +def _err_msg(self, value): + return (_('The value "%(pvalue)s" of property "%(pname)s" must be ' + 'greater than or equal to "%(cvalue)s".') % + dict(pname=self.property_name, + pvalue=self.value_msg, + cvalue=self.constraint_value_msg)) + + +*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/GreaterThan.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/GreaterThan.java new file mode 100644 index 0000000..d23d7ce --- /dev/null +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/GreaterThan.java @@ -0,0 +1,102 @@ +package org.openecomp.sdc.toscaparser.api.elements.constraints; + +import java.util.Date; + +import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; +import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +public class GreaterThan extends Constraint { + + @Override + protected void _setValues() { + + constraintKey = GREATER_THAN; + + validTypes.add("Integer"); + validTypes.add("Double"); + validTypes.add("Float"); + // timestamps are loaded as Date objects + validTypes.add("Date"); + //validTypes.add("datetime.date"); + 
//validTypes.add("datetime.time"); + //validTypes.add("datetime.datetime"); + + + validPropTypes.add(Schema.INTEGER); + validPropTypes.add(Schema.FLOAT); + validPropTypes.add(Schema.TIMESTAMP); + validPropTypes.add(Schema.SCALAR_UNIT_SIZE); + validPropTypes.add(Schema.SCALAR_UNIT_FREQUENCY); + validPropTypes.add(Schema.SCALAR_UNIT_TIME); + + } + + public GreaterThan(String name,String type,Object c) { + super(name,type,c); + + if(!validTypes.contains(constraintValue.getClass().getSimpleName())) { + ThreadLocalsHolder.getCollector().appendException("InvalidSchemaError: The property \"greater_than\" expects comparable values"); + } + } + + @Override + protected boolean _isValid(Object value) { + + // timestamps + if(value instanceof Date) { + if(constraintValue instanceof Date) { + return ((Date)value).after((Date)constraintValue); + } + return false; + } + + Double n1 = new Double(value.toString()); + Double n2 = new Double(constraintValue.toString()); + return n1 > n2; + } + + protected String _errMsg(Object value) { + return String.format("The value \"%s\" of property \"%s\" must be greater than \"%s\"", + valueMsg,propertyName,constraintValueMsg); + } + +} + +/* +class GreaterThan(Constraint): + """Constraint class for "greater_than" + + Constrains a property or parameter to a value greater than ('>') + the value declared. 
+ """ + + constraint_key = Constraint.GREATER_THAN + + valid_types = (int, float, datetime.date, + datetime.time, datetime.datetime) + + valid_prop_types = (Schema.INTEGER, Schema.FLOAT, Schema.TIMESTAMP, + Schema.SCALAR_UNIT_SIZE, Schema.SCALAR_UNIT_FREQUENCY, + Schema.SCALAR_UNIT_TIME) + + def __init__(self, property_name, property_type, constraint): + super(GreaterThan, self).__init__(property_name, property_type, + constraint) + if not isinstance(constraint[self.GREATER_THAN], self.valid_types): + ExceptionCollector.appendException( + InvalidSchemaError(message=_('The property "greater_than" ' + 'expects comparable values.'))) + + def _is_valid(self, value): + if value > self.constraint_value: + return True + + return False + + def _err_msg(self, value): + return (_('The value "%(pvalue)s" of property "%(pname)s" must be ' + 'greater than "%(cvalue)s".') % + dict(pname=self.property_name, + pvalue=self.value_msg, + cvalue=self.constraint_value_msg)) +*/ diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/InRange.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/InRange.java new file mode 100644 index 0000000..282267d --- /dev/null +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/InRange.java @@ -0,0 +1,171 @@ +package org.openecomp.sdc.toscaparser.api.elements.constraints; + +import java.util.Date; + +import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; +import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +import java.util.ArrayList; + +public class InRange extends Constraint { + // Constraint class for "in_range" + + //Constrains a property or parameter to a value in range of (inclusive) + //the two values declared. 
+ + private static final String UNBOUNDED = "UNBOUNDED"; + + private Object min,max; + + protected void _setValues() { + + constraintKey = IN_RANGE; + + validTypes.add("Integer"); + validTypes.add("Double"); + validTypes.add("Float"); + validTypes.add("String"); + // timestamps are loaded as Date objects + validTypes.add("Date"); + //validTypes.add("datetime.date"); + //validTypes.add("datetime.time"); + //validTypes.add("datetime.datetime"); + + validPropTypes.add(Schema.INTEGER); + validPropTypes.add(Schema.FLOAT); + validPropTypes.add(Schema.TIMESTAMP); + validPropTypes.add(Schema.SCALAR_UNIT_SIZE); + validPropTypes.add(Schema.SCALAR_UNIT_FREQUENCY); + validPropTypes.add(Schema.SCALAR_UNIT_TIME); + validPropTypes.add(Schema.RANGE); + + } + + @SuppressWarnings("unchecked") + public InRange(String name,String type,Object c) { + super(name,type,c); + + if(!(constraintValue instanceof ArrayList) || ((ArrayList)constraintValue).size() != 2) { + ThreadLocalsHolder.getCollector().appendException("InvalidSchemaError: The property \"in_range\" expects a list"); + + } + + ArrayList alcv = (ArrayList)constraintValue; + String msg = "The property \"in_range\" expects comparable values"; + for(Object vo: alcv) { + if(!validTypes.contains(vo.getClass().getSimpleName())) { + ThreadLocalsHolder.getCollector().appendException("InvalidSchemaError: " + msg); + } + // The only string we allow for range is the special value 'UNBOUNDED' + if((vo instanceof String) && !((String)vo).equals(UNBOUNDED)) { + ThreadLocalsHolder.getCollector().appendException("InvalidSchemaError: " + msg); + } + } + min = alcv.get(0); + max = alcv.get(1); + + } + + @Override + protected boolean _isValid(Object value) { + + // timestamps + if(value instanceof Date) { + if(min instanceof Date && max instanceof Date) { + return !((Date)value).before((Date)min) && + !((Date)value).after((Date)max); + } + return false; + } + + Double dvalue = new Double(value.toString()); + if(!(min instanceof String)) { + 
if(dvalue < new Double(min.toString())) { + return false; + } + } + else if(!((String)min).equals(UNBOUNDED)) { + return false; + } + if(!(max instanceof String)) { + if(dvalue > new Double(max.toString())) { + return false; + } + } + else if(!((String)max).equals(UNBOUNDED)) { + return false; + } + return true; + } + + @Override + protected String _errMsg(Object value) { + return String.format("The value \"%s\" of property \"%s\" is out of range \"(min:%s, max:%s)\"", + valueMsg,propertyName,min.toString(),max.toString()); + } + +} + +/*python + +class InRange(Constraint): + """Constraint class for "in_range" + + Constrains a property or parameter to a value in range of (inclusive) + the two values declared. + """ + UNBOUNDED = 'UNBOUNDED' + + constraint_key = Constraint.IN_RANGE + + valid_types = (int, float, datetime.date, + datetime.time, datetime.datetime, str) + + valid_prop_types = (Schema.INTEGER, Schema.FLOAT, Schema.TIMESTAMP, + Schema.SCALAR_UNIT_SIZE, Schema.SCALAR_UNIT_FREQUENCY, + Schema.SCALAR_UNIT_TIME, Schema.RANGE) + + def __init__(self, property_name, property_type, constraint): + super(InRange, self).__init__(property_name, property_type, constraint) + if(not isinstance(self.constraint_value, collections.Sequence) or + (len(constraint[self.IN_RANGE]) != 2)): + ExceptionCollector.appendException( + InvalidSchemaError(message=_('The property "in_range" ' + 'expects a list.'))) + + msg = _('The property "in_range" expects comparable values.') + for value in self.constraint_value: + if not isinstance(value, self.valid_types): + ExceptionCollector.appendException( + InvalidSchemaError(message=msg)) + # The only string we allow for range is the special value + # 'UNBOUNDED' + if(isinstance(value, str) and value != self.UNBOUNDED): + ExceptionCollector.appendException( + InvalidSchemaError(message=msg)) + + self.min = self.constraint_value[0] + self.max = self.constraint_value[1] + + def _is_valid(self, value): + if not isinstance(self.min, str): + if 
value < self.min: + return False + elif self.min != self.UNBOUNDED: + return False + if not isinstance(self.max, str): + if value > self.max: + return False + elif self.max != self.UNBOUNDED: + return False + return True + + def _err_msg(self, value): + return (_('The value "%(pvalue)s" of property "%(pname)s" is out of ' + 'range "(min:%(vmin)s, max:%(vmax)s)".') % + dict(pname=self.property_name, + pvalue=self.value_msg, + vmin=self.constraint_value_msg[0], + vmax=self.constraint_value_msg[1])) + +*/ diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Length.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Length.java new file mode 100644 index 0000000..4cfd1c0 --- /dev/null +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Length.java @@ -0,0 +1,79 @@ +package org.openecomp.sdc.toscaparser.api.elements.constraints; + +import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; +import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +public class Length extends Constraint { + // Constraint class for "length" + + // Constrains the property or parameter to a value of a given length. 
+ + @Override + protected void _setValues() { + + constraintKey = LENGTH; + + validTypes.add("Integer"); + + validPropTypes.add(Schema.STRING); + + } + + public Length(String name,String type,Object c) { + super(name,type,c); + + if(!validTypes.contains(constraintValue.getClass().getSimpleName())) { + ThreadLocalsHolder.getCollector().appendException("InvalidSchemaError: The property \"length\" expects an integer"); + } + } + + @Override + protected boolean _isValid(Object value) { + if(value instanceof String && constraintValue instanceof Integer && + ((String)value).length() == (Integer)constraintValue) { + return true; + } + return false; + } + + @Override + protected String _errMsg(Object value) { + return String.format("Length of value \"%s\" of property \"%s\" must be equal to \"%s\"", + value.toString(),propertyName,constraintValue.toString()); + } + +} + +/*python + class Length(Constraint): + """Constraint class for "length" + + Constrains the property or parameter to a value of a given length. 
+ """ + + constraint_key = Constraint.LENGTH + + valid_types = (int, ) + + valid_prop_types = (Schema.STRING, ) + + def __init__(self, property_name, property_type, constraint): + super(Length, self).__init__(property_name, property_type, constraint) + if not isinstance(self.constraint_value, self.valid_types): + ExceptionCollector.appendException( + InvalidSchemaError(message=_('The property "length" expects ' + 'an integer.'))) + + def _is_valid(self, value): + if isinstance(value, str) and len(value) == self.constraint_value: + return True + + return False + + def _err_msg(self, value): + return (_('Length of value "%(pvalue)s" of property "%(pname)s" ' + 'must be equal to "%(cvalue)s".') % + dict(pname=self.property_name, + pvalue=value, + cvalue=self.constraint_value)) +*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/LessOrEqual.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/LessOrEqual.java new file mode 100644 index 0000000..00cba36 --- /dev/null +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/LessOrEqual.java @@ -0,0 +1,106 @@ +package org.openecomp.sdc.toscaparser.api.elements.constraints; + +import java.util.Date; + +import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; +import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +public class LessOrEqual extends Constraint { + // Constraint class for "less_or_equal" + + // Constrains a property or parameter to a value less than or equal + // to ('<=') the value declared. 
+ + protected void _setValues() { + + constraintKey = LESS_OR_EQUAL; + + validTypes.add("Integer"); + validTypes.add("Double"); + validTypes.add("Float"); + // timestamps are loaded as Date objects + validTypes.add("Date"); + //validTypes.add("datetime.date"); + //validTypes.add("datetime.time"); + //validTypes.add("datetime.datetime"); + + validPropTypes.add(Schema.INTEGER); + validPropTypes.add(Schema.FLOAT); + validPropTypes.add(Schema.TIMESTAMP); + validPropTypes.add(Schema.SCALAR_UNIT_SIZE); + validPropTypes.add(Schema.SCALAR_UNIT_FREQUENCY); + validPropTypes.add(Schema.SCALAR_UNIT_TIME); + + } + + public LessOrEqual(String name,String type,Object c) { + super(name,type,c); + + if(!validTypes.contains(constraintValue.getClass().getSimpleName())) { + ThreadLocalsHolder.getCollector().appendException("InvalidSchemaError: The property \"less_or_equal\" expects comparable values"); + } + } + + @Override + protected boolean _isValid(Object value) { + + // timestamps + if(value instanceof Date) { + if(constraintValue instanceof Date) { + return !((Date)value).after((Date)constraintValue); + } + return false; + } + + Double n1 = new Double(value.toString()); + Double n2 = new Double(constraintValue.toString()); + return n1 <= n2; + } + + @Override + protected String _errMsg(Object value) { + return String.format("The value \"%s\" of property \"%s\" must be less or equal to \"%s\"", + valueMsg,propertyName,constraintValueMsg); + } + +} + +/*python + +class LessOrEqual(Constraint): + """Constraint class for "less_or_equal" + + Constrains a property or parameter to a value less than or equal + to ('<=') the value declared. 
+ """ + + constraint_key = Constraint.LESS_OR_EQUAL + + valid_types = (int, float, datetime.date, + datetime.time, datetime.datetime) + + valid_prop_types = (Schema.INTEGER, Schema.FLOAT, Schema.TIMESTAMP, + Schema.SCALAR_UNIT_SIZE, Schema.SCALAR_UNIT_FREQUENCY, + Schema.SCALAR_UNIT_TIME) + + def __init__(self, property_name, property_type, constraint): + super(LessOrEqual, self).__init__(property_name, property_type, + constraint) + if not isinstance(self.constraint_value, self.valid_types): + ExceptionCollector.appendException( + InvalidSchemaError(message=_('The property "less_or_equal" ' + 'expects comparable values.'))) + + def _is_valid(self, value): + if value <= self.constraint_value: + return True + + return False + + def _err_msg(self, value): + return (_('The value "%(pvalue)s" of property "%(pname)s" must be ' + 'less than or equal to "%(cvalue)s".') % + dict(pname=self.property_name, + pvalue=self.value_msg, + cvalue=self.constraint_value_msg)) +*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/LessThan.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/LessThan.java new file mode 100644 index 0000000..eb5a41d --- /dev/null +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/LessThan.java @@ -0,0 +1,104 @@ +package org.openecomp.sdc.toscaparser.api.elements.constraints; + +import java.util.Date; + +import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; +import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +public class LessThan extends Constraint { + + @Override + protected void _setValues() { + + constraintKey = LESS_THAN; + + validTypes.add("Integer"); + validTypes.add("Double"); + validTypes.add("Float"); + // timestamps are loaded as Date objects + validTypes.add("Date"); + //validTypes.add("datetime.date"); + //validTypes.add("datetime.time"); + //validTypes.add("datetime.datetime"); + + + 
validPropTypes.add(Schema.INTEGER); + validPropTypes.add(Schema.FLOAT); + validPropTypes.add(Schema.TIMESTAMP); + validPropTypes.add(Schema.SCALAR_UNIT_SIZE); + validPropTypes.add(Schema.SCALAR_UNIT_FREQUENCY); + validPropTypes.add(Schema.SCALAR_UNIT_TIME); + + } + + public LessThan(String name,String type,Object c) { + super(name,type,c); + + if(!validTypes.contains(constraintValue.getClass().getSimpleName())) { + ThreadLocalsHolder.getCollector().appendException("InvalidSchemaError: The property \"less_than\" expects comparable values"); + } + } + + @Override + protected boolean _isValid(Object value) { + + // timestamps + if(value instanceof Date) { + if(constraintValue instanceof Date) { + return ((Date)value).before((Date)constraintValue); + } + return false; + } + + Double n1 = new Double(value.toString()); + Double n2 = new Double(constraintValue.toString()); + return n1 < n2; + } + + @Override + protected String _errMsg(Object value) { + return String.format("The value \"%s\" of property \"%s\" must be less than \"%s\"", + valueMsg,propertyName,constraintValueMsg); + } + +} + +/*python + +class LessThan(Constraint): +"""Constraint class for "less_than" + +Constrains a property or parameter to a value less than ('<') +the value declared. 
+""" + +constraint_key = Constraint.LESS_THAN + +valid_types = (int, float, datetime.date, + datetime.time, datetime.datetime) + +valid_prop_types = (Schema.INTEGER, Schema.FLOAT, Schema.TIMESTAMP, + Schema.SCALAR_UNIT_SIZE, Schema.SCALAR_UNIT_FREQUENCY, + Schema.SCALAR_UNIT_TIME) + +def __init__(self, property_name, property_type, constraint): + super(LessThan, self).__init__(property_name, property_type, + constraint) + if not isinstance(self.constraint_value, self.valid_types): + ExceptionCollector.appendException( + InvalidSchemaError(message=_('The property "less_than" ' + 'expects comparable values.'))) + +def _is_valid(self, value): + if value < self.constraint_value: + return True + + return False + +def _err_msg(self, value): + return (_('The value "%(pvalue)s" of property "%(pname)s" must be ' + 'less than "%(cvalue)s".') % + dict(pname=self.property_name, + pvalue=self.value_msg, + cvalue=self.constraint_value_msg)) +*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/MaxLength.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/MaxLength.java new file mode 100644 index 0000000..278ae85 --- /dev/null +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/MaxLength.java @@ -0,0 +1,90 @@ +package org.openecomp.sdc.toscaparser.api.elements.constraints; + +import java.util.LinkedHashMap; + +import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; +import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +public class MaxLength extends Constraint { + // Constraint class for "min_length" + + // Constrains the property or parameter to a value of a maximum length. 
+ + @Override + protected void _setValues() { + + constraintKey = MAX_LENGTH; + + validTypes.add("Integer"); + + validPropTypes.add(Schema.STRING); + validPropTypes.add(Schema.MAP); + + } + + public MaxLength(String name,String type,Object c) { + super(name,type,c); + + if(!validTypes.contains(constraintValue.getClass().getSimpleName())) { + ThreadLocalsHolder.getCollector().appendException("InvalidSchemaError: The property \"max_length\" expects an integer"); + } + } + + @SuppressWarnings("unchecked") + @Override + protected boolean _isValid(Object value) { + if(value instanceof String && constraintValue instanceof Integer && + ((String)value).length() <= (Integer)constraintValue) { + return true; + } + else if(value instanceof LinkedHashMap && constraintValue instanceof Integer && + ((LinkedHashMap)value).size() <= (Integer)constraintValue) { + return true; + } + return false; + } + + @Override + protected String _errMsg(Object value) { + return String.format("Length of value \"%s\" of property \"%s\" must be no greater than \"%s\"", + value.toString(),propertyName,constraintValue.toString()); + } + +} + +/*python + +class MaxLength(Constraint): + """Constraint class for "max_length" + + Constrains the property or parameter to a value to a maximum length. 
+ """ + + constraint_key = Constraint.MAX_LENGTH + + valid_types = (int, ) + + valid_prop_types = (Schema.STRING, Schema.MAP) + + def __init__(self, property_name, property_type, constraint): + super(MaxLength, self).__init__(property_name, property_type, + constraint) + if not isinstance(self.constraint_value, self.valid_types): + ExceptionCollector.appendException( + InvalidSchemaError(message=_('The property "max_length" ' + 'expects an integer.'))) + + def _is_valid(self, value): + if ((isinstance(value, str) or isinstance(value, dict)) and + len(value) <= self.constraint_value): + return True + + return False + + def _err_msg(self, value): + return (_('Length of value "%(pvalue)s" of property "%(pname)s" ' + 'must be no greater than "%(cvalue)s".') % + dict(pname=self.property_name, + pvalue=value, + cvalue=self.constraint_value)) +*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/MinLength.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/MinLength.java new file mode 100644 index 0000000..480c878 --- /dev/null +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/MinLength.java @@ -0,0 +1,90 @@ +package org.openecomp.sdc.toscaparser.api.elements.constraints; + +import java.util.LinkedHashMap; + +import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; +import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +public class MinLength extends Constraint { + // Constraint class for "min_length" + + // Constrains the property or parameter to a value of a minimum length. 
+ + @Override + protected void _setValues() { + + constraintKey = MIN_LENGTH; + + validTypes.add("Integer"); + + validPropTypes.add(Schema.STRING); + validPropTypes.add(Schema.MAP); + + } + + public MinLength(String name,String type,Object c) { + super(name,type,c); + + if(!validTypes.contains(constraintValue.getClass().getSimpleName())) { + ThreadLocalsHolder.getCollector().appendException("InvalidSchemaError: The property \"min_length\" expects an integer"); + } + } + + @SuppressWarnings("unchecked") + @Override + protected boolean _isValid(Object value) { + if(value instanceof String && constraintValue instanceof Integer && + ((String)value).length() >= (Integer)constraintValue) { + return true; + } + else if(value instanceof LinkedHashMap && constraintValue instanceof Integer && + ((LinkedHashMap)value).size() >= (Integer)constraintValue) { + return true; + } + return false; + } + + @Override + protected String _errMsg(Object value) { + return String.format("Length of value \"%s\" of property \"%s\" must be at least \"%s\"", + value.toString(),propertyName,constraintValue.toString()); + } + +} + +/*python + +class MinLength(Constraint): + """Constraint class for "min_length" + + Constrains the property or parameter to a value to a minimum length. 
+ """ + + constraint_key = Constraint.MIN_LENGTH + + valid_types = (int, ) + + valid_prop_types = (Schema.STRING, Schema.MAP) + + def __init__(self, property_name, property_type, constraint): + super(MinLength, self).__init__(property_name, property_type, + constraint) + if not isinstance(self.constraint_value, self.valid_types): + ExceptionCollector.appendException( + InvalidSchemaError(message=_('The property "min_length" ' + 'expects an integer.'))) + + def _is_valid(self, value): + if ((isinstance(value, str) or isinstance(value, dict)) and + len(value) >= self.constraint_value): + return True + + return False + + def _err_msg(self, value): + return (_('Length of value "%(pvalue)s" of property "%(pname)s" ' + 'must be at least "%(cvalue)s".') % + dict(pname=self.property_name, + pvalue=value, + cvalue=self.constraint_value)) +*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Pattern.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Pattern.java new file mode 100644 index 0000000..444a73c --- /dev/null +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Pattern.java @@ -0,0 +1,96 @@ +package org.openecomp.sdc.toscaparser.api.elements.constraints; + +import java.util.regex.Matcher; +import java.util.regex.PatternSyntaxException; + +import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; +import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +public class Pattern extends Constraint { + + @Override + protected void _setValues() { + + constraintKey = PATTERN; + + validTypes.add("String"); + + validPropTypes.add(Schema.STRING); + + } + + + public Pattern(String name,String type,Object c) { + super(name,type,c); + + if(!validTypes.contains(constraintValue.getClass().getSimpleName())) { + ThreadLocalsHolder.getCollector().appendException("InvalidSchemaError: The property \"pattern\" expects a string"); + } + } + + @Override + 
protected boolean _isValid(Object value) { + try { + if(!(value instanceof String)) { + ThreadLocalsHolder.getCollector().appendException(String.format("ValueError: Input value \"%s\" to \"pattern\" property \"%s\" must be a string", + value.toString(),propertyName)); + return false; + } + String strp = constraintValue.toString(); + String strm = value.toString(); + java.util.regex.Pattern pattern = java.util.regex.Pattern.compile(strp); + Matcher matcher = pattern.matcher(strm); + if(matcher.find() && matcher.end() == strm.length()) { + return true; + } + return false; + } + catch(PatternSyntaxException pse) { + ThreadLocalsHolder.getCollector().appendException(String.format("ValueError: Invalid regex \"%s\" in \"pattern\" property \"%s\"", + constraintValue.toString(),propertyName)); + return false; + } + } + + @Override + protected String _errMsg(Object value) { + return String.format("The value \"%s\" of property \"%s\" does not match the pattern \"%s\"", + value.toString(),propertyName,constraintValue.toString()); + } + +} + +/*python + +class Pattern(Constraint): + """Constraint class for "pattern" + + Constrains the property or parameter to a value that is allowed by + the provided regular expression. 
+ """ + + constraint_key = Constraint.PATTERN + + valid_types = (str, ) + + valid_prop_types = (Schema.STRING, ) + + def __init__(self, property_name, property_type, constraint): + super(Pattern, self).__init__(property_name, property_type, constraint) + if not isinstance(self.constraint_value, self.valid_types): + ExceptionCollector.appendException( + InvalidSchemaError(message=_('The property "pattern" ' + 'expects a string.'))) + self.match = re.compile(self.constraint_value).match + + def _is_valid(self, value): + match = self.match(value) + return match is not None and match.end() == len(value) + + def _err_msg(self, value): + return (_('The value "%(pvalue)s" of property "%(pname)s" does not ' + 'match pattern "%(cvalue)s".') % + dict(pname=self.property_name, + pvalue=value, + cvalue=self.constraint_value)) +*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Schema.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Schema.java new file mode 100644 index 0000000..ca721e6 --- /dev/null +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Schema.java @@ -0,0 +1,278 @@ +package org.openecomp.sdc.toscaparser.api.elements.constraints; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.Map; + +import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; +import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; + + +public class Schema { + + private static final String TYPE = "type"; + private static final String REQUIRED = "required"; + private static final String DESCRIPTION = "description"; + private static final String DEFAULT = "default"; + private static final String CONSTRAINTS = "constraints"; + private static final String STATUS = "status"; + private static final String ENTRYSCHEMA = "entry_schema"; + private static final String KEYS[] = { + TYPE, REQUIRED, 
DESCRIPTION,DEFAULT, CONSTRAINTS, ENTRYSCHEMA, STATUS}; + + public static final String INTEGER = "integer"; + public static final String STRING = "string"; + public static final String BOOLEAN = "boolean"; + public static final String FLOAT = "float"; + public static final String RANGE = "range"; + public static final String NUMBER = "number"; + public static final String TIMESTAMP = "timestamp"; + public static final String LIST = "list"; + public static final String MAP = "map"; + public static final String SCALAR_UNIT_SIZE = "scalar-unit.size"; + public static final String SCALAR_UNIT_FREQUENCY = "scalar-unit.frequency"; + public static final String SCALAR_UNIT_TIME = "scalar-unit.time"; + public static final String VERSION = "version"; + public static final String PORTDEF = "PortDef"; + public static final String PORTSPEC = "PortSpec"; //??? PortSpec.SHORTNAME + public static final String JSON = "json"; + + public static final String PROPERTY_TYPES[] = { + INTEGER, STRING, BOOLEAN, FLOAT, RANGE,NUMBER, TIMESTAMP, LIST, MAP, + SCALAR_UNIT_SIZE, SCALAR_UNIT_FREQUENCY, SCALAR_UNIT_TIME, + VERSION, PORTDEF, PORTSPEC, JSON}; + + @SuppressWarnings("unused") + private static final String SCALAR_UNIT_SIZE_DEFAULT = "B"; + + private static Map SCALAR_UNIT_SIZE_DICT = new HashMap<>(); + static { + SCALAR_UNIT_SIZE_DICT.put("B", 1L); + SCALAR_UNIT_SIZE_DICT.put("KB", 1000L); + SCALAR_UNIT_SIZE_DICT.put("KIB", 1024L); + SCALAR_UNIT_SIZE_DICT.put("MB", 1000000L); + SCALAR_UNIT_SIZE_DICT.put("MIB", 1048576L); + SCALAR_UNIT_SIZE_DICT.put("GB", 1000000000L); + SCALAR_UNIT_SIZE_DICT.put("GIB", 1073741824L); + SCALAR_UNIT_SIZE_DICT.put("TB", 1000000000000L); + SCALAR_UNIT_SIZE_DICT.put("TIB", 1099511627776L); + } + + private String name; + private LinkedHashMap schema; + private int _len; + private ArrayList constraintsList; + + + public Schema(String _name,LinkedHashMap _schemaDict) { + name = _name; + + if(!(_schemaDict instanceof LinkedHashMap)) { + //msg = (_('Schema 
definition of "%(pname)s" must be a dict.') + // % dict(pname=name)) + ThreadLocalsHolder.getCollector().appendException(String.format( + "InvalidSchemaError: Schema definition of \"%s\" must be a dict",name)); + } + + if(_schemaDict.get("type") == null) { + //msg = (_('Schema definition of "%(pname)s" must have a "type" ' + // 'attribute.') % dict(pname=name)) + ThreadLocalsHolder.getCollector().appendException(String.format( + "InvalidSchemaError: Schema definition of \"%s\" must have a \"type\" attribute",name)); + } + + schema = _schemaDict; + _len = 0; //??? None + constraintsList = new ArrayList<>(); + } + + public String getType() { + return (String)schema.get(TYPE); + } + + public boolean isRequired() { + return (boolean)schema.getOrDefault(REQUIRED, true); + } + + public String getDescription() { + return (String)schema.getOrDefault(DESCRIPTION,""); + } + + public Object getDefault() { + return schema.get(DEFAULT); + } + + public String getStatus() { + return (String)schema.getOrDefault(STATUS,""); + } + + @SuppressWarnings("unchecked") + public ArrayList getConstraints() { + if(constraintsList.size() == 0) { + Object cob = schema.get(CONSTRAINTS); + if(cob instanceof ArrayList) { + ArrayList constraintSchemata = (ArrayList)cob; + for(Object ob: constraintSchemata) { + if(ob instanceof LinkedHashMap) { + for(String cClass: ((LinkedHashMap)ob).keySet()) { + Constraint c = Constraint.factory(cClass,name,getType(),ob); + if(c != null) { + constraintsList.add(c); + } + else { + // error + ThreadLocalsHolder.getCollector().appendException(String.format( + "UnknownFieldError: Constraint type \"%s\" for property \"%s\" is not supported", + cClass,name)); + } + break; + } + } + } + } + } + return constraintsList; + } + + @SuppressWarnings("unchecked") + public LinkedHashMap getEntrySchema() { + return (LinkedHashMap)schema.get(ENTRYSCHEMA); + } + + // Python intrinsic methods... 
+ + // substitute for __getitem__ (aka self[key]) + public Object getItem(String key) { + return schema.get(key); + } + + /* + def __iter__(self): + for k in self.KEYS: + try: + self.schema[k] + except KeyError: + pass + else: + yield k + */ + + // substitute for __len__ (aka self.len()) + public int getLen() { + int len = 0; + for(String k: KEYS) { + if(schema.get(k) != null) { + len++; + } + _len = len; + } + return _len; + } + // getter + public LinkedHashMap getSchema() { + return schema; + } + +} + +/*python + +class Schema(collections.Mapping): + +KEYS = ( + TYPE, REQUIRED, DESCRIPTION, + DEFAULT, CONSTRAINTS, ENTRYSCHEMA, STATUS +) = ( + 'type', 'required', 'description', + 'default', 'constraints', 'entry_schema', 'status' +) + +PROPERTY_TYPES = ( + INTEGER, STRING, BOOLEAN, FLOAT, RANGE, + NUMBER, TIMESTAMP, LIST, MAP, + SCALAR_UNIT_SIZE, SCALAR_UNIT_FREQUENCY, SCALAR_UNIT_TIME, + VERSION, PORTDEF, PORTSPEC +) = ( + 'integer', 'string', 'boolean', 'float', 'range', + 'number', 'timestamp', 'list', 'map', + 'scalar-unit.size', 'scalar-unit.frequency', 'scalar-unit.time', + 'version', 'PortDef', PortSpec.SHORTNAME +) + +SCALAR_UNIT_SIZE_DEFAULT = 'B' +SCALAR_UNIT_SIZE_DICT = {'B': 1, 'KB': 1000, 'KIB': 1024, 'MB': 1000000, + 'MIB': 1048576, 'GB': 1000000000, + 'GIB': 1073741824, 'TB': 1000000000000, + 'TIB': 1099511627776} + +def __init__(self, name, schema_dict): + self.name = name + if not isinstance(schema_dict, collections.Mapping): + msg = (_('Schema definition of "%(pname)s" must be a dict.') + % dict(pname=name)) + ExceptionCollector.appendException(InvalidSchemaError(message=msg)) + + try: + schema_dict['type'] + except KeyError: + msg = (_('Schema definition of "%(pname)s" must have a "type" ' + 'attribute.') % dict(pname=name)) + ExceptionCollector.appendException(InvalidSchemaError(message=msg)) + + self.schema = schema_dict + self._len = None + self.constraints_list = [] + +@property +def type(self): + return self.schema[self.TYPE] + +@property 
+def required(self): + return self.schema.get(self.REQUIRED, True) + +@property +def description(self): + return self.schema.get(self.DESCRIPTION, '') + +@property +def default(self): + return self.schema.get(self.DEFAULT) + +@property +def status(self): + return self.schema.get(self.STATUS, '') + +@property +def constraints(self): + if not self.constraints_list: + constraint_schemata = self.schema.get(self.CONSTRAINTS) + if constraint_schemata: + self.constraints_list = [Constraint(self.name, + self.type, + cschema) + for cschema in constraint_schemata] + return self.constraints_list + +@property +def entry_schema(self): + return self.schema.get(self.ENTRYSCHEMA) + +def __getitem__(self, key): + return self.schema[key] + +def __iter__(self): + for k in self.KEYS: + try: + self.schema[k] + except KeyError: + pass + else: + yield k + +def __len__(self): + if self._len is None: + self._len = len(list(iter(self))) + return self._len +*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Schema.java.orig b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Schema.java.orig new file mode 100644 index 0000000..96eff34 --- /dev/null +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Schema.java.orig @@ -0,0 +1,281 @@ +package org.openecomp.sdc.toscaparser.api.elements.constraints; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.Map; + +import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; + + +public class Schema { + + private static final String TYPE = "type"; + private static final String REQUIRED = "required"; + private static final String DESCRIPTION = "description"; + private static final String DEFAULT = "default"; + private static final String CONSTRAINTS = "constraints"; + private static final String STATUS = "status"; + private static final String ENTRYSCHEMA = "entry_schema"; + private 
static final String KEYS[] = { + TYPE, REQUIRED, DESCRIPTION,DEFAULT, CONSTRAINTS, ENTRYSCHEMA, STATUS}; + + public static final String INTEGER = "integer"; + public static final String STRING = "string"; + public static final String BOOLEAN = "boolean"; + public static final String FLOAT = "float"; + public static final String RANGE = "range"; + public static final String NUMBER = "number"; + public static final String TIMESTAMP = "timestamp"; + public static final String LIST = "list"; + public static final String MAP = "map"; + public static final String SCALAR_UNIT_SIZE = "scalar-unit.size"; + public static final String SCALAR_UNIT_FREQUENCY = "scalar-unit.frequency"; + public static final String SCALAR_UNIT_TIME = "scalar-unit.time"; + public static final String VERSION = "version"; + public static final String PORTDEF = "PortDef"; + public static final String PORTSPEC = "PortSpec"; //??? PortSpec.SHORTNAME +<<<<<<< HEAD + public static final String JSON = "json"; +======= + public static final String JSON = "json"; +>>>>>>> master + + public static final String PROPERTY_TYPES[] = { + INTEGER, STRING, BOOLEAN, FLOAT, RANGE,NUMBER, TIMESTAMP, LIST, MAP, + SCALAR_UNIT_SIZE, SCALAR_UNIT_FREQUENCY, SCALAR_UNIT_TIME, + VERSION, PORTDEF, PORTSPEC, JSON}; + + @SuppressWarnings("unused") + private static final String SCALAR_UNIT_SIZE_DEFAULT = "B"; + + private static Map SCALAR_UNIT_SIZE_DICT = new HashMap<>(); + static { + SCALAR_UNIT_SIZE_DICT.put("B", 1L); + SCALAR_UNIT_SIZE_DICT.put("KB", 1000L); + SCALAR_UNIT_SIZE_DICT.put("KIB", 1024L); + SCALAR_UNIT_SIZE_DICT.put("MB", 1000000L); + SCALAR_UNIT_SIZE_DICT.put("MIB", 1048576L); + SCALAR_UNIT_SIZE_DICT.put("GB", 1000000000L); + SCALAR_UNIT_SIZE_DICT.put("GIB", 1073741824L); + SCALAR_UNIT_SIZE_DICT.put("TB", 1000000000000L); + SCALAR_UNIT_SIZE_DICT.put("TIB", 1099511627776L); + } + + private String name; + private LinkedHashMap schema; + private int _len; + private ArrayList constraintsList; + + + public 
Schema(String _name,LinkedHashMap _schemaDict) { + name = _name; + + if(!(_schemaDict instanceof LinkedHashMap)) { + //msg = (_('Schema definition of "%(pname)s" must be a dict.') + // % dict(pname=name)) + ExceptionCollector.appendException(String.format( + "InvalidSchemaError: Schema definition of \"%s\" must be a dict",name)); + } + + if(_schemaDict.get("type") == null) { + //msg = (_('Schema definition of "%(pname)s" must have a "type" ' + // 'attribute.') % dict(pname=name)) + ExceptionCollector.appendException(String.format( + "InvalidSchemaError: Schema definition of \"%s\" must have a \"type\" attribute",name)); + } + + schema = _schemaDict; + _len = 0; //??? None + constraintsList = new ArrayList<>(); + } + + public String getType() { + return (String)schema.get(TYPE); + } + + public boolean isRequired() { + return (boolean)schema.getOrDefault(REQUIRED, true); + } + + public String getDescription() { + return (String)schema.getOrDefault(DESCRIPTION,""); + } + + public Object getDefault() { + return schema.get(DEFAULT); + } + + public String getStatus() { + return (String)schema.getOrDefault(STATUS,""); + } + + @SuppressWarnings("unchecked") + public ArrayList getConstraints() { + if(constraintsList.size() == 0) { + Object cob = schema.get(CONSTRAINTS); + if(cob instanceof ArrayList) { + ArrayList constraintSchemata = (ArrayList)cob; + for(Object ob: constraintSchemata) { + if(ob instanceof LinkedHashMap) { + for(String cClass: ((LinkedHashMap)ob).keySet()) { + Constraint c = Constraint.factory(cClass,name,getType(),ob); + if(c != null) { + constraintsList.add(c); + } + else { + // error + ExceptionCollector.appendException(String.format( + "UnknownFieldError: Constraint type \"%s\" for property \"%s\" is not supported", + cClass,name)); + } + break; + } + } + } + } + } + return constraintsList; + } + + @SuppressWarnings("unchecked") + public LinkedHashMap getEntrySchema() { + return (LinkedHashMap)schema.get(ENTRYSCHEMA); + } + + // Python intrinsic 
methods... + + // substitute for __getitem__ (aka self[key]) + public Object getItem(String key) { + return schema.get(key); + } + + /* + def __iter__(self): + for k in self.KEYS: + try: + self.schema[k] + except KeyError: + pass + else: + yield k + */ + + // substitute for __len__ (aka self.len()) + public int getLen() { + int len = 0; + for(String k: KEYS) { + if(schema.get(k) != null) { + len++; + } + _len = len; + } + return _len; + } + // getter + public LinkedHashMap getSchema() { + return schema; + } + +} + +/*python + +class Schema(collections.Mapping): + +KEYS = ( + TYPE, REQUIRED, DESCRIPTION, + DEFAULT, CONSTRAINTS, ENTRYSCHEMA, STATUS +) = ( + 'type', 'required', 'description', + 'default', 'constraints', 'entry_schema', 'status' +) + +PROPERTY_TYPES = ( + INTEGER, STRING, BOOLEAN, FLOAT, RANGE, + NUMBER, TIMESTAMP, LIST, MAP, + SCALAR_UNIT_SIZE, SCALAR_UNIT_FREQUENCY, SCALAR_UNIT_TIME, + VERSION, PORTDEF, PORTSPEC +) = ( + 'integer', 'string', 'boolean', 'float', 'range', + 'number', 'timestamp', 'list', 'map', + 'scalar-unit.size', 'scalar-unit.frequency', 'scalar-unit.time', + 'version', 'PortDef', PortSpec.SHORTNAME +) + +SCALAR_UNIT_SIZE_DEFAULT = 'B' +SCALAR_UNIT_SIZE_DICT = {'B': 1, 'KB': 1000, 'KIB': 1024, 'MB': 1000000, + 'MIB': 1048576, 'GB': 1000000000, + 'GIB': 1073741824, 'TB': 1000000000000, + 'TIB': 1099511627776} + +def __init__(self, name, schema_dict): + self.name = name + if not isinstance(schema_dict, collections.Mapping): + msg = (_('Schema definition of "%(pname)s" must be a dict.') + % dict(pname=name)) + ExceptionCollector.appendException(InvalidSchemaError(message=msg)) + + try: + schema_dict['type'] + except KeyError: + msg = (_('Schema definition of "%(pname)s" must have a "type" ' + 'attribute.') % dict(pname=name)) + ExceptionCollector.appendException(InvalidSchemaError(message=msg)) + + self.schema = schema_dict + self._len = None + self.constraints_list = [] + +@property +def type(self): + return self.schema[self.TYPE] + 
+@property +def required(self): + return self.schema.get(self.REQUIRED, True) + +@property +def description(self): + return self.schema.get(self.DESCRIPTION, '') + +@property +def default(self): + return self.schema.get(self.DEFAULT) + +@property +def status(self): + return self.schema.get(self.STATUS, '') + +@property +def constraints(self): + if not self.constraints_list: + constraint_schemata = self.schema.get(self.CONSTRAINTS) + if constraint_schemata: + self.constraints_list = [Constraint(self.name, + self.type, + cschema) + for cschema in constraint_schemata] + return self.constraints_list + +@property +def entry_schema(self): + return self.schema.get(self.ENTRYSCHEMA) + +def __getitem__(self, key): + return self.schema[key] + +def __iter__(self): + for k in self.KEYS: + try: + self.schema[k] + except KeyError: + pass + else: + yield k + +def __len__(self): + if self._len is None: + self._len = len(list(iter(self))) + return self._len +*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/ValidValues.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/ValidValues.java new file mode 100644 index 0000000..06622e4 --- /dev/null +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/ValidValues.java @@ -0,0 +1,84 @@ +package org.openecomp.sdc.toscaparser.api.elements.constraints; + +import java.util.ArrayList; + +public class ValidValues extends Constraint { + + + protected void _setValues() { + + constraintKey = VALID_VALUES; + + for(String s: Schema.PROPERTY_TYPES) { + validPropTypes.add(s); + } + + } + + + public ValidValues(String name,String type,Object c) { + super(name,type,c); + + } + + @SuppressWarnings("unchecked") + protected boolean _isValid(Object val) { + if(!(constraintValue instanceof ArrayList)) { + return false; + } + if(val instanceof ArrayList) { + boolean bAll = true; + for(Object v: (ArrayList)val) { + 
if(!((ArrayList)constraintValue).contains(v)) { + bAll = false; + break; + }; + } + return bAll; + } + return ((ArrayList)constraintValue).contains(val); + } + + protected String _errMsg(Object value) { + return String.format("The value \"%s\" of property \"%s\" is not valid. Expected a value from \"%s\"", + value.toString(),propertyName,constraintValue.toString()); + } + +} + +/*python + +class ValidValues(Constraint): +"""Constraint class for "valid_values" + +Constrains a property or parameter to a value that is in the list of +declared values. +""" +constraint_key = Constraint.VALID_VALUES + +valid_prop_types = Schema.PROPERTY_TYPES + +def __init__(self, property_name, property_type, constraint): + super(ValidValues, self).__init__(property_name, property_type, + constraint) + if not isinstance(self.constraint_value, collections.Sequence): + ExceptionCollector.appendException( + InvalidSchemaError(message=_('The property "valid_values" ' + 'expects a list.'))) + +def _is_valid(self, value): + print '*** payton parser validating ',value,' in ',self.constraint_value#GGG + if isinstance(value, list): + return all(v in self.constraint_value for v in value) + return value in self.constraint_value + +def _err_msg(self, value): + allowed = '[%s]' % ', '.join(str(a) for a in self.constraint_value) + return (_('The value "%(pvalue)s" of property "%(pname)s" is not ' + 'valid. 
Expected a value from "%(cvalue)s".') % + dict(pname=self.property_name, + pvalue=value, + cvalue=allowed)) + + +*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/extensions/ExtTools.java b/src/main/java/org/openecomp/sdc/toscaparser/api/extensions/ExtTools.java new file mode 100644 index 0000000..6403d6e --- /dev/null +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/extensions/ExtTools.java @@ -0,0 +1,210 @@ +package org.openecomp.sdc.toscaparser.api.extensions; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.BufferedReader; +import java.io.File; +import java.io.FileInputStream; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.charset.Charset; +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +public class ExtTools { + + private static Logger log = LoggerFactory.getLogger(ExtTools.class.getName()); + + private static LinkedHashMap EXTENSION_INFO = new LinkedHashMap<>(); + + public ExtTools() { + + EXTENSION_INFO = _loadExtensions(); + } + + private LinkedHashMap _loadExtensions() { + + LinkedHashMap extensions = new LinkedHashMap<>(); + + String path = ExtTools.class.getProtectionDomain().getCodeSource().getLocation().getPath(); + //String extdir = path + File.separator + "resources/extensions"; + + String extdir = ExtTools.class.getClassLoader().getResource("extensions").getFile(); + + // for all folders in extdir + File extDir = new File(extdir); + File extDirList[] = extDir.listFiles(); + if (extDirList == null) { + String a = "aaaa"; + + } + if (extDirList != null) { + for(File f: extDirList) { + if(f.isDirectory()) { + // for all .py files in folder + File extFileList[] = f.listFiles(); + for(File pyf: extFileList) { + String pyfName = pyf.getName(); + String pyfPath = pyf.getAbsolutePath(); + if(pyfName.endsWith(".py")) { + // get VERSION,SECTIONS,DEF_FILE + try { + 
String version = null; + ArrayList sections = null; + String defsFile = null; + String line; + InputStream fis = new FileInputStream(pyfPath); + InputStreamReader isr = new InputStreamReader(fis, Charset.forName("UTF-8")); + BufferedReader br = new BufferedReader(isr); + Pattern pattern = Pattern.compile("^([^#]\\S+)\\s*=\\s*(\\S.*)$"); + while((line = br.readLine()) != null) { + line = line.replace("'","\""); + Matcher matcher = pattern.matcher(line.toString()); + if(matcher.find()) { + if(matcher.group(1).equals("VERSION")) { + version = matcher.group(2); + if(version.startsWith("'") || version.startsWith("\"")) { + version = version.substring(1,version.length()-1); + } + } + else if(matcher.group(1).equals("DEFS_FILE")) { + String fn = matcher.group(2); + if(fn.startsWith("'") || fn.startsWith("\"")) { + fn = fn.substring(1,fn.length()-1); + } + defsFile = pyf.getParent() + File.separator + fn;//matcher.group(2); + } + else if(matcher.group(1).equals("SECTIONS")) { + sections = new ArrayList<>(); + Pattern secpat = Pattern.compile("\"([^\"]+)\""); + Matcher secmat = secpat.matcher(matcher.group(2)); + while(secmat.find()) { + sections.add(secmat.group(1)); + } + } + } + } + br.close(); + + if(version != null && defsFile != null) { + LinkedHashMap ext = new LinkedHashMap<>(); + ext.put("defs_file", defsFile); + if(sections != null) { + ext.put("sections", sections); + } + extensions.put(version, ext); + } + else { + // error + } + } + catch(Exception e) { + log.error("ExtTools - _loadExtensions - {}", e.getMessage()); + // ... 
+ } + } + } + } + } + } + return extensions; + } + + public ArrayList getVersions() { + return new ArrayList(EXTENSION_INFO.keySet()); + } + + public LinkedHashMap> getSections() { + LinkedHashMap> sections = new LinkedHashMap<>(); + for(String version: EXTENSION_INFO.keySet()) { + LinkedHashMap eiv = (LinkedHashMap)EXTENSION_INFO.get(version); + sections.put(version,(ArrayList)eiv.get("sections")); + } + return sections; + } + + public String getDefsFile(String version) { + LinkedHashMap eiv = (LinkedHashMap)EXTENSION_INFO.get(version); + return (String)eiv.get("defs_file"); + } + +} + +/*python + +from toscaparser.common.exception import ToscaExtAttributeError +from toscaparser.common.exception import ToscaExtImportError + +log = logging.getLogger("tosca.model") + +REQUIRED_ATTRIBUTES = ['VERSION', 'DEFS_FILE'] + + +class ExtTools(object): + def __init__(self): + self.EXTENSION_INFO = self._load_extensions() + + def _load_extensions(self): + '''Dynamically load all the extensions .''' + extensions = {} + + # Use the absolute path of the class path + abs_path = os.path.dirname(os.path.abspath(__file__)) + + extdirs = [e for e in os.listdir(abs_path) if + not e.startswith('tests') and + os.path.isdir(os.path.join(abs_path, e))] + + for e in extdirs: + log.info(e) + extpath = abs_path + '/' + e + # Grab all the extension files in the given path + ext_files = [f for f in os.listdir(extpath) if f.endswith('.py') + and not f.startswith('__init__')] + + # For each module, pick out the target translation class + for f in ext_files: + log.info(f) + ext_name = 'toscaparser/extensions/' + e + '/' + f.strip('.py') + ext_name = ext_name.replace('/', '.') + try: + extinfo = importlib.import_module(ext_name) + version = getattr(extinfo, 'VERSION') + defs_file = extpath + '/' + getattr(extinfo, 'DEFS_FILE') + + # Sections is an optional attribute + sections = getattr(extinfo, 'SECTIONS', ()) + + extensions[version] = {'sections': sections, + 'defs_file': defs_file} + except 
ImportError: + raise ToscaExtImportError(ext_name=ext_name) + except AttributeError: + attrs = ', '.join(REQUIRED_ATTRIBUTES) + raise ToscaExtAttributeError(ext_name=ext_name, + attrs=attrs) + + print 'Extensions ',extensions#GGG + return extensions + + def get_versions(self): + return self.EXTENSION_INFO.keys() + + def get_sections(self): + sections = {} + for version in self.EXTENSION_INFO.keys(): + sections[version] = self.EXTENSION_INFO[version]['sections'] + + return sections + + def get_defs_file(self, version): + versiondata = self.EXTENSION_INFO.get(version) + + if versiondata: + return versiondata.get('defs_file') + else: + return None +*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/functions/Concat.java b/src/main/java/org/openecomp/sdc/toscaparser/api/functions/Concat.java new file mode 100644 index 0000000..6dc7deb --- /dev/null +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/functions/Concat.java @@ -0,0 +1,77 @@ +package org.openecomp.sdc.toscaparser.api.functions; + +import java.util.ArrayList; + +import org.openecomp.sdc.toscaparser.api.TopologyTemplate; +import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; +import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +public class Concat extends Function { + // Validate the function and provide an instance of the function + + // Concatenation of values are supposed to be produced at runtime and + // therefore its the responsibility of the TOSCA engine to implement the + // evaluation of Concat functions. 
+ + // Arguments: + + // * List of strings that needs to be concatenated + + // Example: + + // [ 'http://', + // get_attribute: [ server, public_address ], + // ':' , + // get_attribute: [ server, port ] ] + + + public Concat(TopologyTemplate ttpl,Object context,String name,ArrayList args) { + super(ttpl,context,name,args); + } + + @Override + public Object result() { + return this; + } + + @Override + void validate() { + if(args.size() < 1) { + ThreadLocalsHolder.getCollector().appendException( + "ValueError: Invalid arguments for function \"concat\". " + + "Expected at least one argument"); + } + } + +} + +/*python + +class Concat(Function): +"""Validate the function and provide an instance of the function + +Concatenation of values are supposed to be produced at runtime and +therefore its the responsibility of the TOSCA engine to implement the +evaluation of Concat functions. + +Arguments: + +* List of strings that needs to be concatenated + +Example: + + [ 'http://', + get_attribute: [ server, public_address ], + ':' , + get_attribute: [ server, port ] ] +""" + +def validate(self): + if len(self.args) < 1: + ExceptionCollector.appendException( + ValueError(_('Invalid arguments for function "{0}". 
Expected ' + 'at least one arguments.').format(CONCAT))) + +def result(self): + return self +*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/functions/Function.java b/src/main/java/org/openecomp/sdc/toscaparser/api/functions/Function.java new file mode 100644 index 0000000..102fbc0 --- /dev/null +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/functions/Function.java @@ -0,0 +1,191 @@ +package org.openecomp.sdc.toscaparser.api.functions; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.LinkedHashMap; + +import org.openecomp.sdc.toscaparser.api.TopologyTemplate; + +public abstract class Function { + + protected static final String GET_PROPERTY = "get_property"; + protected static final String GET_ATTRIBUTE = "get_attribute"; + protected static final String GET_INPUT = "get_input"; + protected static final String GET_OPERATION_OUTPUT = "get_operation_output"; + protected static final String CONCAT = "concat"; + protected static final String TOKEN = "token"; + + protected static final String SELF = "SELF"; + protected static final String HOST = "HOST"; + protected static final String TARGET = "TARGET"; + protected static final String SOURCE = "SOURCE"; + + protected static final String HOSTED_ON = "tosca.relationships.HostedOn"; + + protected static HashMap functionMappings = _getFunctionMappings(); + + private static HashMap _getFunctionMappings() { + HashMap map = new HashMap<>(); + map.put(GET_PROPERTY,"GetProperty"); + map.put(GET_INPUT, "GetInput"); + map.put(GET_ATTRIBUTE, "GetAttribute"); + map.put(GET_OPERATION_OUTPUT, "GetOperationOutput"); + map.put(CONCAT, "Concat"); + map.put(TOKEN, "Token"); + return map; + } + + protected TopologyTemplate toscaTpl; + protected Object context; + protected String name; + protected ArrayList args; + + + public Function(TopologyTemplate _toscaTpl,Object _context,String _name,ArrayList _args) { + toscaTpl = _toscaTpl; + context = _context; + name = 
_name; + args = _args; + validate(); + + } + + abstract Object result(); + + abstract void validate(); + + @SuppressWarnings("unchecked") + public static boolean isFunction(Object funcObj) { + // Returns True if the provided function is a Tosca intrinsic function. + // + //Examples: + // + //* "{ get_property: { SELF, port } }" + //* "{ get_input: db_name }" + //* Function instance + + //:param function: Function as string or a Function instance. + //:return: True if function is a Tosca intrinsic function, otherwise False. + // + + if(funcObj instanceof LinkedHashMap) { + LinkedHashMap function = (LinkedHashMap)funcObj; + if(function.size() == 1) { + String funcName = (new ArrayList(function.keySet())).get(0); + return functionMappings.keySet().contains(funcName); + } + } + return (funcObj instanceof Function); + } + + @SuppressWarnings("unchecked") + public static Object getFunction(TopologyTemplate ttpl,Object context,Object rawFunctionObj) { + // Gets a Function instance representing the provided template function. + + // If the format provided raw_function format is not relevant for template + // functions or if the function name doesn't exist in function mapping the + // method returns the provided raw_function. + // + // :param tosca_tpl: The tosca template. + // :param node_template: The node template the function is specified for. + // :param raw_function: The raw function as dict. + // :return: Template function as Function instance or the raw_function if + // parsing was unsuccessful. 
+ + if(isFunction(rawFunctionObj)) { + if(rawFunctionObj instanceof LinkedHashMap) { + LinkedHashMap rawFunction = (LinkedHashMap)rawFunctionObj; + String funcName = (new ArrayList(rawFunction.keySet())).get(0); + if(functionMappings.keySet().contains(funcName)) { + String funcType = functionMappings.get(funcName); + Object oargs = (new ArrayList(rawFunction.values())).get(0); + ArrayList funcArgs; + if(oargs instanceof ArrayList) { + funcArgs = (ArrayList)oargs; + } + else { + funcArgs = new ArrayList<>(); + funcArgs.add(oargs); + } + + if(funcType.equals("GetInput")) { + return new GetInput(ttpl,context,funcName,funcArgs); + } + else if(funcType.equals("GetAttribute")) { + return new GetAttribute(ttpl,context,funcName,funcArgs); + } + else if(funcType.equals("GetProperty")) { + return new GetProperty(ttpl,context,funcName,funcArgs); + } + else if(funcType.equals("GetOperationOutput")) { + return new GetOperationOutput(ttpl,context,funcName,funcArgs); + } + else if(funcType.equals("Concat")) { + return new Concat(ttpl,context,funcName,funcArgs); + } + else if(funcType.equals("Token")) { + return new Token(ttpl,context,funcName,funcArgs); + } + } + } + } + return rawFunctionObj; + } +} + +/*python + +from toscaparser.common.exception import ExceptionCollector +from toscaparser.common.exception import UnknownInputError +from toscaparser.dataentity import DataEntity +from toscaparser.elements.constraints import Schema +from toscaparser.elements.datatype import DataType +from toscaparser.elements.entity_type import EntityType +from toscaparser.elements.relationshiptype import RelationshipType +from toscaparser.elements.statefulentitytype import StatefulEntityType +from toscaparser.utils.gettextutils import _ + + +GET_PROPERTY = 'get_property' +GET_ATTRIBUTE = 'get_attribute' +GET_INPUT = 'get_input' +GET_OPERATION_OUTPUT = 'get_operation_output' +CONCAT = 'concat' +TOKEN = 'token' + +SELF = 'SELF' +HOST = 'HOST' +TARGET = 'TARGET' +SOURCE = 'SOURCE' + +HOSTED_ON = 
'tosca.relationships.HostedOn' + + +@six.add_metaclass(abc.ABCMeta) +class Function(object): + """An abstract type for representing a Tosca template function.""" + + def __init__(self, tosca_tpl, context, name, args): + self.tosca_tpl = tosca_tpl + self.context = context + self.name = name + self.args = args + self.validate() + + @abc.abstractmethod + def result(self): + """Invokes the function and returns its result + + Some methods invocation may only be relevant on runtime (for example, + getting runtime properties) and therefore its the responsibility of + the orchestrator/translator to take care of such functions invocation. + + :return: Function invocation result. + """ + return {self.name: self.args} + + @abc.abstractmethod + def validate(self): + """Validates function arguments.""" + pass +*/ diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetAttribute.java b/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetAttribute.java new file mode 100644 index 0000000..549073b --- /dev/null +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetAttribute.java @@ -0,0 +1,535 @@ +package org.openecomp.sdc.toscaparser.api.functions; + +import java.util.ArrayList; +import java.util.LinkedHashMap; + +import org.openecomp.sdc.toscaparser.api.*; +import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; +import org.openecomp.sdc.toscaparser.api.elements.AttributeDef; +import org.openecomp.sdc.toscaparser.api.elements.CapabilityTypeDef; +import org.openecomp.sdc.toscaparser.api.elements.DataType; +import org.openecomp.sdc.toscaparser.api.elements.EntityType; +import org.openecomp.sdc.toscaparser.api.elements.NodeType; +import org.openecomp.sdc.toscaparser.api.elements.PropertyDef; +import org.openecomp.sdc.toscaparser.api.elements.RelationshipType; +import org.openecomp.sdc.toscaparser.api.elements.StatefulEntityType; +import org.openecomp.sdc.toscaparser.api.elements.constraints.Schema; +import 
org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder;

/**
 * The TOSCA "get_attribute" intrinsic function.
 *
 * Node template attribute values are set at runtime, so the TOSCA engine is
 * responsible for the actual evaluation; this class validates the reference.
 *
 * Arguments: node template name | HOST, then the attribute name, optionally
 * followed by index/key path elements. If HOST is passed, each node template
 * along the HostedOn relationship chain is searched until one containing the
 * attribute is found.
 *
 * Examples:
 *   { get_attribute: [ server, private_address ] }
 *   { get_attribute: [ HOST, private_address ] }
 *   { get_attribute: [ HOST, private_address, 0 ] }
 *   { get_attribute: [ HOST, private_address, 0, some_prop] }
 */
public class GetAttribute extends Function {

	public GetAttribute(TopologyTemplate ttpl,Object context,String name,ArrayList args) {
		super(ttpl,context,name,args);
	}

	@Override
	void validate() {
		if(args.size() < 2) {
			ThreadLocalsHolder.getCollector().appendException(
					"ValueError: Illegal arguments for function \"get_attribute\". Expected arguments: \"node-template-name\", \"req-or-cap\" (optional), \"property name.\"");
			return;
		}
		else if(args.size() == 2) {
			_findNodeTemplateContainingAttribute();
		}
		else {
			NodeTemplate nodeTpl = _findNodeTemplate((String)args.get(0));
			if(nodeTpl == null) {
				return;
			}
			// path elements start at index 2 when arg 1 is a direct attribute,
			// at index 3 when arg 1 names a requirement/capability
			int index = 2;
			AttributeDef attr = nodeTpl.getTypeDefinition().getAttributeDefValue((String)args.get(1));
			if(attr == null) {
				index = 3;
				// not a direct attribute: check the requirements/capabilities
				attr = _findReqOrCapAttribute((String)args.get(1),(String)args.get(2));
				if(attr == null) {
					return;
				}
			}

			// walk the remaining path elements, narrowing the value type as we go
			String valueType = (String)attr.getSchema().get("type");
			if(args.size() > index) {
				for(Object elem: args.subList(index,args.size())) {
					if(valueType.equals("list")) {
						if(!(elem instanceof Integer)) {
							ThreadLocalsHolder.getCollector().appendException(String.format(
									"ValueError: Illegal arguments for function \"get_attribute\" \"%s\". Expected positive integer argument",
									elem.toString()));
						}
						Object ob = attr.getSchema().get("entry_schema");
						valueType = (String)((LinkedHashMap)ob).get("type");
					}
					else if(valueType.equals("map")) {
						Object ob = attr.getSchema().get("entry_schema");
						valueType = (String)((LinkedHashMap)ob).get("type");
					}
					else {
						boolean bFound = false;
						for(String p: Schema.PROPERTY_TYPES) {
							if(p.equals(valueType)) {
								bFound = true;
								break;
							}
						}
						if(bFound) {
							// bug fix: was "%d", which throws
							// IllegalFormatConversionException when elem is a String
							ThreadLocalsHolder.getCollector().appendException(String.format(
									"ValueError: Illegal arguments for function \"get_attribute\". Unexpected attribute/index value \"%s\"",
									elem));
							return;
						}
						else { // it is a complex (data) type
							DataType dataType = new DataType(valueType,null);
							LinkedHashMap<String,PropertyDef> props =
									dataType.getAllProperties();
							PropertyDef prop = props.get((String)elem);
							if(prop != null) {
								valueType = (String)prop.getSchema().get("type");
							}
							else {
								// bug fix: format string was "%\"" (missing 's'),
								// which throws UnknownFormatConversionException
								ThreadLocalsHolder.getCollector().appendException(String.format(
										"KeyError: Illegal arguments for function \"get_attribute\". Attribute name \"%s\" not found in \"%s\"",
										elem,valueType));
							}
						}
					}
				}
			}
		}
	}

	@Override
	public Object result() {
		// evaluation happens at runtime in the engine
		return this;
	}

	/**
	 * Returns the NodeTemplate instance the get_attribute function refers to.
	 * If the HOST keyword was used, the template containing the attribute along
	 * the HostedOn relationship chain is returned.
	 */
	private NodeTemplate getReferencedNodeTemplate() {
		return _findNodeTemplateContainingAttribute();
	}

	// Attributes can be explicitly created as part of the type definition
	// or a property name can be implicitly used as an attribute name
	private NodeTemplate _findNodeTemplateContainingAttribute() {
		NodeTemplate nodeTpl = _findNodeTemplate((String)args.get(0));
		if(nodeTpl != null &&
				!_attributeExistsInType(nodeTpl.getTypeDefinition()) &&
				!nodeTpl.getProperties().keySet().contains(getAttributeName())) {
			ThreadLocalsHolder.getCollector().appendException(String.format(
					"KeyError: Attribute \"%s\" was not found in node template \"%s\"",
					getAttributeName(),nodeTpl.getName()));
		}
		return nodeTpl;
	}

	private boolean _attributeExistsInType(StatefulEntityType typeDefinition) {
		LinkedHashMap<String,AttributeDef> attrsDef = typeDefinition.getAttributesDef();
		return attrsDef.get(getAttributeName()) != null;
	}

	// Walks the HostedOn chain looking for a host node that defines the attribute.
	private NodeTemplate _findHostContainingAttribute(String nodeTemplateName) {
		NodeTemplate nodeTemplate = _findNodeTemplate(nodeTemplateName);
		if(nodeTemplate != null) {
			LinkedHashMap<String,Object> hostedOnRel =
					(LinkedHashMap<String,Object>)EntityType.TOSCA_DEF.get(HOSTED_ON);
			for(Object ro: nodeTemplate.getRequirements()) {
				if(ro != null && ro instanceof LinkedHashMap) {
					LinkedHashMap<String,Object> r = (LinkedHashMap<String,Object>)ro;
					for(String requirement: r.keySet()) {
						String targetName = (String)r.get(requirement);
						NodeTemplate targetNode = _findNodeTemplate(targetName);
						// robustness fix: _findNodeTemplate returns null (after
						// reporting) for an unknown target; was an NPE here
						if(targetNode == null) {
							continue;
						}
						NodeType targetType = (NodeType)targetNode.getTypeDefinition();
						for(CapabilityTypeDef capability: targetType.getCapabilitiesObjects()) {
							if(capability.inheritsFrom((ArrayList<String>)hostedOnRel.get("valid_target_types"))) {
								if(_attributeExistsInType(targetType)) {
									return targetNode;
								}
								return _findHostContainingAttribute(targetName);
							}
						}
					}
				}
			}
		}
		return null;
	}

	// Resolves the first function argument (a template name or a keyword) to a
	// NodeTemplate; reports through the collector and returns null on failure.
	private NodeTemplate _findNodeTemplate(String nodeTemplateName) {
		if(nodeTemplateName.equals(HOST)) {
			// Currently this is the only way to tell whether the function
			// is used within the outputs section of the TOSCA template.
			if(context instanceof ArrayList) {
				ThreadLocalsHolder.getCollector().appendException(
						"ValueError: \"get_attribute: [ HOST, ... ]\" is not allowed in \"outputs\" section of the TOSCA template");
				return null;
			}
			NodeTemplate nodeTpl = _findHostContainingAttribute(SELF);
			if(nodeTpl == null) {
				ThreadLocalsHolder.getCollector().appendException(String.format(
						"ValueError: \"get_attribute: [ HOST, ... ]\" was used in " +
						"node template \"%s\" but \"%s\" was not found in " +
						"the relationship chain",((NodeTemplate)context).getName(),HOSTED_ON));
				return null;
			}
			return nodeTpl;
		}
		if(nodeTemplateName.equals(TARGET)) {
			if(!(((EntityTemplate)context).getTypeDefinition() instanceof RelationshipType)) {
				ThreadLocalsHolder.getCollector().appendException(
						"KeyError: \"TARGET\" keyword can only be used in context " +
						" to \"Relationships\" target node");
				return null;
			}
			return ((RelationshipTemplate)context).getTarget();
		}
		if(nodeTemplateName.equals(SOURCE)) {
			if(!(((EntityTemplate)context).getTypeDefinition() instanceof RelationshipType)) {
				ThreadLocalsHolder.getCollector().appendException(
						"KeyError: \"SOURCE\" keyword can only be used in context " +
						" to \"Relationships\" source node");
				return null;
			}
			// bug fix: was getTarget(); the SOURCE keyword must resolve to the
			// relationship's source node (Python reference: self.context.source)
			return ((RelationshipTemplate)context).getSource();
		}
		String name;
		if(nodeTemplateName.equals(SELF) && !(context instanceof ArrayList)) {
			name = ((NodeTemplate)context).getName();
		}
		else {
			name = nodeTemplateName;
		}
		for(NodeTemplate nt: toscaTpl.getNodeTemplates()) {
			if(nt.getName().equals(name)) {
				return nt;
			}
		}
		ThreadLocalsHolder.getCollector().appendException(String.format(
				"KeyError: Node template \"%s\" was not found",nodeTemplateName));
		return null;
	}

	// Looks the attribute up in the node's requirements first, then falls back
	// to its capabilities.
	public AttributeDef _findReqOrCapAttribute(String reqOrCap,String attrName) {
		NodeTemplate nodeTpl = _findNodeTemplate((String)args.get(0));
		// robustness fix: unknown template already reported; avoid NPE below
		if(nodeTpl == null) {
			return null;
		}
		// Find attribute in node template's requirements
		for(Object ro: nodeTpl.getRequirements()) {
			if(ro != null && ro instanceof LinkedHashMap) {
				LinkedHashMap<String,Object> r = (LinkedHashMap<String,Object>)ro;
				for(String req: r.keySet()) {
					String nodeName = (String)r.get(req);
					if(req.equals(reqOrCap)) {
						NodeTemplate nodeTemplate = _findNodeTemplate(nodeName);
						return _getCapabilityAttribute(nodeTemplate,req,attrName);
					}
				}
			}
		}
		// If requirement was not found, look in node template's capabilities
		return _getCapabilityAttribute(nodeTpl,reqOrCap,attrName);
	}

	// Gets a node template capability attribute definition, or null (reported).
	private AttributeDef _getCapabilityAttribute(NodeTemplate nodeTemplate,
												 String capabilityName,
												 String attrName) {
		LinkedHashMap<String,Capability> caps = nodeTemplate.getCapabilities();
		if(caps != null && caps.keySet().contains(capabilityName)) {
			Capability cap = caps.get(capabilityName);
			AttributeDef attribute = null;
			LinkedHashMap<String,AttributeDef> attrs =
					cap.getDefinition().getAttributesDef();
			if(attrs != null && attrs.keySet().contains(attrName)) {
				attribute = attrs.get(attrName);
			}
			if(attribute == null) {
				ThreadLocalsHolder.getCollector().appendException(String.format(
						"KeyError: Attribute \"%s\" was not found in capability \"%s\" of node template \"%s\" referenced from node template \"%s\"",
						attrName,capabilityName,nodeTemplate.getName(),((NodeTemplate)context).getName()));
			}
			return attribute;
		}
		String msg = String.format(
				"Requirement/Capability \"%s\" referenced from node template \"%s\" was not found in node template \"%s\"",
				capabilityName,((NodeTemplate)context).getName(),nodeTemplate.getName());
		ThreadLocalsHolder.getCollector().appendException("KeyError: " + msg);
		return null;
	}

	String getNodeTemplateName() {
		return (String)args.get(0);
	}

	String getAttributeName() {
		return (String)args.get(1);
	}

}
// (commented-out Python reference implementation removed)
diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetInput.java b/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetInput.java
new file mode 100644
index 0000000..4332f70
--- /dev/null
+++
b/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetInput.java @@ -0,0 +1,110 @@ +package org.openecomp.sdc.toscaparser.api.functions; + +import java.util.ArrayList; +import java.util.LinkedHashMap; + +import org.openecomp.sdc.toscaparser.api.DataEntity; +import org.openecomp.sdc.toscaparser.api.TopologyTemplate; +import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; +import org.openecomp.sdc.toscaparser.api.parameters.Input; +import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +public class GetInput extends Function { + + public GetInput(TopologyTemplate toscaTpl,Object context,String name,ArrayList _args) { + super(toscaTpl,context,name,_args); + + } + + @Override + void validate() { + if(args.size() != 1) { + //PA - changed to WARNING from CRITICAL after talking to Renana, 22/05/2017 + ThreadLocalsHolder.getCollector().appendWarning(String.format( + "ValueError: Expected one argument for function \"get_input\" but received \"%s\"", + args.toString())); + } + boolean bFound = false; + for(Input inp: toscaTpl.getInputs()) { + if(inp.getName().equals(args.get(0))) { + bFound = true; + break; + } + } + if(!bFound) { + ThreadLocalsHolder.getCollector().appendException(String.format( + "UnknownInputError: Unknown input \"%s\"",args.get(0))); + } + } + + public Object result() { + if(toscaTpl.getParsedParams() != null && + toscaTpl.getParsedParams().get(getInputName()) != null) { + LinkedHashMap ttinp = (LinkedHashMap)toscaTpl.getTpl().get("inputs"); + LinkedHashMap ttinpinp = (LinkedHashMap)ttinp.get(getInputName()); + String type = (String)ttinpinp.get("type"); + + return DataEntity.validateDatatype( + type, toscaTpl.getParsedParams().get(getInputName()),null,null,null); + } + + Input inputDef = null; + for(Input inpDef: toscaTpl.getInputs()) { + if(getInputName().equals(inpDef.getName())) { + inputDef = inpDef; + break; + } + } + if(inputDef != null) { + return inputDef.getDefault(); + } + return null; + } + + public String 
getInputName() { + return (String)args.get(0); + } + +} + +/*python + +class GetInput(Function): +"""Get a property value declared within the input of the service template. + +Arguments: + +* Input name. + +Example: + +* get_input: port +""" + +def validate(self): + if len(self.args) != 1: + ExceptionCollector.appendException( + ValueError(_( + 'Expected one argument for function "get_input" but ' + 'received "%s".') % self.args)) + inputs = [input.name for input in self.tosca_tpl.inputs] + if self.args[0] not in inputs: + ExceptionCollector.appendException( + UnknownInputError(input_name=self.args[0])) + +def result(self): + if self.tosca_tpl.parsed_params and \ + self.input_name in self.tosca_tpl.parsed_params: + return DataEntity.validate_datatype( + self.tosca_tpl.tpl['inputs'][self.input_name]['type'], + self.tosca_tpl.parsed_params[self.input_name]) + + input = [input_def for input_def in self.tosca_tpl.inputs + if self.input_name == input_def.name][0] + return input.default + +@property +def input_name(self): + return self.args[0] + +*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetOperationOutput.java b/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetOperationOutput.java new file mode 100644 index 0000000..22f2cd7 --- /dev/null +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetOperationOutput.java @@ -0,0 +1,225 @@ +package org.openecomp.sdc.toscaparser.api.functions; + +import java.util.ArrayList; + +import org.openecomp.sdc.toscaparser.api.*; +import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; +import org.openecomp.sdc.toscaparser.api.elements.InterfacesDef; +import org.openecomp.sdc.toscaparser.api.elements.RelationshipType; +import org.openecomp.sdc.toscaparser.api.elements.StatefulEntityType; +import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +public class GetOperationOutput extends Function { + + public 
GetOperationOutput(TopologyTemplate ttpl,Object context,String name,ArrayList args) { + super(ttpl,context,name,args); + } + + @Override + public void validate() { + if(args.size() == 4) { + _findNodeTemplate((String)args.get(0)); + String interfaceName = _findInterfaceName((String)args.get(1)); + _findOperationName(interfaceName,(String)args.get(2)); + } + else { + ThreadLocalsHolder.getCollector().appendException( + "ValueError: Illegal arguments for function \"get_operation_output\". " + + "Expected arguments: \"template_name\",\"interface_name\"," + + "\"operation_name\",\"output_variable_name\""); + } + } + + private String _findInterfaceName(String _interfaceName) { + boolean bFound = false; + for(String sect: InterfacesDef.SECTIONS) { + if(sect.equals(_interfaceName)) { + bFound = true; + break; + } + } + if(bFound) { + return _interfaceName; + } + else { + ThreadLocalsHolder.getCollector().appendException(String.format( + "ValueError: invalid interface name \"%s\" in \"get_operation_output\"", + _interfaceName)); + return null; + } + } + + private String _findOperationName(String interfaceName,String operationName) { + + if(interfaceName.equals("Configure") || + interfaceName.equals("tosca.interfaces.node.relationship.Configure")) { + boolean bFound = false; + for(String sect: StatefulEntityType.interfacesRelationshipConfigureOperations) { + if(sect.equals(operationName)) { + bFound = true; + break; + } + } + if(bFound) { + return operationName; + } + else { + ThreadLocalsHolder.getCollector().appendException(String.format( + "ValueError: Invalid operation of Configure interface \"%s\" in \"get_operation_output\"", + operationName)); + return null; + } + } + if(interfaceName.equals("Standard") || + interfaceName.equals("tosca.interfaces.node.lifecycle.Standard")) { + boolean bFound = false; + for(String sect: StatefulEntityType.interfacesNodeLifecycleOperations) { + if(sect.equals(operationName)) { + bFound = true; + break; + } + } + if(bFound) { + return 
operationName; + } + else { + ThreadLocalsHolder.getCollector().appendException(String.format( + "ValueError: Invalid operation of Standard interface \"%s\" in \"get_operation_output\"", + operationName)); + return null; + } + } + else { + ThreadLocalsHolder.getCollector().appendException(String.format( + "ValueError: Invalid interface name \"%s\" in \"get_operation_output\"", + interfaceName)); + return null; + } + } + + private NodeTemplate _findNodeTemplate(String nodeTemplateName) { + if(nodeTemplateName.equals(TARGET)) { + if(!(((EntityTemplate)context).getTypeDefinition() instanceof RelationshipType)) { + ThreadLocalsHolder.getCollector().appendException( + "KeyError: \"TARGET\" keyword can only be used in context " + + " to \"Relationships\" target node"); + return null; + } + return ((RelationshipTemplate)context).getTarget(); + } + if(nodeTemplateName.equals(SOURCE)) { + if(!(((EntityTemplate)context).getTypeDefinition() instanceof RelationshipType)) { + ThreadLocalsHolder.getCollector().appendException( + "KeyError: \"SOURCE\" keyword can only be used in context " + + " to \"Relationships\" source node"); + return null; + } + return ((RelationshipTemplate)context).getSource(); + } + String name; + if(nodeTemplateName.equals(SELF) && !(context instanceof ArrayList)) { + name = ((NodeTemplate)context).getName(); + } + else { + name = nodeTemplateName; + } + for(NodeTemplate nt: toscaTpl.getNodeTemplates()) { + if(nt.getName().equals(name)) { + return nt; + } + } + ThreadLocalsHolder.getCollector().appendException(String.format( + "KeyError: Node template \"%s\" was not found",nodeTemplateName)); + return null; + } + + @Override + public Object result() { + return this; + } + +} + +/*python + +class GetOperationOutput(Function): +def validate(self): + if len(self.args) == 4: + self._find_node_template(self.args[0]) + interface_name = self._find_interface_name(self.args[1]) + self._find_operation_name(interface_name, self.args[2]) + else: + 
ExceptionCollector.appendException( + ValueError(_('Illegal arguments for function "{0}". Expected ' + 'arguments: "template_name","interface_name",' + '"operation_name","output_variable_name"' + ).format(GET_OPERATION_OUTPUT))) + return + +def _find_interface_name(self, interface_name): + if interface_name in toscaparser.elements.interfaces.SECTIONS: + return interface_name + else: + ExceptionCollector.appendException( + ValueError(_('Enter a valid interface name' + ).format(GET_OPERATION_OUTPUT))) + return + +def _find_operation_name(self, interface_name, operation_name): + if(interface_name == 'Configure' or + interface_name == 'tosca.interfaces.node.relationship.Configure'): + if(operation_name in + StatefulEntityType. + interfaces_relationship_configure_operations): + return operation_name + else: + ExceptionCollector.appendException( + ValueError(_('Enter an operation of Configure interface' + ).format(GET_OPERATION_OUTPUT))) + return + elif(interface_name == 'Standard' or + interface_name == 'tosca.interfaces.node.lifecycle.Standard'): + if(operation_name in + StatefulEntityType.interfaces_node_lifecycle_operations): + return operation_name + else: + ExceptionCollector.appendException( + ValueError(_('Enter an operation of Standard interface' + ).format(GET_OPERATION_OUTPUT))) + return + else: + ExceptionCollector.appendException( + ValueError(_('Enter a valid operation name' + ).format(GET_OPERATION_OUTPUT))) + return + +def _find_node_template(self, node_template_name): + if node_template_name == TARGET: + if not isinstance(self.context.type_definition, RelationshipType): + ExceptionCollector.appendException( + KeyError(_('"TARGET" keyword can only be used in context' + ' to "Relationships" target node'))) + return + return self.context.target + if node_template_name == SOURCE: + if not isinstance(self.context.type_definition, RelationshipType): + ExceptionCollector.appendException( + KeyError(_('"SOURCE" keyword can only be used in context' + ' to 
"Relationships" source node'))) + return + return self.context.source + name = self.context.name \ + if node_template_name == SELF and \ + not isinstance(self.context, list) \ + else node_template_name + for node_template in self.tosca_tpl.nodetemplates: + if node_template.name == name: + return node_template + ExceptionCollector.appendException( + KeyError(_( + 'Node template "{0}" was not found.' + ).format(node_template_name))) + +def result(self): + return self +*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetProperty.java b/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetProperty.java new file mode 100644 index 0000000..3550542 --- /dev/null +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetProperty.java @@ -0,0 +1,636 @@ +package org.openecomp.sdc.toscaparser.api.functions; + +import java.util.ArrayList; +import java.util.LinkedHashMap; + +import org.openecomp.sdc.toscaparser.api.*; +import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; +import org.openecomp.sdc.toscaparser.api.elements.CapabilityTypeDef; +import org.openecomp.sdc.toscaparser.api.elements.EntityType; +import org.openecomp.sdc.toscaparser.api.elements.NodeType; +import org.openecomp.sdc.toscaparser.api.elements.PropertyDef; +import org.openecomp.sdc.toscaparser.api.elements.RelationshipType; +import org.openecomp.sdc.toscaparser.api.elements.StatefulEntityType; +import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +public class GetProperty extends Function { + // Get a property value of an entity defined in the same service template + + // Arguments: + + // * Node template name | SELF | HOST | SOURCE | TARGET. + // * Requirement or capability name (optional). + // * Property name. + + // If requirement or capability name is specified, the behavior is as follows: + // The req or cap name is first looked up in the specified node template's + // requirements. 
+ // If found, it would search for a matching capability + // of an other node template and get its property as specified in function + // arguments. + // Otherwise, the req or cap name would be looked up in the specified + // node template's capabilities and if found, it would return the property of + // the capability as specified in function arguments. + + // Examples: + + // * { get_property: [ mysql_server, port ] } + // * { get_property: [ SELF, db_port ] } + // * { get_property: [ SELF, database_endpoint, port ] } + // * { get_property: [ SELF, database_endpoint, port, 1 ] } + + + public GetProperty(TopologyTemplate ttpl,Object context,String name,ArrayList args) { + super(ttpl,context,name,args); + } + + @Override + void validate() { + if(args.size() < 2) { + ThreadLocalsHolder.getCollector().appendException( + "ValueError: Illegal arguments for function \"get_property\". Expected arguments: \"node-template-name\", \"req-or-cap\" (optional), \"property name.\""); + return; + } + if(args.size() == 2) { + Property foundProp = _findProperty((String)args.get(1)); + if(foundProp == null) { + return; + } + Object prop = foundProp.getValue(); + if(!(prop instanceof Function)) { + Function.getFunction(toscaTpl,context, prop); + } + } + else if(args.size() >= 3) { + // do not use _find_property to avoid raise KeyError + // if the prop is not found + // First check if there is property with this name + NodeTemplate nodeTpl = _findNodeTemplate((String)args.get(0)); + LinkedHashMap props; + if(nodeTpl != null) { + props = nodeTpl.getProperties(); + } + else { + props = new LinkedHashMap<>(); + } + int index = 2; + Object propertyValue; + if(props.get(args.get(1)) != null) { + propertyValue = ((Property)props.get(args.get(1))).getValue(); + } + else { + index = 3; + // then check the req or caps + propertyValue = _findReqOrCapProperty((String)args.get(1),(String)args.get(2)); + } + + if(args.size() > index) { + for(Object elem: args.subList(index,args.size())) { + 
if(propertyValue instanceof ArrayList) { + int intElem = (int)elem; + propertyValue = _getIndexValue(propertyValue,intElem); + } + else { + propertyValue = _getAttributeValue(propertyValue,(String)elem); + } + } + } + } + } + + @SuppressWarnings("unchecked") + private Object _findReqOrCapProperty(String reqOrCap,String propertyName) { + NodeTemplate nodeTpl = _findNodeTemplate((String)args.get(0)); + if(nodeTpl == null) { + return null; + } + // look for property in node template's requirements + for(Object r: nodeTpl.getRequirements()) { + if(r instanceof LinkedHashMap) { + LinkedHashMap rlist = (LinkedHashMap)r; + for(String req: rlist.keySet()) { + String nodeName = (String)rlist.get(req); + if(req.equals(reqOrCap)) { + NodeTemplate nodeTemplate = _findNodeTemplate(nodeName); + return _getCapabilityProperty(nodeTemplate,req,propertyName,true); + } + } + } + } + // If requirement was not found, look in node template's capabilities + return _getCapabilityProperty(nodeTpl,reqOrCap,propertyName,true); + } + + private Object _getCapabilityProperty(NodeTemplate nodeTemplate, + String capabilityName, + String propertyName, + boolean throwErrors) { + + // Gets a node template capability property + Object property = null; + LinkedHashMap caps = nodeTemplate.getCapabilities(); + if(caps != null && caps.get(capabilityName) != null) { + Capability cap = caps.get(capabilityName); + LinkedHashMap props = cap.getProperties(); + if(props != null && props.get(propertyName) != null) { + property = ((Property)props.get(propertyName)).getValue(); + } + if(property == null && throwErrors) { + ThreadLocalsHolder.getCollector().appendException(String.format( + "KeyError: Property \"%s\" was not found in capability \"%s\" of node template \"%s\" referenced from node template \"%s\"", + propertyName,capabilityName,nodeTemplate.getName(),((NodeTemplate)context).getName())); + } + return property; + } + if(throwErrors) { + ThreadLocalsHolder.getCollector().appendException(String.format( + 
"KeyError: Requirement/Capability \"%s\" referenced from node template \"%s\" was not found in node template \"%s\"", + capabilityName,((NodeTemplate)context).getName(),nodeTemplate.getName())); + } + + return null; + } + + private Property _findProperty(String propertyName) { + NodeTemplate nodeTpl = _findNodeTemplate((String)args.get(0)); + if(nodeTpl == null) { + return null; + } + LinkedHashMap props = nodeTpl.getProperties(); + Property found = props.get(propertyName); + if(found == null) { + ThreadLocalsHolder.getCollector().appendException(String.format( + "KeyError: Property \"%s\" was not found in node template \"%s\"", + propertyName,nodeTpl.getName())); + } + return found; + } + + private NodeTemplate _findNodeTemplate(String nodeTemplateName) { + if(nodeTemplateName.equals(SELF)) { + return (NodeTemplate)context; + } + // enable the HOST value in the function + if(nodeTemplateName.equals(HOST)) { + NodeTemplate node = _findHostContainingProperty(null); + if(node == null) { + ThreadLocalsHolder.getCollector().appendException(String.format( + "KeyError: Property \"%s\" was not found in capability \"%s\" referenced from node template \"%s\"", + (String)args.get(2),(String)args.get(1),((NodeTemplate)context).getName())); + return null; + } + return node; + } + if(nodeTemplateName.equals(TARGET)) { + if(!(((RelationshipTemplate)context).getTypeDefinition() instanceof RelationshipType)) { + ThreadLocalsHolder.getCollector().appendException( + "KeyError: \"TARGET\" keyword can only be used in context to \"Relationships\" target node"); + return null; + } + return ((RelationshipTemplate)context).getTarget(); + } + if(nodeTemplateName.equals(SOURCE)) { + if(!(((RelationshipTemplate)context).getTypeDefinition() instanceof RelationshipType)) { + ThreadLocalsHolder.getCollector().appendException( + "KeyError: \"SOURCE\" keyword can only be used in context to \"Relationships\" source node"); + return null; + } + return 
((RelationshipTemplate)context).getSource(); + } + if(toscaTpl.getNodeTemplates() == null) { + return null; + } + for(NodeTemplate nodeTemplate: toscaTpl.getNodeTemplates()) { + if(nodeTemplate.getName().equals(nodeTemplateName)) { + return nodeTemplate; + } + } + ThreadLocalsHolder.getCollector().appendException(String.format( + "KeyError: Node template \"%s\" was not found. Referenced from Node Template \"%s\"", + nodeTemplateName,((NodeTemplate)context).getName())); + + return null; + } + + @SuppressWarnings("rawtypes") + private Object _getIndexValue(Object value,int index) { + if(value instanceof ArrayList) { + if(index < ((ArrayList)value).size()) { + return ((ArrayList)value).get(index); + } + else { + ThreadLocalsHolder.getCollector().appendException(String.format( + "KeyError: Property \"%s\" found in capability \"%s\" referenced from node template \"%s\" must have an element with index %d", + args.get(2),args.get(1),((NodeTemplate)context).getName(),index)); + + } + } + else { + ThreadLocalsHolder.getCollector().appendException(String.format( + "KeyError: Property \"%s\" found in capability \"%s\" referenced from node template \"%s\" must be a list", + args.get(2),args.get(1),((NodeTemplate)context).getName())); + } + return null; + } + + @SuppressWarnings("unchecked") + private Object _getAttributeValue(Object value,String attribute) { + if(value instanceof LinkedHashMap) { + Object ov = ((LinkedHashMap)value).get(attribute); + if(ov != null) { + return ov; + } + else { + ThreadLocalsHolder.getCollector().appendException(String.format( + "KeyError: Property \"%s\" found in capability \"%s\" referenced from node template \"%s\" must have an attribute named \"%s\"", + args.get(2),args.get(1),((NodeTemplate)context).getName(),attribute)); + } + } + else { + ThreadLocalsHolder.getCollector().appendException(String.format( + "KeyError: Property \"%s\" found in capability \"%s\" referenced from node template \"%s\" must be a dict", + 
args.get(2),args.get(1),((NodeTemplate)context).getName())); + } + return null; + } + + // Add this functions similar to get_attribute case + private NodeTemplate _findHostContainingProperty(String nodeTemplateName) { + if(nodeTemplateName == null) { + nodeTemplateName = SELF; + } + NodeTemplate nodeTemplate = _findNodeTemplate(nodeTemplateName); + LinkedHashMap hostedOnRel = (LinkedHashMap) + EntityType.TOSCA_DEF.get(HOSTED_ON); + for(Object r: nodeTemplate.getRequirements()) { + if(r instanceof LinkedHashMap) { + LinkedHashMap rlist = (LinkedHashMap)r; + for(String requirement: rlist.keySet()) { + String targetName = (String)rlist.get(requirement); + NodeTemplate targetNode = _findNodeTemplate(targetName); + NodeType targetType = (NodeType)targetNode.getTypeDefinition(); + for(CapabilityTypeDef capDef: targetType.getCapabilitiesObjects()) { + if(capDef.inheritsFrom((ArrayList)hostedOnRel.get("valid_target_types"))) { + if(_propertyExistsInType(targetType)) { + return targetNode; + } + // If requirement was not found, look in node + // template's capabilities + if(args.size() > 2 && + _getCapabilityProperty(targetNode,(String)args.get(1),(String)args.get(2),false) != null) { + return targetNode; + } + + return _findHostContainingProperty(targetName); + } + } + } + } + } + return null; + } + + private boolean _propertyExistsInType(StatefulEntityType typeDefinition) { + LinkedHashMap propsDef = typeDefinition.getPropertiesDef(); + return propsDef.keySet().contains((String)args.get(1)); + } + + @Override + public Object result() { + Object propertyValue; + if(args.size() >= 3) { + // First check if there is property with this name + NodeTemplate nodeTpl = _findNodeTemplate((String)args.get(0)); + LinkedHashMap props; + if(nodeTpl != null) { + props = nodeTpl.getProperties(); + } + else { + props = new LinkedHashMap<>(); + } + int index = 2; + if(props.get(args.get(1)) != null) { + propertyValue = ((Property)props.get(args.get(1))).getValue(); + } + else { + index = 
3; + // then check the req or caps + propertyValue = _findReqOrCapProperty((String)args.get(1),(String)args.get(2)); + } + + if(args.size() > index) { + for(Object elem: args.subList(index,args.size())) { + if(propertyValue instanceof ArrayList) { + int intElem = (int)elem; + propertyValue = _getIndexValue(propertyValue,intElem); + } + else { + propertyValue = _getAttributeValue(propertyValue,(String)elem); + } + } + } + } + else { + propertyValue = _findProperty((String)args.get(1)).getValue(); + } + if(propertyValue instanceof Function) { + return ((Function)propertyValue).result(); + } + return Function.getFunction(toscaTpl,context,propertyValue); + } + + public String getNodeTemplateName() { + return (String)args.get(0); + } + + public String getPropertyName() { + if(args.size() > 2) { + return (String)args.get(2); + } + return (String)args.get(1); + } + + public String getReqorCap() { + if(args.size() > 2) { + return (String)args.get(1); + } + return null; + } + +} + +/*python + +class GetProperty(Function): +"""Get a property value of an entity defined in the same service template. + +Arguments: + +* Node template name | SELF | HOST | SOURCE | TARGET. +* Requirement or capability name (optional). +* Property name. + +If requirement or capability name is specified, the behavior is as follows: +The req or cap name is first looked up in the specified node template's +requirements. +If found, it would search for a matching capability +of an other node template and get its property as specified in function +arguments. +Otherwise, the req or cap name would be looked up in the specified +node template's capabilities and if found, it would return the property of +the capability as specified in function arguments. 
+ +Examples: + +* { get_property: [ mysql_server, port ] } +* { get_property: [ SELF, db_port ] } +* { get_property: [ SELF, database_endpoint, port ] } +* { get_property: [ SELF, database_endpoint, port, 1 ] } +""" + +def validate(self): + if len(self.args) < 2: + ExceptionCollector.appendException( + ValueError(_( + 'Expected arguments: "node-template-name", "req-or-cap" ' + '(optional), "property name".'))) + return + if len(self.args) == 2: + found_prop = self._find_property(self.args[1]) + if not found_prop: + return + prop = found_prop.value + if not isinstance(prop, Function): + get_function(self.tosca_tpl, self.context, prop) + elif len(self.args) >= 3: + # do not use _find_property to avoid raise KeyError + # if the prop is not found + # First check if there is property with this name + node_tpl = self._find_node_template(self.args[0]) + props = node_tpl.get_properties() if node_tpl else [] + index = 2 + found = [props[self.args[1]]] if self.args[1] in props else [] + if found: + property_value = found[0].value + else: + index = 3 + # then check the req or caps + property_value = self._find_req_or_cap_property(self.args[1], + self.args[2]) + if len(self.args) > index: + for elem in self.args[index:]: + if isinstance(property_value, list): + int_elem = int(elem) + property_value = self._get_index_value(property_value, + int_elem) + else: + property_value = self._get_attribute_value( + property_value, + elem) + +def _find_req_or_cap_property(self, req_or_cap, property_name): + node_tpl = self._find_node_template(self.args[0]) + # Find property in node template's requirements + for r in node_tpl.requirements: + for req, node_name in r.items(): + if req == req_or_cap: + node_template = self._find_node_template(node_name) + return self._get_capability_property( + node_template, + req, + property_name) + # If requirement was not found, look in node template's capabilities + return self._get_capability_property(node_tpl, + req_or_cap, + property_name) + +def 
_get_capability_property(self, + node_template, + capability_name, + property_name): + """Gets a node template capability property.""" + caps = node_template.get_capabilities() + if caps and capability_name in caps.keys(): + cap = caps[capability_name] + property = None + props = cap.get_properties() + if props and property_name in props.keys(): + property = props[property_name].value + if not property: + ExceptionCollector.appendException( + KeyError(_('Property "%(prop)s" was not found in ' + 'capability "%(cap)s" of node template ' + '"%(ntpl1)s" referenced from node template ' + '"%(ntpl2)s".') % {'prop': property_name, + 'cap': capability_name, + 'ntpl1': node_template.name, + 'ntpl2': self.context.name})) + return property + msg = _('Requirement/Capability "{0}" referenced from node template ' + '"{1}" was not found in node template "{2}".').format( + capability_name, + self.context.name, + node_template.name) + ExceptionCollector.appendException(KeyError(msg)) + +def _find_property(self, property_name): + node_tpl = self._find_node_template(self.args[0]) + if not node_tpl: + return + props = node_tpl.get_properties() + found = [props[property_name]] if property_name in props else [] + if len(found) == 0: + ExceptionCollector.appendException( + KeyError(_('Property "%(prop)s" was not found in node ' + 'template "%(ntpl)s".') % + {'prop': property_name, + 'ntpl': node_tpl.name})) + return None + return found[0] + +def _find_node_template(self, node_template_name): + if node_template_name == SELF: + return self.context + # enable the HOST value in the function + if node_template_name == HOST: + return self._find_host_containing_property() + if node_template_name == TARGET: + if not isinstance(self.context.type_definition, RelationshipType): + ExceptionCollector.appendException( + KeyError(_('"TARGET" keyword can only be used in context' + ' to "Relationships" target node'))) + return + return self.context.target + if node_template_name == SOURCE: + if not 
isinstance(self.context.type_definition, RelationshipType): + ExceptionCollector.appendException( + KeyError(_('"SOURCE" keyword can only be used in context' + ' to "Relationships" source node'))) + return + return self.context.source + if not hasattr(self.tosca_tpl, 'nodetemplates'): + return + for node_template in self.tosca_tpl.nodetemplates: + if node_template.name == node_template_name: + return node_template + ExceptionCollector.appendException( + KeyError(_( + 'Node template "{0}" was not found.' + ).format(node_template_name))) + +def _get_index_value(self, value, index): + if isinstance(value, list): + if index < len(value): + return value[index] + else: + ExceptionCollector.appendException( + KeyError(_( + "Property '{0}' found in capability '{1}'" + " referenced from node template {2}" + " must have an element with index {3}."). + format(self.args[2], + self.args[1], + self.context.name, + index))) + else: + ExceptionCollector.appendException( + KeyError(_( + "Property '{0}' found in capability '{1}'" + " referenced from node template {2}" + " must be a list.").format(self.args[2], + self.args[1], + self.context.name))) + +def _get_attribute_value(self, value, attibute): + if isinstance(value, dict): + if attibute in value: + return value[attibute] + else: + ExceptionCollector.appendException( + KeyError(_( + "Property '{0}' found in capability '{1}'" + " referenced from node template {2}" + " must have an attribute named {3}."). 
+ format(self.args[2], + self.args[1], + self.context.name, + attibute))) + else: + ExceptionCollector.appendException( + KeyError(_( + "Property '{0}' found in capability '{1}'" + " referenced from node template {2}" + " must be a dict.").format(self.args[2], + self.args[1], + self.context.name))) + +# Add this functions similar to get_attribute case +def _find_host_containing_property(self, node_template_name=SELF): + node_template = self._find_node_template(node_template_name) + hosted_on_rel = EntityType.TOSCA_DEF[HOSTED_ON] + for r in node_template.requirements: + for requirement, target_name in r.items(): + target_node = self._find_node_template(target_name) + target_type = target_node.type_definition + for capability in target_type.get_capabilities_objects(): + if capability.type in hosted_on_rel['valid_target_types']: + if self._property_exists_in_type(target_type): + return target_node + return self._find_host_containing_property( + target_name) + return None + +def _property_exists_in_type(self, type_definition): + props_def = type_definition.get_properties_def() + found = [props_def[self.args[1]]] \ + if self.args[1] in props_def else [] + return len(found) == 1 + +def result(self): + if len(self.args) >= 3: + # First check if there is property with this name + node_tpl = self._find_node_template(self.args[0]) + props = node_tpl.get_properties() if node_tpl else [] + index = 2 + found = [props[self.args[1]]] if self.args[1] in props else [] + if found: + property_value = found[0].value + else: + index = 3 + # then check the req or caps + property_value = self._find_req_or_cap_property(self.args[1], + self.args[2]) + if len(self.args) > index: + for elem in self.args[index:]: + if isinstance(property_value, list): + int_elem = int(elem) + property_value = self._get_index_value(property_value, + int_elem) + else: + property_value = self._get_attribute_value( + property_value, + elem) + else: + property_value = self._find_property(self.args[1]).value + if 
isinstance(property_value, Function): + return property_value.result() + return get_function(self.tosca_tpl, + self.context, + property_value) + +@property +def node_template_name(self): + return self.args[0] + +@property +def property_name(self): + if len(self.args) > 2: + return self.args[2] + return self.args[1] + +@property +def req_or_cap(self): + if len(self.args) > 2: + return self.args[1] + return None +*/ diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/functions/Token.java b/src/main/java/org/openecomp/sdc/toscaparser/api/functions/Token.java new file mode 100644 index 0000000..4438908 --- /dev/null +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/functions/Token.java @@ -0,0 +1,112 @@ +package org.openecomp.sdc.toscaparser.api.functions; + +import java.util.ArrayList; +import java.util.LinkedHashMap; + +import org.openecomp.sdc.toscaparser.api.NodeTemplate; +import org.openecomp.sdc.toscaparser.api.TopologyTemplate; +import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; +import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +public class Token extends Function { + // Validate the function and provide an instance of the function + + //The token function is used within a TOSCA service template on a string to + //parse out (tokenize) substrings separated by one or more token characters + //within a larger string. + + //Arguments: + + //* The composite string that contains one or more substrings separated by + // token characters. + //* The string that contains one or more token characters that separate + // substrings within the composite string. + //* The integer indicates the index of the substring to return from the + // composite string. Note that the first substring is denoted by using + // the '0' (zero) integer value. 
+ + //Example: + + // [ get_attribute: [ my_server, data_endpoint, ip_address ], ':', 1 ] + + + public Token(TopologyTemplate ttpl,Object context,String name,ArrayList args) { + super(ttpl,context,name,args); + } + + @Override + public Object result() { + return this; + } + + @Override + void validate() { + if(args.size() < 3) { + ThreadLocalsHolder.getCollector().appendException( + "ValueError: Invalid arguments for function \"token\". " + + "Expected at least three arguments"); + } + else { + if(!(args.get(1) instanceof String) || + ((String)args.get(1)).length() != 1) { + ThreadLocalsHolder.getCollector().appendException( + "ValueError: Invalid arguments for function \"token\". " + + "Expected single char value as second argument"); + } + if(!(args.get(2) instanceof Integer)) { + ThreadLocalsHolder.getCollector().appendException( + "ValueError: Invalid arguments for function \"token\"" + + "Expected integer value as third argument"); + } + } + } + +} + +/*python + +class Token(Function): +"""Validate the function and provide an instance of the function + +The token function is used within a TOSCA service template on a string to +parse out (tokenize) substrings separated by one or more token characters +within a larger string. + + +Arguments: + +* The composite string that contains one or more substrings separated by + token characters. +* The string that contains one or more token characters that separate + substrings within the composite string. +* The integer indicates the index of the substring to return from the + composite string. Note that the first substring is denoted by using + the '0' (zero) integer value. + +Example: + + [ get_attribute: [ my_server, data_endpoint, ip_address ], ':', 1 ] + +""" + +def validate(self): + if len(self.args) < 3: + ExceptionCollector.appendException( + ValueError(_('Invalid arguments for function "{0}". 
Expected ' + 'at least three arguments.').format(TOKEN))) + else: + if not isinstance(self.args[1], str) or len(self.args[1]) != 1: + ExceptionCollector.appendException( + ValueError(_('Invalid arguments for function "{0}". ' + 'Expected single char value as second ' + 'argument.').format(TOKEN))) + + if not isinstance(self.args[2], int): + ExceptionCollector.appendException( + ValueError(_('Invalid arguments for function "{0}". ' + 'Expected integer value as third ' + 'argument.').format(TOKEN))) + +def result(self): + return self +*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/parameters/Input.java b/src/main/java/org/openecomp/sdc/toscaparser/api/parameters/Input.java new file mode 100644 index 0000000..7b3e64f --- /dev/null +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/parameters/Input.java @@ -0,0 +1,226 @@ +package org.openecomp.sdc.toscaparser.api.parameters; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.LinkedHashMap; + +import org.openecomp.sdc.toscaparser.api.DataEntity; +import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; +import org.openecomp.sdc.toscaparser.api.elements.EntityType; +import org.openecomp.sdc.toscaparser.api.elements.constraints.Constraint; +import org.openecomp.sdc.toscaparser.api.elements.constraints.Schema; +import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +public class Input { + + private static final String TYPE = "type"; + private static final String DESCRIPTION = "description"; + private static final String DEFAULT = "default"; + private static final String CONSTRAINTS = "constraints"; + private static final String REQUIRED = "required"; + private static final String STATUS = "status"; + private static final String ENTRY_SCHEMA = "entry_schema"; + + public static final String INTEGER = "integer"; + public static final String STRING = "string"; + public static final String BOOLEAN = "boolean"; + public static final 
String FLOAT = "float"; + public static final String LIST = "list"; + public static final String MAP = "map"; + public static final String JSON = "json"; + + private static String INPUTFIELD[] = { + TYPE, DESCRIPTION, DEFAULT, CONSTRAINTS, REQUIRED,STATUS, ENTRY_SCHEMA + }; + + private static String PRIMITIVE_TYPES[] = { + INTEGER, STRING, BOOLEAN, FLOAT, LIST, MAP, JSON + }; + + private String name; + private Schema schema; + private LinkedHashMap customDefs; + + public Input(String _name,LinkedHashMap _schemaDict,LinkedHashMap _customDefs) { + name = _name; + schema = new Schema(_name,_schemaDict); + customDefs = _customDefs; + } + + public String getName() { + return name; + } + + public String getType() { + return schema.getType(); + } + + public String getDescription() { + return schema.getDescription(); + } + + public boolean isRequired() { + return schema.isRequired(); + } + + public Object getDefault() { + return schema.getDefault(); + } + + public ArrayList getConstraints() { + return schema.getConstraints(); + } + + public void validate(Object value) { + _validateField(); + _validateType(getType()); + if(value != null) { + _validateValue(value); + } + } + + private void _validateField() { + for(String key: schema.getSchema().keySet()) { + boolean bFound = false; + for(String ifld: INPUTFIELD) { + if(key.equals(ifld)) { + bFound = true; + break; + } + } + if(!bFound) { + ThreadLocalsHolder.getCollector().appendException(String.format( + "UnknownFieldError: Input \"%s\" contains unknown field \"%s\"", + name,key)); + } + } + } + + private void _validateType(String inputType) { + boolean bFound = false; + for(String pt: Schema.PROPERTY_TYPES) { + if(pt.equals(inputType)) { + bFound = true; + break; + } + } + + if(!bFound) { + if(customDefs.get(inputType) != null) { + bFound = true; + } + } + + if(!bFound) { + ThreadLocalsHolder.getCollector().appendException(String.format( + "ValueError: Invalid type \"%s\"",inputType)); + } + } + + private void 
_validateValue(Object value) { + Object datatype = null; + if(EntityType.TOSCA_DEF.get(getType()) != null) { + datatype = EntityType.TOSCA_DEF.get(getType()); + } + else if(EntityType.TOSCA_DEF.get(EntityType.DATATYPE_NETWORK_PREFIX + getType()) != null) { + datatype = EntityType.TOSCA_DEF.get(EntityType.DATATYPE_NETWORK_PREFIX + getType()); + } + + String type = getType(); + // if it's one of the basic types DON'T look in customDefs + if(Arrays.asList(PRIMITIVE_TYPES).contains(type)) { + DataEntity.validateDatatype(getType(), value, null, (LinkedHashMap)datatype, null); + return; + } + else if(customDefs.get(getType()) != null) { + datatype = customDefs.get(getType()); + DataEntity.validateDatatype(getType(), value, (LinkedHashMap)datatype, customDefs, null); + return; + } + + DataEntity.validateDatatype(getType(), value, null, (LinkedHashMap)datatype, null); + } +} + +/*python + +from toscaparser.common.exception import ExceptionCollector +from toscaparser.common.exception import MissingRequiredFieldError +from toscaparser.common.exception import UnknownFieldError +from toscaparser.dataentity import DataEntity +from toscaparser.elements.constraints import Schema +from toscaparser.elements.entity_type import EntityType +from toscaparser.utils.gettextutils import _ + + +log = logging.getLogger('tosca') + + +class Input(object): + + INPUTFIELD = (TYPE, DESCRIPTION, DEFAULT, CONSTRAINTS, REQUIRED, STATUS, + ENTRY_SCHEMA) = ('type', 'description', 'default', + 'constraints', 'required', 'status', + 'entry_schema') + + def __init__(self, name, schema_dict): + self.name = name + self.schema = Schema(name, schema_dict) + + self._validate_field() + self.validate_type(self.type) + + @property + def type(self): + return self.schema.type + + @property + def required(self): + return self.schema.required + + @property + def description(self): + return self.schema.description + + @property + def default(self): + return self.schema.default + + @property + def constraints(self): 
+ return self.schema.constraints + + @property + def status(self): + return self.schema.status + + def validate(self, value=None): + if value is not None: + self._validate_value(value) + + def _validate_field(self): + for name in self.schema.schema: + if name not in self.INPUTFIELD: + ExceptionCollector.appendException( + UnknownFieldError(what='Input "%s"' % self.name, + field=name)) + + def validate_type(self, input_type): + if input_type not in Schema.PROPERTY_TYPES: + ExceptionCollector.appendException( + ValueError(_('Invalid type "%s".') % type)) + + # tODO(anyone) Need to test for any built-in datatype not just network + # that is, tosca.datatypes.* and not assume tosca.datatypes.network.* + # tODO(anyone) Add support for tosca.datatypes.Credential + def _validate_value(self, value): + tosca = EntityType.TOSCA_DEF + datatype = None + if self.type in tosca: + datatype = tosca[self.type] + elif EntityType.DATATYPE_NETWORK_PREFIX + self.type in tosca: + datatype = tosca[EntityType.DATATYPE_NETWORK_PREFIX + self.type] + + DataEntity.validate_datatype(self.type, value, None, datatype) + +*/ diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/parameters/Output.java b/src/main/java/org/openecomp/sdc/toscaparser/api/parameters/Output.java new file mode 100644 index 0000000..34ecf12 --- /dev/null +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/parameters/Output.java @@ -0,0 +1,109 @@ +package org.openecomp.sdc.toscaparser.api.parameters; + +import java.util.LinkedHashMap; + +import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; +import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +public class Output { + + private static final String DESCRIPTION = "description"; + public static final String VALUE = "value"; + private static final String OUTPUTFIELD[] = {DESCRIPTION, VALUE}; + + private String name; + private LinkedHashMap attrs;//TYPE??? 
+ + public Output(String oname,LinkedHashMap oattrs) { + name = oname; + attrs = oattrs; + } + + public String getDescription() { + return (String)attrs.get(DESCRIPTION); + } + + public Object getValue() { + return attrs.get(VALUE); + } + + public void validate() { + _validateField(); + } + + private void _validateField() { + if(!(attrs instanceof LinkedHashMap)) { + //TODO wrong error message... + ThreadLocalsHolder.getCollector().appendException(String.format( + "ValidationError: Output \"%s\" has wrong type. Expecting a dict", + name)); + } + + if(getValue() == null) { + ThreadLocalsHolder.getCollector().appendException(String.format( + "MissingRequiredFieldError: Output \"%s\" is missing required \"%s\"", + name,VALUE)); + } + for(String key: attrs.keySet()) { + boolean bFound = false; + for(String of: OUTPUTFIELD) { + if(key.equals(of)) { + bFound = true; + break; + } + } + if(!bFound) { + ThreadLocalsHolder.getCollector().appendException(String.format( + "UnknownFieldError: Output \"%s\" contains unknown field \"%s\"", + name,key)); + } + } + } + + // getter/setter + + public String getName() { + return name; + } + + public void setAttr(String name,Object value) { + attrs.put(name, value); + } +} + +/*python + +class Output(object): + + OUTPUTFIELD = (DESCRIPTION, VALUE) = ('description', 'value') + + def __init__(self, name, attrs): + self.name = name + self.attrs = attrs + + @property + def description(self): + return self.attrs.get(self.DESCRIPTION) + + @property + def value(self): + return self.attrs.get(self.VALUE) + + def validate(self): + self._validate_field() + + def _validate_field(self): + if not isinstance(self.attrs, dict): + ExceptionCollector.appendException( + MissingRequiredFieldError(what='Output "%s"' % self.name, + required=self.VALUE)) + if self.value is None: + ExceptionCollector.appendException( + MissingRequiredFieldError(what='Output "%s"' % self.name, + required=self.VALUE)) + for name in self.attrs: + if name not in 
self.OUTPUTFIELD: + ExceptionCollector.appendException( + UnknownFieldError(what='Output "%s"' % self.name, + field=name)) +*/ diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/prereq/CSAR.java b/src/main/java/org/openecomp/sdc/toscaparser/api/prereq/CSAR.java new file mode 100644 index 0000000..85b54ee --- /dev/null +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/prereq/CSAR.java @@ -0,0 +1,782 @@ +package org.openecomp.sdc.toscaparser.api.prereq; + +import java.io.BufferedOutputStream; +import java.io.File; +import java.io.FileInputStream; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.RandomAccessFile; +import java.net.URL; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.nio.file.StandardCopyOption; +import java.util.*; +import java.util.zip.ZipEntry; +import java.util.zip.ZipFile; +import java.util.zip.ZipInputStream; + +import org.openecomp.sdc.toscaparser.api.ImportsLoader; +import org.openecomp.sdc.toscaparser.api.common.JToscaException; +import org.openecomp.sdc.toscaparser.api.utils.JToscaErrorCodes; +import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; +import org.openecomp.sdc.toscaparser.api.utils.UrlUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.yaml.snakeyaml.Yaml; + +public class CSAR { + + private static Logger log = LoggerFactory.getLogger(CSAR.class.getName()); + private static final ArrayList META_PROPERTIES_FILES = new ArrayList<>(Arrays.asList("TOSCA-Metadata/TOSCA.meta", "csar.meta")); + + private String path; + private boolean isFile; + private boolean isValidated; + private boolean errorCaught; + private String csar; + private String tempDir; +// private Metadata metaData; + private File tempFile; + private LinkedHashMap> metaProperties; + + public CSAR(String csarPath, boolean aFile) { + path = csarPath; + isFile = aFile; + isValidated = false; + errorCaught = false; + 
csar = null; + tempDir = null; + tempFile = null; + metaProperties = new LinkedHashMap<>(); + } + + @SuppressWarnings("unchecked") + public boolean validate() throws JToscaException { + isValidated = true; + + //validate that the file or URL exists + + if(isFile) { + File f = new File(path); + if (!f.isFile()) { + ThreadLocalsHolder.getCollector().appendException(String.format("\"%s\" is not a file", path)); + return false; + } + else { + this.csar = path; + } + } + else { + if(!UrlUtils.validateUrl(path)) { + ThreadLocalsHolder.getCollector().appendException(String.format("ImportError: \"%s\" does not exist",path)); + return false; + } + // get it to a local file + try { + File tempFile = File.createTempFile("csartmp",".csar"); + Path ptf = Paths.get(tempFile.getPath()); + URL webfile = new URL(path); + InputStream in = webfile.openStream(); + Files.copy(in,ptf,StandardCopyOption.REPLACE_EXISTING); + } + catch(Exception e) { + ThreadLocalsHolder.getCollector().appendException("ImportError: failed to load CSAR from " + path); + return false; + } + + log.debug("CSAR - validate - currently only files are supported"); + return false; + } + + _parseAndValidateMetaProperties(); + + if(errorCaught) { + return false; + } + + // validate that external references in the main template actually exist and are accessible + _validateExternalReferences(); + + return !errorCaught; + + } + + private void _parseAndValidateMetaProperties() throws JToscaException { + + ZipFile zf = null; + + try { + + // validate that it is a valid zip file + RandomAccessFile raf = new RandomAccessFile(csar, "r"); + long n = raf.readInt(); + raf.close(); + // check if Zip's magic number + if (n != 0x504B0304) { + String errorString = String.format("\"%s\" is not a valid zip file", csar); + log.error(errorString); + throw new JToscaException(errorString , JToscaErrorCodes.INVALID_CSAR_FORMAT.getValue()); + } + + // validate that it contains the metadata file in the correct location + zf = new 
ZipFile(csar); + ZipEntry ze = zf.getEntry("TOSCA-Metadata/TOSCA.meta"); + if (ze == null) { + + String errorString = String.format( + "\"%s\" is not a valid CSAR as it does not contain the " + + "required file \"TOSCA.meta\" in the folder \"TOSCA-Metadata\"", csar); + log.error(errorString); + throw new JToscaException(errorString, JToscaErrorCodes.MISSING_META_FILE.getValue()); + } + + //Going over expected metadata files and parsing them + for (String metaFile: META_PROPERTIES_FILES) { + + byte ba[] = new byte[4096]; + ze = zf.getEntry(metaFile); + if (ze != null) { + InputStream inputStream = zf.getInputStream(ze); + n = inputStream.read(ba, 0, 4096); + String md = new String(ba); + md = md.substring(0, (int) n); + + String errorString = String.format( + "The file \"%s\" in the" + + " CSAR \"%s\" does not contain valid YAML content", ze.getName(), csar); + + try { + Yaml yaml = new Yaml(); + Object mdo = yaml.load(md); + if (!(mdo instanceof LinkedHashMap)) { + log.error(errorString); + throw new JToscaException(errorString, JToscaErrorCodes.INVALID_META_YAML_CONTENT.getValue()); + } + + String[] split = ze.getName().split("/"); + String fileName = split[split.length - 1]; + + if (!metaProperties.containsKey(fileName)) { + metaProperties.put(fileName, (LinkedHashMap) mdo); + } + } + catch(Exception e) { + log.error(errorString); + throw new JToscaException(errorString, JToscaErrorCodes.INVALID_META_YAML_CONTENT.getValue()); + } + } + } + + // verify it has "Entry-Definition" + String edf = _getMetadata("Entry-Definitions"); + if (edf == null) { + String errorString = String.format( + "The CSAR \"%s\" is missing the required metadata " + + "\"Entry-Definitions\" in \"TOSCA-Metadata/TOSCA.meta\"", csar); + log.error(errorString); + throw new JToscaException(errorString, JToscaErrorCodes.ENTRY_DEFINITION_NOT_DEFINED.getValue()); + } + + //validate that "Entry-Definitions' metadata value points to an existing file in the CSAR + boolean foundEDF = false; + 
Enumeration entries = zf.entries(); + while (entries.hasMoreElements()) { + ze = entries.nextElement(); + if (ze.getName().equals(edf)) { + foundEDF = true; + break; + } + } + if (!foundEDF) { + String errorString = String.format( + "The \"Entry-Definitions\" file defined in the CSAR \"%s\" does not exist", csar); + log.error(errorString); + throw new JToscaException(errorString, JToscaErrorCodes.MISSING_ENTRY_DEFINITION_FILE.getValue()); + } + } catch (JToscaException e) { + //ThreadLocalsHolder.getCollector().appendCriticalException(e.getMessage()); + throw e; + } catch (Exception e) { + ThreadLocalsHolder.getCollector().appendException("ValidationError: " + e.getMessage()); + errorCaught = true; + } + + try { + if (zf != null) { + zf.close(); + } + } catch (IOException e) { + } + } + + public void cleanup() { + try { + if(tempFile != null) { + tempFile.delete(); + } + } + catch(Exception e) { + } + } + + private String _getMetadata(String key) throws JToscaException { + if(!isValidated) { + validate(); + } + Object value = _getMetaProperty("TOSCA.meta").get(key); + return value != null ? 
value.toString() : null; + } + + public String getAuthor() throws JToscaException { + return _getMetadata("Created-By"); + } + + public String getVersion() throws JToscaException { + return _getMetadata("CSAR-Version"); + } + + public LinkedHashMap> getMetaProperties() { + return metaProperties; + } + + private LinkedHashMap _getMetaProperty(String propertiesFile) { + return metaProperties.get(propertiesFile); + } + + public String getMainTemplate() throws JToscaException { + String entryDef = _getMetadata("Entry-Definitions"); + ZipFile zf; + boolean ok = false; + try { + zf = new ZipFile(path); + ok = (zf.getEntry(entryDef) != null); + zf.close(); + } + catch(IOException e) { + if(!ok) { + log.error("CSAR - getMainTemplate - failed to open {}", path); + } + } + if(ok) { + return entryDef; + } + else { + return null; + } + } + + @SuppressWarnings("unchecked") + public LinkedHashMap getMainTemplateYaml() throws JToscaException { + String mainTemplate = tempDir + File.separator + getMainTemplate(); + if(mainTemplate != null) { + try { + InputStream input = new FileInputStream(new File(mainTemplate)); + Yaml yaml = new Yaml(); + Object data = yaml.load(input); + if(!(data instanceof LinkedHashMap)) { + throw new IOException(); + } + return (LinkedHashMap)data; + } + catch(Exception e) { + ThreadLocalsHolder.getCollector().appendException(String.format( + "The file \"%s\" in the CSAR \"%s\" does not " + + "contain valid TOSCA YAML content", + mainTemplate,csar)); + } + } + return null; + } + + public String getDescription() throws JToscaException { + String desc = _getMetadata("Description"); + if(desc != null) { + return desc; + } + + Map metaData = metaProperties.get("TOSCA.meta"); + metaData.put("Description", getMainTemplateYaml().get("description")); + return _getMetadata("Description"); + } + + public String getTempDir() { + return tempDir; + } + + public void decompress() throws IOException, JToscaException { + if(!isValidated) { + validate(); + } + tempDir = 
Files.createTempDirectory("JTP").toString(); + unzip(path,tempDir); + + } + + private void _validateExternalReferences() throws JToscaException { + // Extracts files referenced in the main template + // These references are currently supported: + // * imports + // * interface implementations + // * artifacts + try { + decompress(); + String mainTplFile = getMainTemplate(); + if(mainTplFile == null) { + return; + } + + LinkedHashMap mainTpl = getMainTemplateYaml(); + if(mainTpl.get("imports") != null) { + // this loads the imports + ImportsLoader il = new ImportsLoader((ArrayList)mainTpl.get("imports"), + tempDir + File.separator + mainTplFile, + (Object)null, + (LinkedHashMap)null); + } + + if(mainTpl.get("topology_template") != null) { + LinkedHashMap topologyTemplate = + (LinkedHashMap)mainTpl.get("topology_template"); + + if(topologyTemplate.get("node_templates") != null) { + LinkedHashMap nodeTemplates = + (LinkedHashMap)topologyTemplate.get("node_templates"); + for(String nodeTemplateKey: nodeTemplates.keySet()) { + LinkedHashMap nodeTemplate = + (LinkedHashMap)nodeTemplates.get(nodeTemplateKey); + if(nodeTemplate.get("artifacts") != null) { + LinkedHashMap artifacts = + (LinkedHashMap)nodeTemplate.get("artifacts"); + for(String artifactKey: artifacts.keySet()) { + Object artifact = artifacts.get(artifactKey); + if(artifact instanceof String) { + _validateExternalReference(mainTplFile,(String)artifact,true); + } + else if(artifact instanceof LinkedHashMap) { + String file = (String)((LinkedHashMap)artifact).get("file"); + if(file != null) { + _validateExternalReference(mainTplFile,file,true); + } + } + else { + ThreadLocalsHolder.getCollector().appendException(String.format( + "ValueError: Unexpected artifact definition for \"%s\"", + artifactKey)); + errorCaught = true; + } + } + } + if(nodeTemplate.get("interfaces") != null) { + LinkedHashMap interfaces = + (LinkedHashMap)nodeTemplate.get("interfaces"); + for(String interfaceKey: interfaces.keySet()) { + 
LinkedHashMap _interface = + (LinkedHashMap)interfaces.get(interfaceKey); + for(String operationKey: _interface.keySet()) { + Object operation = _interface.get(operationKey); + if(operation instanceof String) { + _validateExternalReference(mainTplFile,(String)operation,false); + } + else if(operation instanceof LinkedHashMap) { + String imp = (String)((LinkedHashMap)operation).get("implementation"); + if(imp != null) { + _validateExternalReference(mainTplFile,imp,true); + } + } + } + } + } + } + } + } + } + catch(IOException e) { + errorCaught = true; + } + finally { + // delete tempDir (only here?!?) + File fdir = new File(tempDir); + deleteDir(fdir); + tempDir = null; + } + } + + public static void deleteDir(File fdir) { + try { + if (fdir.isDirectory()) { + for (File c : fdir.listFiles()) + deleteDir(c); + } + fdir.delete(); + } + catch(Exception e) { + } + } + + private void _validateExternalReference(String tplFile,String resourceFile,boolean raiseExc) { + // Verify that the external resource exists + + // If resource_file is a URL verify that the URL is valid. + // If resource_file is a relative path verify that the path is valid + // considering base folder (self.temp_dir) and tpl_file. + // Note that in a CSAR resource_file cannot be an absolute path. 
+ if(UrlUtils.validateUrl(resourceFile)) { + String msg = String.format("URLException: The resource at \"%s\" cannot be accessed",resourceFile); + try { + if(UrlUtils.isUrlAccessible(resourceFile)) { + return; + } + else { + ThreadLocalsHolder.getCollector().appendException(msg); + errorCaught = true; + } + } + catch (Exception e) { + ThreadLocalsHolder.getCollector().appendException(msg); + } + } + + String dirPath = Paths.get(tplFile).getParent().toString(); + String filePath = tempDir + File.separator + dirPath + File.separator + resourceFile; + File f = new File(filePath); + if(f.isFile()) { + return; + } + + if(raiseExc) { + ThreadLocalsHolder.getCollector().appendException(String.format( + "ValueError: The resource \"%s\" does not exist",resourceFile)); + } + errorCaught = true; + } + + private void unzip(String zipFilePath, String destDirectory) throws IOException { + File destDir = new File(destDirectory); + if (!destDir.exists()) { + destDir.mkdir(); + } + ZipInputStream zipIn = new ZipInputStream(new FileInputStream(zipFilePath)); + ZipEntry entry = zipIn.getNextEntry(); + // iterates over entries in the zip file + while (entry != null) { + // create all directories needed for nested items + String[] parts = entry.getName().split("/"); + String s = destDirectory + File.separator ; + for(int i=0; i< parts.length-1; i++) { + s += parts[i]; + File idir = new File(s); + if(!idir.exists()) { + idir.mkdir(); + } + s += File.separator; + } + String filePath = destDirectory + File.separator + entry.getName(); + if (!entry.isDirectory()) { + // if the entry is a file, extracts it + extractFile(zipIn, filePath); + } else { + // if the entry is a directory, make the directory + File dir = new File(filePath); + dir.mkdir(); + } + zipIn.closeEntry(); + entry = zipIn.getNextEntry(); + } + zipIn.close(); + } + + /** + * Extracts a zip entry (file entry) + * @param zipIn + * @param filePath + * @throws IOException + */ + private static final int BUFFER_SIZE = 4096; + + 
private void extractFile(ZipInputStream zipIn, String filePath) throws IOException { + //BufferedOutputStream bos = new BufferedOutputStream(new FileOutputStream(filePath)); + FileOutputStream fos = new FileOutputStream(filePath); + BufferedOutputStream bos = new BufferedOutputStream(fos); + byte[] bytesIn = new byte[BUFFER_SIZE]; + int read = 0; + while ((read = zipIn.read(bytesIn)) != -1) { + bos.write(bytesIn, 0, read); + } + bos.close(); + } + +} + +/*python + +from toscaparser.common.exception import ExceptionCollector +from toscaparser.common.exception import URLException +from toscaparser.common.exception import ValidationError +from toscaparser.imports import ImportsLoader +from toscaparser.utils.gettextutils import _ +from toscaparser.utils.urlutils import UrlUtils + +try: # Python 2.x + from BytesIO import BytesIO +except ImportError: # Python 3.x + from io import BytesIO + + +class CSAR(object): + + def __init__(self, csar_file, a_file=True): + self.path = csar_file + self.a_file = a_file + self.is_validated = False + self.error_caught = False + self.csar = None + self.temp_dir = None + + def validate(self): + """Validate the provided CSAR file.""" + + self.is_validated = True + + # validate that the file or URL exists + missing_err_msg = (_('"%s" does not exist.') % self.path) + if self.a_file: + if not os.path.isfile(self.path): + ExceptionCollector.appendException( + ValidationError(message=missing_err_msg)) + return False + else: + self.csar = self.path + else: # a URL + if not UrlUtils.validate_url(self.path): + ExceptionCollector.appendException( + ValidationError(message=missing_err_msg)) + return False + else: + response = requests.get(self.path) + self.csar = BytesIO(response.content) + + # validate that it is a valid zip file + if not zipfile.is_zipfile(self.csar): + err_msg = (_('"%s" is not a valid zip file.') % self.path) + ExceptionCollector.appendException( + ValidationError(message=err_msg)) + return False + + # validate that it contains 
the metadata file in the correct location + self.zfile = zipfile.ZipFile(self.csar, 'r') + filelist = self.zfile.namelist() + if 'TOSCA-Metadata/TOSCA.meta' not in filelist: + err_msg = (_('"%s" is not a valid CSAR as it does not contain the ' + 'required file "TOSCA.meta" in the folder ' + '"TOSCA-Metadata".') % self.path) + ExceptionCollector.appendException( + ValidationError(message=err_msg)) + return False + + # validate that 'Entry-Definitions' property exists in TOSCA.meta + data = self.zfile.read('TOSCA-Metadata/TOSCA.meta') + invalid_yaml_err_msg = (_('The file "TOSCA-Metadata/TOSCA.meta" in ' + 'the CSAR "%s" does not contain valid YAML ' + 'content.') % self.path) + try: + meta = yaml.load(data) + if type(meta) is dict: + self.metadata = meta + else: + ExceptionCollector.appendException( + ValidationError(message=invalid_yaml_err_msg)) + return False + except yaml.YAMLError: + ExceptionCollector.appendException( + ValidationError(message=invalid_yaml_err_msg)) + return False + + if 'Entry-Definitions' not in self.metadata: + err_msg = (_('The CSAR "%s" is missing the required metadata ' + '"Entry-Definitions" in ' + '"TOSCA-Metadata/TOSCA.meta".') + % self.path) + ExceptionCollector.appendException( + ValidationError(message=err_msg)) + return False + + # validate that 'Entry-Definitions' metadata value points to an + # existing file in the CSAR + entry = self.metadata.get('Entry-Definitions') + if entry and entry not in filelist: + err_msg = (_('The "Entry-Definitions" file defined in the ' + 'CSAR "%s" does not exist.') % self.path) + ExceptionCollector.appendException( + ValidationError(message=err_msg)) + return False + + # validate that external references in the main template actually + # exist and are accessible + self._validate_external_references() + return not self.error_caught + + def get_metadata(self): + """Return the metadata dictionary.""" + + # validate the csar if not already validated + if not self.is_validated: + self.validate() + + # 
return a copy to avoid changes overwrite the original + return dict(self.metadata) if self.metadata else None + + def _get_metadata(self, key): + if not self.is_validated: + self.validate() + return self.metadata.get(key) + + def get_author(self): + return self._get_metadata('Created-By') + + def get_version(self): + return self._get_metadata('CSAR-Version') + + def get_main_template(self): + entry_def = self._get_metadata('Entry-Definitions') + if entry_def in self.zfile.namelist(): + return entry_def + + def get_main_template_yaml(self): + main_template = self.get_main_template() + if main_template: + data = self.zfile.read(main_template) + invalid_tosca_yaml_err_msg = ( + _('The file "%(template)s" in the CSAR "%(csar)s" does not ' + 'contain valid TOSCA YAML content.') % + {'template': main_template, 'csar': self.path}) + try: + tosca_yaml = yaml.load(data) + if type(tosca_yaml) is not dict: + ExceptionCollector.appendException( + ValidationError(message=invalid_tosca_yaml_err_msg)) + return tosca_yaml + except Exception: + ExceptionCollector.appendException( + ValidationError(message=invalid_tosca_yaml_err_msg)) + + def get_description(self): + desc = self._get_metadata('Description') + if desc is not None: + return desc + + self.metadata['Description'] = \ + self.get_main_template_yaml().get('description') + return self.metadata['Description'] + + def decompress(self): + if not self.is_validated: + self.validate() + self.temp_dir = tempfile.NamedTemporaryFile().name + with zipfile.ZipFile(self.csar, "r") as zf: + zf.extractall(self.temp_dir) + + def _validate_external_references(self): + """Extracts files referenced in the main template + + These references are currently supported: + * imports + * interface implementations + * artifacts + """ + try: + self.decompress() + main_tpl_file = self.get_main_template() + if not main_tpl_file: + return + main_tpl = self.get_main_template_yaml() + + if 'imports' in main_tpl: + ImportsLoader(main_tpl['imports'], + 
os.path.join(self.temp_dir, main_tpl_file)) + + if 'topology_template' in main_tpl: + topology_template = main_tpl['topology_template'] + + if 'node_templates' in topology_template: + node_templates = topology_template['node_templates'] + + for node_template_key in node_templates: + node_template = node_templates[node_template_key] + if 'artifacts' in node_template: + artifacts = node_template['artifacts'] + for artifact_key in artifacts: + artifact = artifacts[artifact_key] + if isinstance(artifact, six.string_types): + self._validate_external_reference( + main_tpl_file, + artifact) + elif isinstance(artifact, dict): + if 'file' in artifact: + self._validate_external_reference( + main_tpl_file, + artifact['file']) + else: + ExceptionCollector.appendException( + ValueError(_('Unexpected artifact ' + 'definition for "%s".') + % artifact_key)) + self.error_caught = True + if 'interfaces' in node_template: + interfaces = node_template['interfaces'] + for interface_key in interfaces: + interface = interfaces[interface_key] + for opertation_key in interface: + operation = interface[opertation_key] + if isinstance(operation, six.string_types): + self._validate_external_reference( + main_tpl_file, + operation, + False) + elif isinstance(operation, dict): + if 'implementation' in operation: + self._validate_external_reference( + main_tpl_file, + operation['implementation']) + finally: + if self.temp_dir: + shutil.rmtree(self.temp_dir) + + def _validate_external_reference(self, tpl_file, resource_file, + raise_exc=True): + """Verify that the external resource exists + + If resource_file is a URL verify that the URL is valid. + If resource_file is a relative path verify that the path is valid + considering base folder (self.temp_dir) and tpl_file. + Note that in a CSAR resource_file cannot be an absolute path. 
+ """ + if UrlUtils.validate_url(resource_file): + msg = (_('The resource at "%s" cannot be accessed.') % + resource_file) + try: + if UrlUtils.url_accessible(resource_file): + return + else: + ExceptionCollector.appendException( + URLException(what=msg)) + self.error_caught = True + except Exception: + ExceptionCollector.appendException( + URLException(what=msg)) + self.error_caught = True + + if os.path.isfile(os.path.join(self.temp_dir, + os.path.dirname(tpl_file), + resource_file)): + return + + if raise_exc: + ExceptionCollector.appendException( + ValueError(_('The resource "%s" does not exist.') + % resource_file)) + self.error_caught = True +*/ + + diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/prereq/CSAR.java.orig b/src/main/java/org/openecomp/sdc/toscaparser/api/prereq/CSAR.java.orig new file mode 100644 index 0000000..aa36b9e --- /dev/null +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/prereq/CSAR.java.orig @@ -0,0 +1,767 @@ +package org.openecomp.sdc.toscaparser.api.prereq; + +import java.io.BufferedOutputStream; +import java.io.File; +import java.io.FileInputStream; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.RandomAccessFile; +import java.net.URL; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.nio.file.StandardCopyOption; +import java.util.*; +import java.util.zip.ZipEntry; +import java.util.zip.ZipFile; +import java.util.zip.ZipInputStream; + +import org.openecomp.sdc.toscaparser.api.ImportsLoader; +import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; +import org.openecomp.sdc.toscaparser.api.elements.Metadata; +import org.openecomp.sdc.toscaparser.api.utils.UrlUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.yaml.snakeyaml.Yaml; + +<<<<<<< HEAD:jtosca/src/main/java/org/openecomp/sdc/toscaparser/prereq/CSAR.java +import org.openecomp.sdc.toscaparser.ImportsLoader; +import 
org.openecomp.sdc.toscaparser.common.ExceptionCollector; +import org.openecomp.sdc.toscaparser.utils.UrlUtils; + +======= +>>>>>>> 243072-jtosca-package-fix:jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/prereq/CSAR.java +public class CSAR { + + private static Logger log = LoggerFactory.getLogger(CSAR.class.getName()); + private static final ArrayList META_PROPERTIES_FILES = new ArrayList<>(Arrays.asList("TOSCA-Metadata/TOSCA.meta", "csar.meta")); + + private String path; + private boolean isFile; + private boolean isValidated; + private boolean errorCaught; + private String csar; + private String tempDir; +// private Metadata metaData; + private File tempFile; + private LinkedHashMap> metaProperties; + + public CSAR(String csarPath, boolean aFile) { + path = csarPath; + isFile = aFile; + isValidated = false; + errorCaught = false; + csar = null; + tempDir = null; + tempFile = null; + metaProperties = new LinkedHashMap<>(); + } + + @SuppressWarnings("unchecked") + public boolean validate() { + isValidated = true; + + //validate that the file or URL exists + + if(isFile) { + File f = new File(path); + if (!f.isFile()) { + ExceptionCollector.appendException(String.format("\"%s\" is not a file", path)); + return false; + } + else { + this.csar = path; + } + } + else { + if(!UrlUtils.validateUrl(path)) { + ExceptionCollector.appendException(String.format("ImportError: \"%s\" does not exist",path)); + return false; + } + // get it to a local file + try { + File tempFile = File.createTempFile("csartmp",".csar"); + Path ptf = Paths.get(tempFile.getPath()); + URL webfile = new URL(path); + InputStream in = webfile.openStream(); + Files.copy(in,ptf,StandardCopyOption.REPLACE_EXISTING); + } + catch(Exception e) { + ExceptionCollector.appendException("ImportError: failed to load CSAR from " + path); + return false; + } + + log.debug("CSAR - validate - currently only files are supported"); + return false; + } + + _parseAndValidateMetaProperties(); + + if(errorCaught) { 
+ return false; + } + + // validate that external references in the main template actually exist and are accessible + _validateExternalReferences(); + + return !errorCaught; + + } + + private void _parseAndValidateMetaProperties() { + + ZipFile zf = null; + + try { + + // validate that it is a valid zip file + RandomAccessFile raf = new RandomAccessFile(csar, "r"); + long n = raf.readInt(); + raf.close(); + // check if Zip's magic number + if (n != 0x504B0304) { + throw new IOException(String.format("\"%s\" is not a valid zip file", csar)); + } + + // validate that it contains the metadata file in the correct location + zf = new ZipFile(csar); + ZipEntry ze = zf.getEntry("TOSCA-Metadata/TOSCA.meta"); + if (ze == null) { + throw new IOException(String.format( + "\"%s\" is not a valid CSAR as it does not contain the " + + "required file \"TOSCA.meta\" in the folder \"TOSCA-Metadata\"", csar)); + } + + //Going over expected metadata files and parsing them + for (String metaFile: META_PROPERTIES_FILES) { + + byte ba[] = new byte[4096]; + ze = zf.getEntry(metaFile); + if (ze != null) { + InputStream inputStream = zf.getInputStream(ze); + n = inputStream.read(ba, 0, 4096); + + String md = new String(ba); + md = md.substring(0, (int) n); + Yaml yaml = new Yaml(); + Object mdo = yaml.load(md); + if (!(mdo instanceof LinkedHashMap)) { + throw new IOException(String.format( + "The file \"%s\" in the" + + " CSAR \"%s\" does not contain valid YAML content", ze.getName(), csar)); + } + + String[] split = ze.getName().split("/"); + String fileName = split[split.length - 1]; + + if (!metaProperties.containsKey(fileName)) { + metaProperties.put(fileName, (LinkedHashMap) mdo); + } + } + } + + // verify it has "Entry-Definition" + String edf = _getMetadata("Entry-Definitions"); + if (edf == null) { + throw new IOException(String.format( + "The CSAR \"%s\" is missing the required metadata " + + "\"Entry-Definitions\" in \"TOSCA-Metadata/TOSCA.meta\"", csar)); + } + + //validate that 
"Entry-Definitions' metadata value points to an existing file in the CSAR + boolean foundEDF = false; + Enumeration entries = zf.entries(); + while (entries.hasMoreElements()) { + ze = entries.nextElement(); + if (ze.getName().equals(edf)) { + foundEDF = true; + break; + } + } + if (!foundEDF) { + throw new IOException(String.format( + "The \"Entry-Definitions\" file defined in the CSAR \"%s\" does not exist", csar)); + } + } catch (Exception e) { + ExceptionCollector.appendException("ValidationError: " + e.getMessage()); + errorCaught = true; + } + + try { + if (zf != null) { + zf.close(); + } + } catch (IOException e) { + } + } + + public void cleanup() { + try { + if(tempFile != null) { + tempFile.delete(); + } + } + catch(Exception e) { + } + } + + private String _getMetadata(String key) { + if(!isValidated) { + validate(); + } + Object value = _getMetaProperty("TOSCA.meta").get(key); + return value != null ? value.toString() : null; + } + + public String getAuthor() { + return _getMetadata("Created-By"); + } + + public String getVersion() { + return _getMetadata("CSAR-Version"); + } + + public LinkedHashMap> getMetaProperties() { + return metaProperties; + } + + private LinkedHashMap _getMetaProperty(String propertiesFile) { + return metaProperties.get(propertiesFile); + } + + public String getMainTemplate() { + String entryDef = _getMetadata("Entry-Definitions"); + ZipFile zf; + boolean ok = false; + try { + zf = new ZipFile(path); + ok = (zf.getEntry(entryDef) != null); + zf.close(); + } + catch(IOException e) { + if(!ok) { + log.error("CSAR - getMainTemplate - failed to open {}", path); + } + } + if(ok) { + return entryDef; + } + else { + return null; + } + } + + @SuppressWarnings("unchecked") + public LinkedHashMap getMainTemplateYaml() { + String mainTemplate = tempDir + File.separator + getMainTemplate(); + if(mainTemplate != null) { + try { + InputStream input = new FileInputStream(new File(mainTemplate)); + Yaml yaml = new Yaml(); + Object data = 
yaml.load(input); + if(!(data instanceof LinkedHashMap)) { + throw new IOException(); + } + return (LinkedHashMap)data; + } + catch(Exception e) { + ExceptionCollector.appendException(String.format( + "The file \"%s\" in the CSAR \"%s\" does not " + + "contain valid TOSCA YAML content", + mainTemplate,csar)); + } + } + return null; + } + + public String getDescription() { + String desc = _getMetadata("Description"); + if(desc != null) { + return desc; + } + + Map metaData = metaProperties.get("TOSCA.meta"); + metaData.put("Description", getMainTemplateYaml().get("description")); + return _getMetadata("Description"); + } + + public String getTempDir() { + return tempDir; + } + + public void decompress() throws IOException { + if(!isValidated) { + validate(); + } + tempDir = Files.createTempDirectory("JTP").toString(); + unzip(path,tempDir); + + } + + private void _validateExternalReferences() { + // Extracts files referenced in the main template + // These references are currently supported: + // * imports + // * interface implementations + // * artifacts + try { + decompress(); + String mainTplFile = getMainTemplate(); + if(mainTplFile == null) { + return; + } + + LinkedHashMap mainTpl = getMainTemplateYaml(); + if(mainTpl.get("imports") != null) { + // this loads the imports + ImportsLoader il = new ImportsLoader((ArrayList)mainTpl.get("imports"), + tempDir + File.separator + mainTplFile, + (Object)null, + (LinkedHashMap)null); + } + + if(mainTpl.get("topology_template") != null) { + LinkedHashMap topologyTemplate = + (LinkedHashMap)mainTpl.get("topology_template"); + + if(topologyTemplate.get("node_templates") != null) { + LinkedHashMap nodeTemplates = + (LinkedHashMap)topologyTemplate.get("node_templates"); + for(String nodeTemplateKey: nodeTemplates.keySet()) { + LinkedHashMap nodeTemplate = + (LinkedHashMap)nodeTemplates.get(nodeTemplateKey); + if(nodeTemplate.get("artifacts") != null) { + LinkedHashMap artifacts = + 
(LinkedHashMap)nodeTemplate.get("artifacts"); + for(String artifactKey: artifacts.keySet()) { + Object artifact = artifacts.get(artifactKey); + if(artifact instanceof String) { + _validateExternalReference(mainTplFile,(String)artifact,true); + } + else if(artifact instanceof LinkedHashMap) { + String file = (String)((LinkedHashMap)artifact).get("file"); + if(file != null) { + _validateExternalReference(mainTplFile,file,true); + } + } + else { + ExceptionCollector.appendException(String.format( + "ValueError: Unexpected artifact definition for \"%s\"", + artifactKey)); + errorCaught = true; + } + } + } + if(nodeTemplate.get("interfaces") != null) { + LinkedHashMap interfaces = + (LinkedHashMap)nodeTemplate.get("interfaces"); + for(String interfaceKey: interfaces.keySet()) { + LinkedHashMap _interface = + (LinkedHashMap)interfaces.get(interfaceKey); + for(String operationKey: _interface.keySet()) { + Object operation = _interface.get(operationKey); + if(operation instanceof String) { + _validateExternalReference(mainTplFile,(String)operation,false); + } + else if(operation instanceof LinkedHashMap) { + String imp = (String)((LinkedHashMap)operation).get("implementation"); + if(imp != null) { + _validateExternalReference(mainTplFile,imp,true); + } + } + } + } + } + } + } + } + } + catch(IOException e) { + errorCaught = true; + } + finally { + // delete tempDir (only here?!?) + File fdir = new File(tempDir); + deleteDir(fdir); + tempDir = null; + } + } + + public static void deleteDir(File fdir) { + try { + if (fdir.isDirectory()) { + for (File c : fdir.listFiles()) + deleteDir(c); + } + fdir.delete(); + } + catch(Exception e) { + } + } + + private void _validateExternalReference(String tplFile,String resourceFile,boolean raiseExc) { + // Verify that the external resource exists + + // If resource_file is a URL verify that the URL is valid. + // If resource_file is a relative path verify that the path is valid + // considering base folder (self.temp_dir) and tpl_file. 
+ // Note that in a CSAR resource_file cannot be an absolute path. + if(UrlUtils.validateUrl(resourceFile)) { + String msg = String.format("URLException: The resource at \"%s\" cannot be accessed",resourceFile); + try { + if(UrlUtils.isUrlAccessible(resourceFile)) { + return; + } + else { + ExceptionCollector.appendException(msg); + errorCaught = true; + } + } + catch (Exception e) { + ExceptionCollector.appendException(msg); + } + } + + String dirPath = Paths.get(tplFile).getParent().toString(); + String filePath = tempDir + File.separator + dirPath + File.separator + resourceFile; + File f = new File(filePath); + if(f.isFile()) { + return; + } + + if(raiseExc) { + ExceptionCollector.appendException(String.format( + "ValueError: The resource \"%s\" does not exist",resourceFile)); + } + errorCaught = true; + } + + private void unzip(String zipFilePath, String destDirectory) throws IOException { + File destDir = new File(destDirectory); + if (!destDir.exists()) { + destDir.mkdir(); + } + ZipInputStream zipIn = new ZipInputStream(new FileInputStream(zipFilePath)); + ZipEntry entry = zipIn.getNextEntry(); + // iterates over entries in the zip file + while (entry != null) { + // create all directories needed for nested items + String[] parts = entry.getName().split("/"); + String s = destDirectory + File.separator ; + for(int i=0; i< parts.length-1; i++) { + s += parts[i]; + File idir = new File(s); + if(!idir.exists()) { + idir.mkdir(); + } + s += File.separator; + } + String filePath = destDirectory + File.separator + entry.getName(); + if (!entry.isDirectory()) { + // if the entry is a file, extracts it + extractFile(zipIn, filePath); + } else { + // if the entry is a directory, make the directory + File dir = new File(filePath); + dir.mkdir(); + } + zipIn.closeEntry(); + entry = zipIn.getNextEntry(); + } + zipIn.close(); + } + + /** + * Extracts a zip entry (file entry) + * @param zipIn + * @param filePath + * @throws IOException + */ + private static final int 
BUFFER_SIZE = 4096; + + private void extractFile(ZipInputStream zipIn, String filePath) throws IOException { + //BufferedOutputStream bos = new BufferedOutputStream(new FileOutputStream(filePath)); + FileOutputStream fos = new FileOutputStream(filePath); + BufferedOutputStream bos = new BufferedOutputStream(fos); + byte[] bytesIn = new byte[BUFFER_SIZE]; + int read = 0; + while ((read = zipIn.read(bytesIn)) != -1) { + bos.write(bytesIn, 0, read); + } + bos.close(); + } + +} + +/*python + +from toscaparser.common.exception import ExceptionCollector +from toscaparser.common.exception import URLException +from toscaparser.common.exception import ValidationError +from toscaparser.imports import ImportsLoader +from toscaparser.utils.gettextutils import _ +from toscaparser.utils.urlutils import UrlUtils + +try: # Python 2.x + from BytesIO import BytesIO +except ImportError: # Python 3.x + from io import BytesIO + + +class CSAR(object): + + def __init__(self, csar_file, a_file=True): + self.path = csar_file + self.a_file = a_file + self.is_validated = False + self.error_caught = False + self.csar = None + self.temp_dir = None + + def validate(self): + """Validate the provided CSAR file.""" + + self.is_validated = True + + # validate that the file or URL exists + missing_err_msg = (_('"%s" does not exist.') % self.path) + if self.a_file: + if not os.path.isfile(self.path): + ExceptionCollector.appendException( + ValidationError(message=missing_err_msg)) + return False + else: + self.csar = self.path + else: # a URL + if not UrlUtils.validate_url(self.path): + ExceptionCollector.appendException( + ValidationError(message=missing_err_msg)) + return False + else: + response = requests.get(self.path) + self.csar = BytesIO(response.content) + + # validate that it is a valid zip file + if not zipfile.is_zipfile(self.csar): + err_msg = (_('"%s" is not a valid zip file.') % self.path) + ExceptionCollector.appendException( + ValidationError(message=err_msg)) + return False + + # 
validate that it contains the metadata file in the correct location + self.zfile = zipfile.ZipFile(self.csar, 'r') + filelist = self.zfile.namelist() + if 'TOSCA-Metadata/TOSCA.meta' not in filelist: + err_msg = (_('"%s" is not a valid CSAR as it does not contain the ' + 'required file "TOSCA.meta" in the folder ' + '"TOSCA-Metadata".') % self.path) + ExceptionCollector.appendException( + ValidationError(message=err_msg)) + return False + + # validate that 'Entry-Definitions' property exists in TOSCA.meta + data = self.zfile.read('TOSCA-Metadata/TOSCA.meta') + invalid_yaml_err_msg = (_('The file "TOSCA-Metadata/TOSCA.meta" in ' + 'the CSAR "%s" does not contain valid YAML ' + 'content.') % self.path) + try: + meta = yaml.load(data) + if type(meta) is dict: + self.metadata = meta + else: + ExceptionCollector.appendException( + ValidationError(message=invalid_yaml_err_msg)) + return False + except yaml.YAMLError: + ExceptionCollector.appendException( + ValidationError(message=invalid_yaml_err_msg)) + return False + + if 'Entry-Definitions' not in self.metadata: + err_msg = (_('The CSAR "%s" is missing the required metadata ' + '"Entry-Definitions" in ' + '"TOSCA-Metadata/TOSCA.meta".') + % self.path) + ExceptionCollector.appendException( + ValidationError(message=err_msg)) + return False + + # validate that 'Entry-Definitions' metadata value points to an + # existing file in the CSAR + entry = self.metadata.get('Entry-Definitions') + if entry and entry not in filelist: + err_msg = (_('The "Entry-Definitions" file defined in the ' + 'CSAR "%s" does not exist.') % self.path) + ExceptionCollector.appendException( + ValidationError(message=err_msg)) + return False + + # validate that external references in the main template actually + # exist and are accessible + self._validate_external_references() + return not self.error_caught + + def get_metadata(self): + """Return the metadata dictionary.""" + + # validate the csar if not already validated + if not 
self.is_validated: + self.validate() + + # return a copy to avoid changes overwrite the original + return dict(self.metadata) if self.metadata else None + + def _get_metadata(self, key): + if not self.is_validated: + self.validate() + return self.metadata.get(key) + + def get_author(self): + return self._get_metadata('Created-By') + + def get_version(self): + return self._get_metadata('CSAR-Version') + + def get_main_template(self): + entry_def = self._get_metadata('Entry-Definitions') + if entry_def in self.zfile.namelist(): + return entry_def + + def get_main_template_yaml(self): + main_template = self.get_main_template() + if main_template: + data = self.zfile.read(main_template) + invalid_tosca_yaml_err_msg = ( + _('The file "%(template)s" in the CSAR "%(csar)s" does not ' + 'contain valid TOSCA YAML content.') % + {'template': main_template, 'csar': self.path}) + try: + tosca_yaml = yaml.load(data) + if type(tosca_yaml) is not dict: + ExceptionCollector.appendException( + ValidationError(message=invalid_tosca_yaml_err_msg)) + return tosca_yaml + except Exception: + ExceptionCollector.appendException( + ValidationError(message=invalid_tosca_yaml_err_msg)) + + def get_description(self): + desc = self._get_metadata('Description') + if desc is not None: + return desc + + self.metadata['Description'] = \ + self.get_main_template_yaml().get('description') + return self.metadata['Description'] + + def decompress(self): + if not self.is_validated: + self.validate() + self.temp_dir = tempfile.NamedTemporaryFile().name + with zipfile.ZipFile(self.csar, "r") as zf: + zf.extractall(self.temp_dir) + + def _validate_external_references(self): + """Extracts files referenced in the main template + + These references are currently supported: + * imports + * interface implementations + * artifacts + """ + try: + self.decompress() + main_tpl_file = self.get_main_template() + if not main_tpl_file: + return + main_tpl = self.get_main_template_yaml() + + if 'imports' in main_tpl: + 
ImportsLoader(main_tpl['imports'], + os.path.join(self.temp_dir, main_tpl_file)) + + if 'topology_template' in main_tpl: + topology_template = main_tpl['topology_template'] + + if 'node_templates' in topology_template: + node_templates = topology_template['node_templates'] + + for node_template_key in node_templates: + node_template = node_templates[node_template_key] + if 'artifacts' in node_template: + artifacts = node_template['artifacts'] + for artifact_key in artifacts: + artifact = artifacts[artifact_key] + if isinstance(artifact, six.string_types): + self._validate_external_reference( + main_tpl_file, + artifact) + elif isinstance(artifact, dict): + if 'file' in artifact: + self._validate_external_reference( + main_tpl_file, + artifact['file']) + else: + ExceptionCollector.appendException( + ValueError(_('Unexpected artifact ' + 'definition for "%s".') + % artifact_key)) + self.error_caught = True + if 'interfaces' in node_template: + interfaces = node_template['interfaces'] + for interface_key in interfaces: + interface = interfaces[interface_key] + for opertation_key in interface: + operation = interface[opertation_key] + if isinstance(operation, six.string_types): + self._validate_external_reference( + main_tpl_file, + operation, + False) + elif isinstance(operation, dict): + if 'implementation' in operation: + self._validate_external_reference( + main_tpl_file, + operation['implementation']) + finally: + if self.temp_dir: + shutil.rmtree(self.temp_dir) + + def _validate_external_reference(self, tpl_file, resource_file, + raise_exc=True): + """Verify that the external resource exists + + If resource_file is a URL verify that the URL is valid. + If resource_file is a relative path verify that the path is valid + considering base folder (self.temp_dir) and tpl_file. + Note that in a CSAR resource_file cannot be an absolute path. 
+ """ + if UrlUtils.validate_url(resource_file): + msg = (_('The resource at "%s" cannot be accessed.') % + resource_file) + try: + if UrlUtils.url_accessible(resource_file): + return + else: + ExceptionCollector.appendException( + URLException(what=msg)) + self.error_caught = True + except Exception: + ExceptionCollector.appendException( + URLException(what=msg)) + self.error_caught = True + + if os.path.isfile(os.path.join(self.temp_dir, + os.path.dirname(tpl_file), + resource_file)): + return + + if raise_exc: + ExceptionCollector.appendException( + ValueError(_('The resource "%s" does not exist.') + % resource_file)) + self.error_caught = True +*/ + + diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/utils/CopyUtils.java b/src/main/java/org/openecomp/sdc/toscaparser/api/utils/CopyUtils.java new file mode 100644 index 0000000..db236e1 --- /dev/null +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/utils/CopyUtils.java @@ -0,0 +1,29 @@ +package org.openecomp.sdc.toscaparser.api.utils; + +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.Map; + +public class CopyUtils { + + @SuppressWarnings("unchecked") + public static Object copyLhmOrAl(Object src) { + if(src instanceof LinkedHashMap) { + LinkedHashMap dst = new LinkedHashMap(); + for(Map.Entry me: ((LinkedHashMap)src).entrySet()) { + dst.put(me.getKey(),me.getValue()); + } + return dst; + } + else if(src instanceof ArrayList) { + ArrayList dst = new ArrayList(); + for(Object o: (ArrayList)src) { + dst.add(o); + } + return dst; + } + else { + return null; + } + } +} diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/utils/DumpUtils.java b/src/main/java/org/openecomp/sdc/toscaparser/api/utils/DumpUtils.java new file mode 100644 index 0000000..32c69cd --- /dev/null +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/utils/DumpUtils.java @@ -0,0 +1,55 @@ +package org.openecomp.sdc.toscaparser.api.utils; + +import java.util.ArrayList; +import 
java.util.LinkedHashMap; +import java.util.Map; + +public class DumpUtils { + + @SuppressWarnings("unchecked") + public static void dumpYaml(Object yo,int level) { + final String indent = " "; + try { + if(yo == null) { + System.out.println(""); + return; + } + String cname = yo.getClass().getSimpleName(); + System.out.print(cname); + if(cname.equals("LinkedHashMap")) { + LinkedHashMap lhm = (LinkedHashMap)yo; + System.out.println(); + for(Map.Entry me: lhm.entrySet()) { + System.out.print(indent.substring(0,level) + me.getKey() + ": "); + dumpYaml(me.getValue(),level+2); + } + } + else if(cname.equals("ArrayList")) { + ArrayList al = (ArrayList)yo; + System.out.println(); + for (int i=0; i \"" + (String)yo + "\""); + } + else if(cname.equals("Integer")) { + System.out.println(" ==> " + (int)yo); + } + else if(cname.equals("Boolean")) { + System.out.println(" ==> " + (boolean)yo); + } + else if(cname.equals("Double")) { + System.out.println(" ==> " + (double)yo); + } + else { + System.out.println(" !! unexpected type"); + } + } + catch(Exception e) { + System.out.println("Exception!! 
" + e.getMessage()); + } + } +} \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/utils/JToscaErrorCodes.java b/src/main/java/org/openecomp/sdc/toscaparser/api/utils/JToscaErrorCodes.java new file mode 100644 index 0000000..354fef0 --- /dev/null +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/utils/JToscaErrorCodes.java @@ -0,0 +1,32 @@ +package org.openecomp.sdc.toscaparser.api.utils; + + +public enum JToscaErrorCodes { + MISSING_META_FILE("JT1001"), + INVALID_META_YAML_CONTENT("JT1002"), + ENTRY_DEFINITION_NOT_DEFINED("JT1003"), + MISSING_ENTRY_DEFINITION_FILE ("JT1004"), + GENERAL_ERROR("JT1005"), + PATH_NOT_VALID("JT1006"), + CSAR_TOSCA_VALIDATION_ERROR("JT1007"), + INVALID_CSAR_FORMAT("JT1008"); + + private String value; + + private JToscaErrorCodes(String value) { + this.value = value; + } + + public String getValue() { + return value; + } + + public static JToscaErrorCodes getByCode(String code) { + for(JToscaErrorCodes v : values()){ + if( v.getValue().equals(code)){ + return v; + } + } + return null; + } +} \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/utils/TOSCAVersionProperty.java b/src/main/java/org/openecomp/sdc/toscaparser/api/utils/TOSCAVersionProperty.java new file mode 100644 index 0000000..6b3c1ce --- /dev/null +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/utils/TOSCAVersionProperty.java @@ -0,0 +1,182 @@ +package org.openecomp.sdc.toscaparser.api.utils; + +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; + +public class TOSCAVersionProperty {// test with functions/test_concat.yaml + + private String version; + + private static final String versionRe = + "^(?([0-9][0-9]*))" + + "(\\.(?([0-9][0-9]*)))?" + + "(\\.(?([0-9][0-9]*)))?" + + "(\\.(?([0-9A-Za-z]+)))?" 
+ + "(\\-(?[0-9])*)?$"; + + private String minorVersion = null; + private String majorVersion = null; + private String fixVersion = null; + private String qualifier = null; + private String buildVersion = null; + + + public TOSCAVersionProperty(Object _version) { + version = _version.toString(); + + if(version.equals("0") || version.equals("0.0") || version.equals("0.0.0")) { + //log.warning(_('Version assumed as not provided')) + version = ""; + return; + } + + Pattern pattern = Pattern.compile(versionRe); + Matcher matcher = pattern.matcher(version); + if(!matcher.find()) { + ThreadLocalsHolder.getCollector().appendException(String.format( + "InvalidTOSCAVersionPropertyException: " + + "Value of TOSCA version property \"%s\" is invalid", + version)); + return; + } + minorVersion = matcher.group("gMinorVersion"); + majorVersion = matcher.group("gMajorVersion"); + fixVersion = matcher.group("gFixVersion"); + qualifier = _validateQualifier(matcher.group("gQualifier")); + buildVersion = _validateBuild(matcher.group("gBuildVersion")); + _validateMajorVersion(majorVersion); + + } + + private String _validateMajorVersion(String value) { + // Validate major version + + // Checks if only major version is provided and assumes + // minor version as 0. + // Eg: If version = 18, then it returns version = '18.0' + + if(minorVersion == null && buildVersion == null && !value.equals("0")) { + //log.warning(_('Minor version assumed "0".')) + version = version + "0"; + } + return value; + } + + private String _validateQualifier(String value) { + // Validate qualifier + + // TOSCA version is invalid if a qualifier is present without the + // fix version or with all of major, minor and fix version 0s. 
+ + // For example, the following versions are invalid + // 18.0.abc + // 0.0.0.abc + + if((fixVersion == null && value != null) || + (minorVersion.equals("0") && majorVersion.equals("0") && + fixVersion.equals("0") && value != null)) { + ThreadLocalsHolder.getCollector().appendException(String.format( + "InvalidTOSCAVersionPropertyException: " + + "Value of TOSCA version property \"%s\" is invalid", + version)); + } + return value; + } + + private String _validateBuild(String value) { + // Validate build version + + // TOSCA version is invalid if build version is present without the qualifier. + // Eg: version = 18.0.0-1 is invalid. + + if(qualifier == null && value != null) { + ThreadLocalsHolder.getCollector().appendException(String.format( + "InvalidTOSCAVersionPropertyException: " + + "Value of TOSCA version property \"%s\" is invalid", + version)); + } + return value; + } + + public Object getVersion() { + return version; + } + +} + +/*python + +class TOSCAVersionProperty(object): + + VERSION_RE = re.compile('^(?P([0-9][0-9]*))' + '(\.(?P([0-9][0-9]*)))?' + '(\.(?P([0-9][0-9]*)))?' + '(\.(?P([0-9A-Za-z]+)))?' + '(\-(?P[0-9])*)?$') + + def __init__(self, version): + self.version = str(version) + match = self.VERSION_RE.match(self.version) + if not match: + ExceptionCollector.appendException( + InvalidTOSCAVersionPropertyException(what=(self.version))) + return + ver = match.groupdict() + if self.version in ['0', '0.0', '0.0.0']: + log.warning(_('Version assumed as not provided')) + self.version = None + self.minor_version = ver['minor_version'] + self.major_version = ver['major_version'] + self.fix_version = ver['fix_version'] + self.qualifier = self._validate_qualifier(ver['qualifier']) + self.build_version = self._validate_build(ver['build_version']) + self._validate_major_version(self.major_version) + + def _validate_major_version(self, value): + """Validate major version + + Checks if only major version is provided and assumes + minor version as 0. 
+ Eg: If version = 18, then it returns version = '18.0' + """ + + if self.minor_version is None and self.build_version is None and \ + value != '0': + log.warning(_('Minor version assumed "0".')) + self.version = '.'.join([value, '0']) + return value + + def _validate_qualifier(self, value): + """Validate qualifier + + TOSCA version is invalid if a qualifier is present without the + fix version or with all of major, minor and fix version 0s. + + For example, the following versions are invalid + 18.0.abc + 0.0.0.abc + """ + if (self.fix_version is None and value) or \ + (self.minor_version == self.major_version == + self.fix_version == '0' and value): + ExceptionCollector.appendException( + InvalidTOSCAVersionPropertyException(what=(self.version))) + return value + + def _validate_build(self, value): + """Validate build version + + TOSCA version is invalid if build version is present without the + qualifier. + Eg: version = 18.0.0-1 is invalid. + """ + if not self.qualifier and value: + ExceptionCollector.appendException( + InvalidTOSCAVersionPropertyException(what=(self.version))) + return value + + def get_version(self): + return self.version +*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/utils/ThreadLocalsHolder.java b/src/main/java/org/openecomp/sdc/toscaparser/api/utils/ThreadLocalsHolder.java new file mode 100644 index 0000000..47ba972 --- /dev/null +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/utils/ThreadLocalsHolder.java @@ -0,0 +1,24 @@ +package org.openecomp.sdc.toscaparser.api.utils; + +import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; + +public class ThreadLocalsHolder { + + private static final ThreadLocal exceptionCollectorThreadLocal = new ThreadLocal<>(); + + private ThreadLocalsHolder(){} + + public static ExceptionCollector getCollector() { + return exceptionCollectorThreadLocal.get(); + } + + public static void setCollector(ExceptionCollector exceptionCollector) { + 
cleanup(); + exceptionCollectorThreadLocal.set(exceptionCollector); + } + + public static void cleanup(){ + exceptionCollectorThreadLocal.remove(); + } + +} diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/utils/UrlUtils.java b/src/main/java/org/openecomp/sdc/toscaparser/api/utils/UrlUtils.java new file mode 100644 index 0000000..092f827 --- /dev/null +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/utils/UrlUtils.java @@ -0,0 +1,123 @@ +package org.openecomp.sdc.toscaparser.api.utils; + +import java.io.IOException; +import java.net.HttpURLConnection; +import java.net.MalformedURLException; +import java.net.URL; + +import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; + +public class UrlUtils { + + public static boolean validateUrl(String sUrl) { + // Validates whether the given path is a URL or not + + // If the given path includes a scheme (http, https, ftp, ...) and a net + // location (a domain name such as www.github.com) it is validated as a URL + try { + URL url = new URL(sUrl); + if(url.getProtocol().equals("file")) { + return true; + } + return url.getAuthority() != null; + } + catch(MalformedURLException e) { + return false; + } + } + + public static String joinUrl(String sUrl,String relativePath) { + // Builds a new URL from the given URL and the relative path + + // Example: + // url: http://www.githib.com/openstack/heat + // relative_path: heat-translator + // - joined: http://www.githib.com/openstack/heat-translator + if(!validateUrl(sUrl)) { + ThreadLocalsHolder.getCollector().appendException(String.format( + "ValueError: The URL \"%s\" is malformed",sUrl)); + } + try { + URL base = new URL(sUrl); + return (new URL(base,relativePath)).toString(); + } + catch(MalformedURLException e) { + ThreadLocalsHolder.getCollector().appendException(String.format( + "ValueError: Joining URL \"%s\" and relative path \"%s\" caused an exception",sUrl,relativePath)); + return sUrl; + } + } + + public static boolean isUrlAccessible(String 
sUrl) { + // Validates whether the given URL is accessible + + // Returns true if the get call returns a 200 response code. + // Otherwise, returns false. + try { + HttpURLConnection connection = (HttpURLConnection) new URL(sUrl).openConnection(); + connection.setRequestMethod("HEAD"); + int responseCode = connection.getResponseCode(); + return responseCode == 200; + } + catch(IOException e) { + return false; + } + } + +} + +/*python + +from six.moves.urllib.parse import urljoin +from six.moves.urllib.parse import urlparse +from toscaparser.common.exception import ExceptionCollector +from toscaparser.utils.gettextutils import _ + +try: + # Python 3.x + import urllib.request as urllib2 +except ImportError: + # Python 2.x + import urllib2 + + +class UrlUtils(object): + + @staticmethod + def validate_url(path): + """Validates whether the given path is a URL or not. + + If the given path includes a scheme (http, https, ftp, ...) and a net + location (a domain name such as www.github.com) it is validated as a + URL. + """ + parsed = urlparse(path) + if parsed.scheme == 'file': + # If the url uses the file scheme netloc will be "" + return True + else: + return bool(parsed.scheme) and bool(parsed.netloc) + + @staticmethod + def join_url(url, relative_path): + """Builds a new URL from the given URL and the relative path. + + Example: + url: http://www.githib.com/openstack/heat + relative_path: heat-translator + - joined: http://www.githib.com/openstack/heat-translator + """ + if not UrlUtils.validate_url(url): + ExceptionCollector.appendException( + ValueError(_('"%s" is not a valid URL.') % url)) + return urljoin(url, relative_path) + + @staticmethod + def url_accessible(url): + """Validates whether the given URL is accessible. + + Returns true if the get call returns a 200 response code. + Otherwise, returns false. 
+ """ + return urllib2.urlopen(url).getcode() == 200 +*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/utils/ValidateUtils.java b/src/main/java/org/openecomp/sdc/toscaparser/api/utils/ValidateUtils.java new file mode 100644 index 0000000..291316f --- /dev/null +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/utils/ValidateUtils.java @@ -0,0 +1,409 @@ +package org.openecomp.sdc.toscaparser.api.utils; + +import java.util.ArrayList; +import java.util.Date; +import java.util.LinkedHashMap; + +import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; + +public class ValidateUtils { + + private static final String RANGE_UNBOUNDED = "UNBOUNDED"; + + public static Object strToNum(Object value) { + // Convert a string representation of a number into a numeric type + // tODO(TBD) we should not allow numeric values in, input should be str + if(value instanceof Number) { + return value; + } + if(!(value instanceof String)) { + + } + try { + return Integer.parseInt((String)value); + } + catch(NumberFormatException e) { + } + try { + return Float.parseFloat((String)value); + } + catch(Exception e) { + } + return null; + } + + public static Object validateNumeric(Object value) { + if(!(value instanceof Number)) { + ThreadLocalsHolder.getCollector().appendException(String.format( + "ValueError: \"%s\" is not a numeric",value.toString())); + } + return value; + } + + public static Object validateInteger(Object value) { + if(!(value instanceof Integer)) { + // allow "true" and "false" + if(value instanceof Boolean) { + return (Boolean)value ? 
1 : 0; + } + ThreadLocalsHolder.getCollector().appendException(String.format( + "ValueError: \"%s\" is not an integer",value.toString())); + } + return value; + } + + public static Object validateFloat(Object value) { + if(!(value instanceof Float || value instanceof Double)) { + ThreadLocalsHolder.getCollector().appendException(String.format( + "ValueError: \"%s\" is not a float",value.toString())); + } + return value; + } + + public static Object validateString(Object value) { + if(!(value instanceof String)) { + ThreadLocalsHolder.getCollector().appendException(String.format( + "ValueError: \'%s\' is not a string",value.toString())); + } + return value; + } + + public static Object validateList(Object value) { + if(!(value instanceof ArrayList)) { + ThreadLocalsHolder.getCollector().appendException(String.format( + "ValueError: \"%s\" is not a list",value.toString())); + } + return value; + } + + + @SuppressWarnings("unchecked") + public static Object validateRange(Object range) { + // list class check + validateList(range); + // validate range list has a min and max + if(range instanceof ArrayList && ((ArrayList)range).size() != 2) { + ThreadLocalsHolder.getCollector().appendException(String.format( + "ValueError: \"%s\" is not a valid range",range.toString())); + // too dangerous to continue... + return range; + } + // validate min and max are numerics or the keyword UNBOUNDED + boolean minTest = false; + boolean maxTest = false; + Object r0 = ((ArrayList)range).get(0); + Object r1 = ((ArrayList)range).get(1); + + if(!(r0 instanceof Integer) && !(r0 instanceof Float) || + !(r1 instanceof Integer) && !(r1 instanceof Float)) { + ThreadLocalsHolder.getCollector().appendException(String.format( + "ValueError: \"%s\" is not a valid range",range.toString())); + // too dangerous to continue... 
+ return range; + } + + Float min = 0.0F; + Float max = 0.0F; + if(r0 instanceof String && ((String)r0).equals(RANGE_UNBOUNDED)) { + minTest = true; + } + else { + min = r0 instanceof Integer ? ((Integer)r0).floatValue() : (Float)r0; + } + if(r1 instanceof String && ((String)r1).equals(RANGE_UNBOUNDED)) { + maxTest = true; + } + else { + max = r1 instanceof Integer ? ((Integer)r1).floatValue() : (Float)r1; + } + + // validate the max > min (account for UNBOUNDED) + if(!minTest && !maxTest) { + // Note: min == max is allowed + if(min > max) { + ThreadLocalsHolder.getCollector().appendException(String.format( + "ValueError:\"%s\" is not a valid range",range.toString())); + } + } + return range; + } + + @SuppressWarnings("unchecked") + public static Object validateValueInRange(Object value,Object range,String propName) { + // verify all 3 are numeric and convert to Floats + if(!(value instanceof Integer || value instanceof Float)) { + ThreadLocalsHolder.getCollector().appendException(String.format( + "ValueError: validateInRange: \"%s\" is not a number",range.toString())); + return value; + } + Float fval = value instanceof Integer ? ((Integer)value).floatValue() : (Float)value; + + ////////////////////////// + //"validateRange(range);" + ////////////////////////// + // better safe than sorry... + // validate that range list has a min and max + if(range instanceof ArrayList && ((ArrayList)range).size() != 2) { + ThreadLocalsHolder.getCollector().appendException(String.format( + "ValueError: \"%s\" is not a valid range",range.toString())); + // too dangerous to continue... 
+ return value; + } + // validate min and max are numerics or the keyword UNBOUNDED + boolean minTest = false; + boolean maxTest = false; + Object r0 = ((ArrayList)range).get(0); + Object r1 = ((ArrayList)range).get(1); + + if(!(r0 instanceof Integer) && !(r0 instanceof Float) || + !(r1 instanceof Integer) && !(r1 instanceof Float)) { + ThreadLocalsHolder.getCollector().appendException(String.format( + "ValueError: \"%s\" is not a valid range",range.toString())); + // too dangerous to continue... + return value; + } + + Float min = 0.0F; + Float max = 0.0F; + if(r0 instanceof String && ((String)r0).equals(RANGE_UNBOUNDED)) { + minTest = true; + } + else { + min = r0 instanceof Integer ? ((Integer)r0).floatValue() : (Float)r0; + } + if(r1 instanceof String && ((String)r1).equals(RANGE_UNBOUNDED)) { + maxTest = true; + } + else { + max = r1 instanceof Integer ? ((Integer)r1).floatValue() : (Float)r1; + } + + // validate the max > min (account for UNBOUNDED) + if(!minTest && !maxTest) { + // Note: min == max is allowed + if(min > max) { + ThreadLocalsHolder.getCollector().appendException(String.format( + "ValueError:\"%s\" is not a valid range",range.toString())); + } + } + // finally... 
+ boolean bError = false; + //Note: value is valid if equal to min + if(!minTest) { + if(fval < min) { + bError = true; + } + } + // Note: value is valid if equal to max + if(!maxTest) { + if(fval > max) { + bError = true; + } + } + if(bError) { + ThreadLocalsHolder.getCollector().appendException(String.format( + "RangeValueError: Property \"%s\", \"%s\" not in range [\"%s\" - \"%s\"", + propName,value.toString(),r0.toString(),r1.toString())); + } + return value; + } + + public static Object validateMap(Object ob) { + if(!(ob instanceof LinkedHashMap)) { + ThreadLocalsHolder.getCollector().appendException(String.format( + "ValueError\"%s\" is not a map.",ob.toString())); + } + return ob; + } + + public static Object validateBoolean(Object value) { + if(value instanceof Boolean) { + return value; + } + if(value instanceof String) { + String normalized = ((String)value).toLowerCase(); + if(normalized.equals("true") || normalized.equals("false")) { + return normalized.equals("true"); + } + } + ThreadLocalsHolder.getCollector().appendException(String.format( + "ValueError: \"%s\" is not a boolean",value.toString())); + return value; + } + + public static Object validateTimestamp(Object value) { + /* + try: + # Note: we must return our own exception message + # as dateutil's parser returns different types / values on + # different systems. OSX, for example, returns a tuple + # containing a different error message than Linux + dateutil.parser.parse(value) + except Exception as e: + original_err_msg = str(e) + log.error(original_err_msg) + ExceptionCollector.appendException( + ValueError(_('"%(val)s" is not a valid timestamp. 
"%(msg)s"') % + {'val': value, 'msg': original_err_msg})) + */ + + // timestamps are loaded as Date objects by the YAML parser + if(!(value instanceof Date)) { + ThreadLocalsHolder.getCollector().appendException(String.format( + "ValueError: \"%s\" is not a valid timestamp", + value.toString())); + + } + return value; + } + +} + +/*python + +from toscaparser.elements import constraints +from toscaparser.common.exception import ExceptionCollector +from toscaparser.common.exception import InvalidTOSCAVersionPropertyException +from toscaparser.common.exception import RangeValueError +from toscaparser.utils.gettextutils import _ + +log = logging.getLogger('tosca') + +RANGE_UNBOUNDED = 'UNBOUNDED' + + +def str_to_num(value): + '''Convert a string representation of a number into a numeric type.''' + # tODO(TBD) we should not allow numeric values in, input should be str + if isinstance(value, numbers.Number): + return value + try: + return int(value) + except ValueError: + return float(value) + + +def validate_numeric(value): + if not isinstance(value, numbers.Number): + ExceptionCollector.appendException( + ValueError(_('"%s" is not a numeric.') % value)) + return value + + +def validate_integer(value): + if not isinstance(value, int): + try: + value = int(value) + except Exception: + ExceptionCollector.appendException( + ValueError(_('"%s" is not an integer.') % value)) + return value + + +def validate_float(value): + if not isinstance(value, float): + ExceptionCollector.appendException( + ValueError(_('"%s" is not a float.') % value)) + return value + + +def validate_string(value): + if not isinstance(value, six.string_types): + ExceptionCollector.appendException( + ValueError(_('"%s" is not a string.') % value)) + return value + + +def validate_list(value): + if not isinstance(value, list): + ExceptionCollector.appendException( + ValueError(_('"%s" is not a list.') % value)) + return value + + +def validate_range(range): + # list class check + validate_list(range) + # 
validate range list has a min and max + if len(range) != 2: + ExceptionCollector.appendException( + ValueError(_('"%s" is not a valid range.') % range)) + # validate min and max are numerics or the keyword UNBOUNDED + min_test = max_test = False + if not range[0] == RANGE_UNBOUNDED: + min = validate_numeric(range[0]) + else: + min_test = True + if not range[1] == RANGE_UNBOUNDED: + max = validate_numeric(range[1]) + else: + max_test = True + # validate the max > min (account for UNBOUNDED) + if not min_test and not max_test: + # Note: min == max is allowed + if min > max: + ExceptionCollector.appendException( + ValueError(_('"%s" is not a valid range.') % range)) + + return range + + +def validate_value_in_range(value, range, prop_name): + validate_numeric(value) + validate_range(range) + + # Note: value is valid if equal to min + if range[0] != RANGE_UNBOUNDED: + if value < range[0]: + ExceptionCollector.appendException( + RangeValueError(pname=prop_name, + pvalue=value, + vmin=range[0], + vmax=range[1])) + # Note: value is valid if equal to max + if range[1] != RANGE_UNBOUNDED: + if value > range[1]: + ExceptionCollector.appendException( + RangeValueError(pname=prop_name, + pvalue=value, + vmin=range[0], + vmax=range[1])) + return value + + +def validate_map(value): + if not isinstance(value, collections.Mapping): + ExceptionCollector.appendException( + ValueError(_('"%s" is not a map.') % value)) + return value + + +def validate_boolean(value): + if isinstance(value, bool): + return value + + if isinstance(value, str): + normalised = value.lower() + if normalised in ['true', 'false']: + return normalised == 'true' + + ExceptionCollector.appendException( + ValueError(_('"%s" is not a boolean.') % value)) + + +def validate_timestamp(value): + try: + # Note: we must return our own exception message + # as dateutil's parser returns different types / values on + # different systems. 
OSX, for example, returns a tuple + # containing a different error message than Linux + dateutil.parser.parse(value) + except Exception as e: + original_err_msg = str(e) + log.error(original_err_msg) + ExceptionCollector.appendException( + ValueError(_('"%(val)s" is not a valid timestamp. "%(msg)s"') % + {'val': value, 'msg': original_err_msg})) + return + +*/ \ No newline at end of file diff --git a/src/main/resources/TOSCA_definition_1_0.yaml b/src/main/resources/TOSCA_definition_1_0.yaml new file mode 100644 index 0000000..554b7b6 --- /dev/null +++ b/src/main/resources/TOSCA_definition_1_0.yaml @@ -0,0 +1,967 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +########################################################################## +# The content of this file reflects TOSCA Simple Profile in YAML version +# 1.0.0. It describes the definition for TOSCA types including Node Type, +# Relationship Type, Capability Type and Interfaces. +########################################################################## +tosca_definitions_version: tosca_simple_yaml_1_0 + +########################################################################## +# Node Type. +# A Node Type is a reusable entity that defines the type of one or more +# Node Templates. +########################################################################## +node_types: + tosca.nodes.Root: + description: > + The TOSCA root node all other TOSCA base node types derive from. 
+ attributes: + tosca_id: + type: string + tosca_name: + type: string + state: + type: string + capabilities: + feature: + type: tosca.capabilities.Node + requirements: + - dependency: + capability: tosca.capabilities.Node + node: tosca.nodes.Root + relationship: tosca.relationships.DependsOn + occurrences: [ 0, UNBOUNDED ] + interfaces: + Standard: + type: tosca.interfaces.node.lifecycle.Standard + + tosca.nodes.Compute: + derived_from: tosca.nodes.Root + attributes: + private_address: + type: string + public_address: + type: string + networks: + type: map + entry_schema: + type: tosca.datatypes.network.NetworkInfo + ports: + type: map + entry_schema: + type: tosca.datatypes.network.PortInfo + capabilities: + host: + type: tosca.capabilities.Container + binding: + type: tosca.capabilities.network.Bindable + os: + type: tosca.capabilities.OperatingSystem + scalable: + type: tosca.capabilities.Scalable + endpoint: + type: tosca.capabilities.Endpoint.Admin + requirements: + - local_storage: + capability: tosca.capabilities.Attachment + node: tosca.nodes.BlockStorage + relationship: tosca.relationships.AttachesTo + occurrences: [0, UNBOUNDED] + + tosca.nodes.SoftwareComponent: + derived_from: tosca.nodes.Root + properties: + # domain-specific software component version + component_version: + type: version + required: false + description: > + Software component version. + admin_credential: + type: tosca.datatypes.Credential + required: false + requirements: + - host: + capability: tosca.capabilities.Container + node: tosca.nodes.Compute + relationship: tosca.relationships.HostedOn + + tosca.nodes.DBMS: + derived_from: tosca.nodes.SoftwareComponent + properties: + port: + required: false + type: integer + description: > + The port the DBMS service will listen to for data and requests. + root_password: + required: false + type: string + description: > + The root password for the DBMS service. 
+ capabilities: + host: + type: tosca.capabilities.Container + valid_source_types: [tosca.nodes.Database] + + tosca.nodes.Database: + derived_from: tosca.nodes.Root + properties: + user: + required: false + type: string + description: > + User account name for DB administration + port: + required: false + type: integer + description: > + The port the database service will use to listen for incoming data and + requests. + name: + required: false + type: string + description: > + The name of the database. + password: + required: false + type: string + description: > + The password for the DB user account + requirements: + - host: + capability: tosca.capabilities.Container + node: tosca.nodes.DBMS + relationship: tosca.relationships.HostedOn + capabilities: + database_endpoint: + type: tosca.capabilities.Endpoint.Database + + tosca.nodes.WebServer: + derived_from: tosca.nodes.SoftwareComponent + capabilities: + data_endpoint: + type: tosca.capabilities.Endpoint + admin_endpoint: + type: tosca.capabilities.Endpoint.Admin + host: + type: tosca.capabilities.Container + valid_source_types: [tosca.nodes.WebApplication] + + tosca.nodes.WebApplication: + derived_from: tosca.nodes.Root + properties: + context_root: + type: string + required: false + requirements: + - host: + capability: tosca.capabilities.Container + node: tosca.nodes.WebServer + relationship: tosca.relationships.HostedOn + capabilities: + app_endpoint: + type: tosca.capabilities.Endpoint + + tosca.nodes.BlockStorage: + derived_from: tosca.nodes.Root + properties: + size: + type: scalar-unit.size + constraints: + - greater_or_equal: 1 MB + volume_id: + type: string + required: false + snapshot_id: + type: string + required: false + attributes: + volume_id: + type: string + capabilities: + attachment: + type: tosca.capabilities.Attachment + + tosca.nodes.network.Network: + derived_from: tosca.nodes.Root + description: > + The TOSCA Network node represents a simple, logical network service. 
+ properties: + ip_version: + type: integer + required: false + default: 4 + constraints: + - valid_values: [ 4, 6 ] + description: > + The IP version of the requested network. Valid values are 4 for ipv4 + or 6 for ipv6. + cidr: + type: string + required: false + description: > + The cidr block of the requested network. + start_ip: + type: string + required: false + description: > + The IP address to be used as the start of a pool of addresses within + the full IP range derived from the cidr block. + end_ip: + type: string + required: false + description: > + The IP address to be used as the end of a pool of addresses within + the full IP range derived from the cidr block. + gateway_ip: + type: string + required: false + description: > + The gateway IP address. + network_name: + type: string + required: false + description: > + An identifier that represents an existing Network instance in the + underlying cloud infrastructure or can be used as the name of the + newly created network. If network_name is provided and no other + properties are provided (with exception of network_id), then an + existing network instance will be used. If network_name is provided + alongside with more properties then a new network with this name will + be created. + network_id: + type: string + required: false + description: > + An identifier that represents an existing Network instance in the + underlying cloud infrastructure. This property is mutually exclusive + with all other properties except network_name. This can be used alone + or together with network_name to identify an existing network. + segmentation_id: + type: string + required: false + description: > + A segmentation identifier in the underlying cloud infrastructure. + E.g. VLAN ID, GRE tunnel ID, etc.. + network_type: + type: string + required: false + description: > + It specifies the nature of the physical network in the underlying + cloud infrastructure. Examples are flat, vlan, gre or vxlan. 
+ For flat and vlan types, physical_network should be provided too. + physical_network: + type: string + required: false + description: > + It identifies the physical network on top of which the network is + implemented, e.g. physnet1. This property is required if network_type + is flat or vlan. + dhcp_enabled: + type: boolean + required: false + default: true + description: > + Indicates should DHCP service be enabled on the network or not. + capabilities: + link: + type: tosca.capabilities.network.Linkable + + tosca.nodes.network.Port: + derived_from: tosca.nodes.Root + description: > + The TOSCA Port node represents a logical entity that associates between + Compute and Network normative types. The Port node type effectively + represents a single virtual NIC on the Compute node instance. + properties: + ip_address: + type: string + required: false + description: > + Allow the user to set a static IP. + order: + type: integer + required: false + default: 0 + constraints: + - greater_or_equal: 0 + description: > + The order of the NIC on the compute instance (e.g. eth2). + is_default: + type: boolean + required: false + default: false + description: > + If is_default=true this port will be used for the default gateway + route. Only one port that is associated to single compute node can + set as is_default=true. + ip_range_start: + type: string + required: false + description: > + Defines the starting IP of a range to be allocated for the compute + instances that are associated with this Port. + ip_range_end: + type: string + required: false + description: > + Defines the ending IP of a range to be allocated for the compute + instances that are associated with this Port. + attributes: + ip_address: + type: string + requirements: + - binding: + description: > + Binding requirement expresses the relationship between Port and + Compute nodes. 
Effectively it indicates that the Port will be + attached to specific Compute node instance + capability: tosca.capabilities.network.Bindable + relationship: tosca.relationships.network.BindsTo + node: tosca.nodes.Compute + - link: + description: > + Link requirement expresses the relationship between Port and Network + nodes. It indicates which network this port will connect to. + capability: tosca.capabilities.network.Linkable + relationship: tosca.relationships.network.LinksTo + node: tosca.nodes.network.Network + + tosca.nodes.network.FloatingIP: + derived_from: tosca.nodes.Root + description: > + The TOSCA FloatingIP node represents a floating IP that can associate to a Port. + properties: + floating_network: + type: string + required: true + floating_ip_address: + type: string + required: false + port_id: + type: string + required: false + requirements: + - link: + capability: tosca.capabilities.network.Linkable + relationship: tosca.relationships.network.LinksTo + node: tosca.nodes.network.Port + + tosca.nodes.ObjectStorage: + derived_from: tosca.nodes.Root + description: > + The TOSCA ObjectStorage node represents storage that provides the ability + to store data as objects (or BLOBs of data) without consideration for the + underlying filesystem or devices + properties: + name: + type: string + required: true + description: > + The logical name of the object store (or container). + size: + type: scalar-unit.size + required: false + constraints: + - greater_or_equal: 0 GB + description: > + The requested initial storage size. + maxsize: + type: scalar-unit.size + required: false + constraints: + - greater_or_equal: 0 GB + description: > + The requested maximum storage size. 
+ capabilities: + storage_endpoint: + type: tosca.capabilities.Endpoint + + tosca.nodes.LoadBalancer: + derived_from: tosca.nodes.Root + properties: + algorithm: + type: string + required: false + status: experimental + capabilities: + client: + type: tosca.capabilities.Endpoint.Public + occurrences: [0, UNBOUNDED] + description: the Floating (IP) client’s on the public network can connect to + requirements: + - application: + capability: tosca.capabilities.Endpoint + relationship: tosca.relationships.RoutesTo + occurrences: [0, UNBOUNDED] + description: Connection to one or more load balanced applications + + tosca.nodes.Container.Application: + derived_from: tosca.nodes.Root + requirements: + - host: + capability: tosca.capabilities.Container + node: tosca.nodes.Container.Runtime + relationship: tosca.relationships.HostedOn + + tosca.nodes.Container.Runtime: + derived_from: tosca.nodes.SoftwareComponent + capabilities: + host: + type: tosca.capabilities.Container + scalable: + type: tosca.capabilities.Scalable + + tosca.nodes.Container.Application.Docker: + derived_from: tosca.nodes.Container.Application + requirements: + - host: + capability: tosca.capabilities.Container.Docker + +########################################################################## +# Relationship Type. +# A Relationship Type is a reusable entity that defines the type of one +# or more relationships between Node Types or Node Templates. +########################################################################## +relationship_types: + tosca.relationships.Root: + description: > + The TOSCA root Relationship Type all other TOSCA base Relationship Types + derive from. 
+ attributes: + tosca_id: + type: string + tosca_name: + type: string + interfaces: + Configure: + type: tosca.interfaces.relationship.Configure + + tosca.relationships.DependsOn: + derived_from: tosca.relationships.Root + + tosca.relationships.HostedOn: + derived_from: tosca.relationships.Root + valid_target_types: [ tosca.capabilities.Container ] + + tosca.relationships.ConnectsTo: + derived_from: tosca.relationships.Root + valid_target_types: [ tosca.capabilities.Endpoint ] + credential: + type: tosca.datatypes.Credential + required: false + + tosca.relationships.AttachesTo: + derived_from: tosca.relationships.Root + valid_target_types: [ tosca.capabilities.Attachment ] + properties: + location: + required: true + type: string + constraints: + - min_length: 1 + device: + required: false + type: string + + tosca.relationships.RoutesTo: + derived_from: tosca.relationships.ConnectsTo + valid_target_types: [ tosca.capabilities.Endpoint ] + + tosca.relationships.network.LinksTo: + derived_from: tosca.relationships.DependsOn + valid_target_types: [ tosca.capabilities.network.Linkable ] + + tosca.relationships.network.BindsTo: + derived_from: tosca.relationships.DependsOn + valid_target_types: [ tosca.capabilities.network.Bindable ] + +########################################################################## +# Capability Type. +# A Capability Type is a reusable entity that describes a kind of +# capability that a Node Type can declare to expose. +########################################################################## +capability_types: + tosca.capabilities.Root: + description: > + The TOSCA root Capability Type all other TOSCA base Capability Types + derive from. 
+ + tosca.capabilities.Node: + derived_from: tosca.capabilities.Root + + tosca.capabilities.Container: + derived_from: tosca.capabilities.Root + properties: + num_cpus: + required: false + type: integer + constraints: + - greater_or_equal: 1 + cpu_frequency: + required: false + type: scalar-unit.frequency + constraints: + - greater_or_equal: 0.1 GHz + disk_size: + required: false + type: scalar-unit.size + constraints: + - greater_or_equal: 0 MB + mem_size: + required: false + type: scalar-unit.size + constraints: + - greater_or_equal: 0 MB + + tosca.capabilities.Endpoint: + derived_from: tosca.capabilities.Root + properties: + protocol: + type: string + required: true + default: tcp + port: + type: tosca.datatypes.network.PortDef + required: false + secure: + type: boolean + required: false + default: false + url_path: + type: string + required: false + port_name: + type: string + required: false + network_name: + type: string + required: false + default: PRIVATE + initiator: + type: string + required: false + default: source + constraints: + - valid_values: [source, target, peer] + ports: + type: map + required: false + constraints: + - min_length: 1 + entry_schema: + type: tosca.datatypes.network.PortSpec + attributes: + ip_address: + type: string + + tosca.capabilities.Endpoint.Admin: + derived_from: tosca.capabilities.Endpoint + properties: + secure: + type: boolean + default: true + constraints: + - equal: true + + tosca.capabilities.Endpoint.Public: + derived_from: tosca.capabilities.Endpoint + properties: + # Change the default network_name to use the first public network found + network_name: + type: string + default: PUBLIC + constraints: + - equal: PUBLIC + floating: + description: > + Indicates that the public address should be allocated from a pool of + floating IPs that are associated with the network. 
+ type: boolean + default: false + status: experimental + dns_name: + description: The optional name to register with DNS + type: string + required: false + status: experimental + + tosca.capabilities.Scalable: + derived_from: tosca.capabilities.Root + properties: + min_instances: + type: integer + required: true + default: 1 + description: > + This property is used to indicate the minimum number of instances + that should be created for the associated TOSCA Node Template by + a TOSCA orchestrator. + max_instances: + type: integer + required: true + default: 1 + description: > + This property is used to indicate the maximum number of instances + that should be created for the associated TOSCA Node Template by + a TOSCA orchestrator. + default_instances: + type: integer + required: false + description: > + An optional property that indicates the requested default number + of instances that should be the starting number of instances a + TOSCA orchestrator should attempt to allocate. + The value for this property MUST be in the range between the values + set for min_instances and max_instances properties. + + tosca.capabilities.Endpoint.Database: + derived_from: tosca.capabilities.Endpoint + + tosca.capabilities.Attachment: + derived_from: tosca.capabilities.Root + + tosca.capabilities.network.Linkable: + derived_from: tosca.capabilities.Root + description: > + A node type that includes the Linkable capability indicates that it can + be pointed by tosca.relationships.network.LinksTo relationship type, which + represents an association relationship between Port and Network node types. + + tosca.capabilities.network.Bindable: + derived_from: tosca.capabilities.Root + description: > + A node type that includes the Bindable capability indicates that it can + be pointed by tosca.relationships.network.BindsTo relationship type, which + represents a network association relationship between Port and Compute node + types. 
+ + tosca.capabilities.OperatingSystem: + derived_from: tosca.capabilities.Root + properties: + architecture: + required: false + type: string + description: > + The host Operating System (OS) architecture. + type: + required: false + type: string + description: > + The host Operating System (OS) type. + distribution: + required: false + type: string + description: > + The host Operating System (OS) distribution. Examples of valid values + for an “type” of “Linux” would include: + debian, fedora, rhel and ubuntu. + version: + required: false + type: version + description: > + The host Operating System version. + + tosca.capabilities.Container.Docker: + derived_from: tosca.capabilities.Container + properties: + version: + type: list + required: false + entry_schema: + type: version + description: > + The Docker version capability. + publish_all: + type: boolean + default: false + required: false + description: > + Indicates that all ports (ranges) listed in the dockerfile + using the EXPOSE keyword be published. + publish_ports: + type: list + entry_schema: + type: tosca.datatypes.network.PortSpec + required: false + description: > + List of ports mappings from source (Docker container) + to target (host) ports to publish. + expose_ports: + type: list + entry_schema: + type: tosca.datatypes.network.PortSpec + required: false + description: > + List of ports mappings from source (Docker container) to expose + to other Docker containers (not accessible outside host). + volumes: + type: list + entry_schema: + type: string + required: false + description: > + The dockerfile VOLUME command which is used to enable access + from the Docker container to a directory on the host machine. + host_id: + type: string + required: false + description: > + The optional identifier of an existing host resource + that should be used to run this container on. 
+ volume_id: + type: string + required: false + description: > + The optional identifier of an existing storage volume (resource) + that should be used to create the container's mount point(s) on. + +########################################################################## + # Interfaces Type. + # The Interfaces element describes a list of one or more interface + # definitions for a modelable entity (e.g., a Node or Relationship Type) + # as defined within the TOSCA Simple Profile specification. +########################################################################## +interface_types: + tosca.interfaces.node.lifecycle.Standard: + create: + description: Standard lifecycle create operation. + configure: + description: Standard lifecycle configure operation. + start: + description: Standard lifecycle start operation. + stop: + description: Standard lifecycle stop operation. + delete: + description: Standard lifecycle delete operation. + + tosca.interfaces.relationship.Configure: + pre_configure_source: + description: Operation to pre-configure the source endpoint. + pre_configure_target: + description: Operation to pre-configure the target endpoint. + post_configure_source: + description: Operation to post-configure the source endpoint. + post_configure_target: + description: Operation to post-configure the target endpoint. + add_target: + description: Operation to add a target node. + remove_target: + description: Operation to remove a target node. + add_source: > + description: Operation to notify the target node of a source node which + is now available via a relationship. + description: + target_changed: > + description: Operation to notify source some property or attribute of the + target changed + +########################################################################## + # Data Type. + # A Datatype is a complex data type declaration which contains other + # complex or simple data types. 
+########################################################################## +data_types: + tosca.datatypes.Root: + description: > + The TOSCA root Data Type all other TOSCA base Data Types derive from + + tosca.datatypes.network.NetworkInfo: + derived_from: tosca.datatypes.Root + properties: + network_name: + type: string + network_id: + type: string + addresses: + type: list + entry_schema: + type: string + + tosca.datatypes.network.PortInfo: + derived_from: tosca.datatypes.Root + properties: + port_name: + type: string + port_id: + type: string + network_id: + type: string + mac_address: + type: string + addresses: + type: list + entry_schema: + type: string + + tosca.datatypes.network.PortDef: + derived_from: tosca.datatypes.Root + type: integer + constraints: + - in_range: [ 1, 65535 ] + + tosca.datatypes.network.PortSpec: + derived_from: tosca.datatypes.Root + properties: + protocol: + type: string + required: true + default: tcp + constraints: + - valid_values: [ udp, tcp, igmp ] + target: + type: tosca.datatypes.network.PortDef + required: false + target_range: + type: range + required: false + constraints: + - in_range: [ 1, 65535 ] + source: + type: tosca.datatypes.network.PortDef + required: false + source_range: + type: range + required: false + constraints: + - in_range: [ 1, 65535 ] + + tosca.datatypes.Credential: + derived_from: tosca.datatypes.Root + properties: + protocol: + type: string + required: false + token_type: + type: string + default: password + required: true + token: + type: string + required: true + keys: + type: map + entry_schema: + type: string + required: false + user: + type: string + required: false + +########################################################################## + # Artifact Type. + # An Artifact Type is a reusable entity that defines the type of one or more + # files which Node Types or Node Templates can have dependent relationships + # and used during operations such as during installation or deployment. 
+########################################################################## +artifact_types: + tosca.artifacts.Root: + description: > + The TOSCA Artifact Type all other TOSCA Artifact Types derive from + properties: + version: version + + tosca.artifacts.File: + derived_from: tosca.artifacts.Root + + tosca.artifacts.Deployment: + derived_from: tosca.artifacts.Root + description: TOSCA base type for deployment artifacts + + tosca.artifacts.Deployment.Image: + derived_from: tosca.artifacts.Deployment + + tosca.artifacts.Deployment.Image.VM: + derived_from: tosca.artifacts.Deployment.Image + + tosca.artifacts.Implementation: + derived_from: tosca.artifacts.Root + description: TOSCA base type for implementation artifacts + + tosca.artifacts.Implementation.Bash: + derived_from: tosca.artifacts.Implementation + description: Script artifact for the Unix Bash shell + mime_type: application/x-sh + file_ext: [ sh ] + + tosca.artifacts.Implementation.Python: + derived_from: tosca.artifacts.Implementation + description: Artifact for the interpreted Python language + mime_type: application/x-python + file_ext: [ py ] + + tosca.artifacts.Deployment.Image.Container.Docker: + derived_from: tosca.artifacts.Deployment.Image + description: Docker container image + + tosca.artifacts.Deployment.Image.VM.ISO: + derived_from: tosca.artifacts.Deployment.Image + description: Virtual Machine (VM) image in ISO disk format + mime_type: application/octet-stream + file_ext: [ iso ] + + tosca.artifacts.Deployment.Image.VM.QCOW2: + derived_from: tosca.artifacts.Deployment.Image + description: Virtual Machine (VM) image in QCOW v2 standard disk format + mime_type: application/octet-stream + file_ext: [ qcow2 ] + +########################################################################## + # Policy Type. + # TOSCA Policy Types represent logical grouping of TOSCA nodes that have + # an implied relationship and need to be orchestrated or managed together + # to achieve some result. 
+########################################################################## +policy_types: + tosca.policies.Root: + description: The TOSCA Policy Type all other TOSCA Policy Types derive from. + + tosca.policies.Placement: + derived_from: tosca.policies.Root + description: The TOSCA Policy Type definition that is used to govern + placement of TOSCA nodes or groups of nodes. + + tosca.policies.Scaling: + derived_from: tosca.policies.Root + description: The TOSCA Policy Type definition that is used to govern + scaling of TOSCA nodes or groups of nodes. + + tosca.policies.Monitoring: + derived_from: tosca.policies.Root + description: The TOSCA Policy Type definition that is used to govern + monitoring of TOSCA nodes or groups of nodes. + + tosca.policies.Update: + derived_from: tosca.policies.Root + description: The TOSCA Policy Type definition that is used to govern + update of TOSCA nodes or groups of nodes. + + tosca.policies.Performance: + derived_from: tosca.policies.Root + description: The TOSCA Policy Type definition that is used to declare + performance requirements for TOSCA nodes or groups of nodes. + +########################################################################## + # Group Type. + # Group Type represents logical grouping of TOSCA nodes that have an + # implied membership relationship and may need to be orchestrated or + # managed together to achieve some result. 
+########################################################################## +group_types: + tosca.groups.Root: + description: The TOSCA Group Type all other TOSCA Group Types derive from + interfaces: + Standard: + type: tosca.interfaces.node.lifecycle.Standard diff --git a/src/main/resources/extensions/nfv/TOSCA_nfv_definition_1_0.yaml b/src/main/resources/extensions/nfv/TOSCA_nfv_definition_1_0.yaml new file mode 100644 index 0000000..365d70e --- /dev/null +++ b/src/main/resources/extensions/nfv/TOSCA_nfv_definition_1_0.yaml @@ -0,0 +1,240 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +########################################################################## +# The content of this file reflects TOSCA NFV Profile in YAML version +# 1.0.0. It describes the definition for TOSCA NFV types including Node Type, +# Relationship Type, Capability Type and Interfaces. +########################################################################## +tosca_definitions_version: tosca_simple_profile_for_nfv_1_0_0 + +########################################################################## +# Node Type. +# A Node Type is a reusable entity that defines the type of one or more +# Node Templates. +########################################################################## +node_types: + tosca.nodes.nfv.VNF: + derived_from: tosca.nodes.Root # Or should this be its own top - level type? 
+ properties: + id: + type: string + description: ID of this VNF + vendor: + type: string + description: name of the vendor who generate this VNF + version: + type: version + description: version of the software for this VNF + requirements: + - virtualLink: + capability: tosca.capabilities.nfv.VirtualLinkable + relationship: tosca.relationships.nfv.VirtualLinksTo + node: tosca.nodes.nfv.VL + + tosca.nodes.nfv.VDU: + derived_from: tosca.nodes.Compute + capabilities: + high_availability: + type: tosca.capabilities.nfv.HA + virtualbinding: + type: tosca.capabilities.nfv.VirtualBindable + monitoring_parameter: + type: tosca.capabilities.nfv.Metric + requirements: + - high_availability: + capability: tosca.capabilities.nfv.HA + relationship: tosca.relationships.nfv.HA + node: tosca.nodes.nfv.VDU + occurrences: [ 0, 1 ] + + tosca.nodes.nfv.CP: + derived_from: tosca.nodes.network.Port + properties: + type: + type: string + required: false + requirements: + - virtualLink: + capability: tosca.capabilities.nfv.VirtualLinkable + relationship: tosca.relationships.nfv.VirtualLinksTo + node: tosca.nodes.nfv.VL + - virtualBinding: + capability: tosca.capabilities.nfv.VirtualBindable + relationship: tosca.relationships.nfv.VirtualBindsTo + node: tosca.nodes.nfv.VDU + attributes: + address: + type: string + + tosca.nodes.nfv.VL: + derived_from: tosca.nodes.network.Network + properties: + vendor: + type: string + required: true + description: name of the vendor who generate this VL + capabilities: + virtual_linkable: + type: tosca.capabilities.nfv.VirtualLinkable + + tosca.nodes.nfv.VL.ELine: + derived_from: tosca.nodes.nfv.VL + capabilities: + virtual_linkable: + occurrences: 2 + + tosca.nodes.nfv.VL.ELAN: + derived_from: tosca.nodes.nfv.VL + + tosca.nodes.nfv.VL.ETree: + derived_from: tosca.nodes.nfv.VL + + tosca.nodes.nfv.FP: + derived_from: tosca.nodes.Root + properties: + policy: + type: string + required: false + description: name of the vendor who generate this VL + 
requirements: + - forwarder: + capability: tosca.capabilities.nfv.Forwarder + relationship: tosca.relationships.nfv.ForwardsTo + +########################################################################## +# Relationship Type. +# A Relationship Type is a reusable entity that defines the type of one +# or more relationships between Node Types or Node Templates. +########################################################################## + +relationship_types: + tosca.relationships.nfv.VirtualLinksTo: + derived_from: tosca.relationships.network.LinksTo + valid_target_types: [ tosca.capabilities.nfv.VirtualLinkable ] + + tosca.relationships.nfv.VirtualBindsTo: + derived_from: tosca.relationships.network.BindsTo + valid_target_types: [ tosca.capabilities.nfv.VirtualBindable ] + + tosca.relationships.nfv.HA: + derived_from: tosca.relationships.Root + valid_target_types: [ tosca.capabilities.nfv.HA ] + + tosca.relationships.nfv.Monitor: + derived_from: tosca.relationships.ConnectsTo + valid_target_types: [ tosca.capabilities.nfv.Metric ] + + tosca.relationships.nfv.ForwardsTo: + derived_from: tosca.relationships.root + valid_target_types: [ tosca.capabilities.nfv.Forwarder] + +########################################################################## +# Capability Type. +# A Capability Type is a reusable entity that describes a kind of +# capability that a Node Type can declare to expose. 
+########################################################################## + +capability_types: + tosca.capabilities.nfv.VirtualLinkable: + derived_from: tosca.capabilities.network.Linkable + + tosca.capabilities.nfv.VirtualBindable: + derived_from: tosca.capabilities.network.Bindable + + tosca.capabilities.nfv.HA: + derived_from: tosca.capabilities.Root + valid_source_types: [ tosca.nodes.nfv.VDU ] + + tosca.capabilities.nfv.HA.ActiveActive: + derived_from: tosca.capabilities.nfv.HA + + tosca.capabilities.nfv.HA.ActivePassive: + derived_from: tosca.capabilities.nfv.HA + + tosca.capabilities.nfv.Metric: + derived_from: tosca.capabilities.Root + + tosca.capabilities.nfv.Forwarder: + derived_from: tosca.capabilities.Root + +########################################################################## + # Interfaces Type. + # The Interfaces element describes a list of one or more interface + # definitions for a modelable entity (e.g., a Node or Relationship Type) + # as defined within the TOSCA Simple Profile specification. +########################################################################## + +########################################################################## + # Data Type. + # A Datatype is a complex data type declaration which contains other + # complex or simple data types. +########################################################################## + +########################################################################## + # Artifact Type. + # An Artifact Type is a reusable entity that defines the type of one or more + # files which Node Types or Node Templates can have dependent relationships + # and used during operations such as during installation or deployment. +########################################################################## + +########################################################################## + # Policy Type. 
+ # TOSCA Policy Types represent logical grouping of TOSCA nodes that have + # an implied relationship and need to be orchestrated or managed together + # to achieve some result. +########################################################################## + +########################################################################## + # Group Type + # +########################################################################## +group_types: + tosca.groups.nfv.VNFFG: + derived_from: tosca.groups.Root + + properties: + vendor: + type: string + required: true + description: name of the vendor who generate this VNFFG + + version: + type: string + required: true + description: version of this VNFFG + + number_of_endpoints: + type: integer + required: true + description: count of the external endpoints included in this VNFFG + + dependent_virtual_link: + type: list + entry_schema: + type: string + required: true + description: Reference to a VLD used in this Forwarding Graph + + connection_point: + type: list + entry_schema: + type: string + required: true + description: Reference to Connection Points forming the VNFFG + + constituent_vnfs: + type: list + entry_schema: + type: string + required: true + description: Reference to a list of VNFD used in this VNF Forwarding Graph diff --git a/src/main/resources/extensions/nfv/nfv.py b/src/main/resources/extensions/nfv/nfv.py new file mode 100644 index 0000000..0c7c2b9 --- /dev/null +++ b/src/main/resources/extensions/nfv/nfv.py @@ -0,0 +1,19 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the +# License for the specific language governing permissions and limitations +# under the License. + +# VERSION and DEFS_FILE are required for all extensions + +VERSION = 'tosca_simple_profile_for_nfv_1_0_0' + +DEFS_FILE = "TOSCA_nfv_definition_1_0.yaml" + +SECTIONS = ('metadata') diff --git a/src/test/java/org.openecomp.sdc.toscaparser/JToscaMetadataParse.java b/src/test/java/org.openecomp.sdc.toscaparser/JToscaMetadataParse.java new file mode 100644 index 0000000..584a0fd --- /dev/null +++ b/src/test/java/org.openecomp.sdc.toscaparser/JToscaMetadataParse.java @@ -0,0 +1,26 @@ +package org.openecomp.sdc.toscaparser; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; + +import java.io.File; +import java.util.LinkedHashMap; + +import org.junit.Test; +import org.openecomp.sdc.toscaparser.api.ToscaTemplate; +import org.openecomp.sdc.toscaparser.api.common.JToscaException; + +public class JToscaMetadataParse { + + @Test + public void testMetadataParsedCorrectly() throws JToscaException { + String fileStr = JToscaMetadataParse.class.getClassLoader().getResource("csars/csar_hello_world.csar").getFile(); + File file = new File(fileStr); + ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); + LinkedHashMap metadataProperties = toscaTemplate.getMetaProperties("TOSCA.meta"); + assertNotNull(metadataProperties); + Object entryDefinition = metadataProperties.get("Entry-Definitions"); + assertNotNull(entryDefinition); + assertEquals("tosca_helloworld.yaml", entryDefinition); + } +} diff --git a/src/test/resources/csars/csar_hello_world.csar b/src/test/resources/csars/csar_hello_world.csar new file mode 100644 index 0000000..43ffbbc Binary files /dev/null and b/src/test/resources/csars/csar_hello_world.csar differ diff --git a/src/test/resources/csars/service-ServiceFdnt-csar.csar b/src/test/resources/csars/service-ServiceFdnt-csar.csar new file mode 100644 index 0000000..983dc9b Binary 
files /dev/null and b/src/test/resources/csars/service-ServiceFdnt-csar.csar differ diff --git a/version.properties b/version.properties new file mode 100644 index 0000000..0424f62 --- /dev/null +++ b/version.properties @@ -0,0 +1,13 @@ +########################################################### +# Versioning variables +# Note that these variables cannot be structured (e.g. : version.release or version.snapshot etc... ) +# because they are used in Jenkins, whose plug-in doesn't support + +major=1 +minor=1 +patch=0 + +base_version=${major}.${minor}.${patch} + +release_version=${base_version} +snapshot_version=${base_version}-SNAPSHOT -- cgit 1.2.3-korg From 06dbe56b572649352a7f34fe62f773ad281b5dfa Mon Sep 17 00:00:00 2001 From: Pavel Aharoni Date: Thu, 8 Jun 2017 11:54:31 +0300 Subject: [SDC-28] added Metadata missing method Change-Id: Ifa9504e8b3d15465877fd8e5c9251870520831d8 Signed-off-by: Pavel Aharoni --- .../java/org/openecomp/sdc/toscaparser/api/elements/Metadata.java | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/Metadata.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/Metadata.java index 4f7bdd0..6cf84a5 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/Metadata.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/Metadata.java @@ -1,5 +1,6 @@ package org.openecomp.sdc.toscaparser.api.elements; +import java.util.HashMap; import java.util.Map; public class Metadata { @@ -14,6 +15,13 @@ public class Metadata { return !isEmpty() ? 
String.valueOf(this.metadataMap.get(key)) : null; } + public Map getPropertyMap() { + if(metadataMap == null){ + return null; + } + return new HashMap<>(metadataMap); + } + public void setValue(String key, Object value) { if (!isEmpty()) { this.metadataMap.put(key, value); -- cgit 1.2.3-korg From 38de9e0fc3dd7557b17f33d332663ff10bbc0200 Mon Sep 17 00:00:00 2001 From: Pavel Aharoni Date: Mon, 26 Jun 2017 17:16:37 +0300 Subject: [SDC-39] filter nt props values by operator Change-Id: I5ac27ba682923960d5dfa9f5f770afdb7fac6ae1 Signed-off-by: Pavel Aharoni --- README.md | 15 ++++++++++++++- pom.xml | 2 +- .../sdc/toscaparser/api/functions/Function.java | 15 ++++++++++----- .../sdc/toscaparser/api/functions/GetInput.java | 22 ++++++++++++++++------ 4 files changed, 41 insertions(+), 13 deletions(-) diff --git a/README.md b/README.md index 2c0f5e0..6913f03 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,5 @@ # OpenECOMP JTOSCA - + --- --- @@ -26,3 +26,16 @@ SDC Javadoc and Maven site *** to be completed on rrelease *** +# Release notes for versions + +1.1.0-SNAPSHOT + +Initial after separating into separate repo + +------------------------------- + +1.1.1-SNAPSHOT + +Added toString of Function (GetInput, etc.) 
+ +Allowed two arguments for GetInput - name of list input and index in list diff --git a/pom.xml b/pom.xml index 61a504b..d092e57 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ org.openecomp.sdc.jtosca jtosca - 1.1.0-SNAPSHOT + 1.1.1-SNAPSHOT diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/functions/Function.java b/src/main/java/org/openecomp/sdc/toscaparser/api/functions/Function.java index 102fbc0..5e5f31f 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/functions/Function.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/functions/Function.java @@ -113,24 +113,29 @@ public abstract class Function { } else if(funcType.equals("GetAttribute")) { return new GetAttribute(ttpl,context,funcName,funcArgs); - } + } else if(funcType.equals("GetProperty")) { return new GetProperty(ttpl,context,funcName,funcArgs); - } + } else if(funcType.equals("GetOperationOutput")) { return new GetOperationOutput(ttpl,context,funcName,funcArgs); - } + } else if(funcType.equals("Concat")) { return new Concat(ttpl,context,funcName,funcArgs); - } + } else if(funcType.equals("Token")) { return new Token(ttpl,context,funcName,funcArgs); - } + } } } } return rawFunctionObj; } + + @Override + public String toString() { + return name + ":" + args.toString(); + } } /*python diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetInput.java b/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetInput.java index 4332f70..62f2b39 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetInput.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetInput.java @@ -18,12 +18,17 @@ public class GetInput extends Function { @Override void validate() { - if(args.size() != 1) { - //PA - changed to WARNING from CRITICAL after talking to Renana, 22/05/2017 - ThreadLocalsHolder.getCollector().appendWarning(String.format( - "ValueError: Expected one argument for function \"get_input\" but received \"%s\"", - 
args.toString())); - } +// if(args.size() != 1) { +// //PA - changed to WARNING from CRITICAL after talking to Renana, 22/05/2017 +// ThreadLocalsHolder.getCollector().appendWarning(String.format( +// "ValueError: Expected one argument for function \"get_input\" but received \"%s\"", +// args.toString())); +// } + if(args.size() > 2) { + ThreadLocalsHolder.getCollector().appendWarning(String.format( + "ValueError: Expected max 2 arguments for function \"get_input\" but received \"%s\"", + args.size())); + } boolean bFound = false; for(Input inp: toscaTpl.getInputs()) { if(inp.getName().equals(args.get(0))) { @@ -56,6 +61,11 @@ public class GetInput extends Function { } } if(inputDef != null) { + if (args.size() == 2 && args.get(1) instanceof Integer) { + if (inputDef.getDefault() != null && inputDef.getDefault() instanceof ArrayList) { + return ((ArrayList) inputDef.getDefault()).get(((Integer)args.get(1)).intValue()); + } + } return inputDef.getDefault(); } return null; -- cgit 1.2.3-korg From 95eec749b89894f6f74661ec9f9eef46c6798341 Mon Sep 17 00:00:00 2001 From: Pavel Aharoni Date: Tue, 27 Jun 2017 14:15:04 +0300 Subject: [SDC-39] fix Function args toString Change-Id: I7b5d1445c701f7a18ebd5522fbf20b8cb5befc3b Signed-off-by: Pavel Aharoni --- .../java/org/openecomp/sdc/toscaparser/api/functions/Function.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/functions/Function.java b/src/main/java/org/openecomp/sdc/toscaparser/api/functions/Function.java index 5e5f31f..9c39b30 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/functions/Function.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/functions/Function.java @@ -134,7 +134,8 @@ public abstract class Function { @Override public String toString() { - return name + ":" + args.toString(); + String argsStr = args.size() > 1 ? 
args.toString() : args.get(0).toString(); + return name + ":" + argsStr; } } -- cgit 1.2.3-korg From 1f71a9c4f53d176c30ad01a220db1c7f45248625 Mon Sep 17 00:00:00 2001 From: Pavel Aharoni Date: Thu, 29 Jun 2017 13:57:23 +0300 Subject: [SDC-39] added def constr to Input Change-Id: If6031adac7585159732b8cb6dd124bae52527180 Signed-off-by: Pavel Aharoni --- .../java/org/openecomp/sdc/toscaparser/api/parameters/Input.java | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/parameters/Input.java b/src/main/java/org/openecomp/sdc/toscaparser/api/parameters/Input.java index 7b3e64f..28e57d2 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/parameters/Input.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/parameters/Input.java @@ -41,6 +41,12 @@ public class Input { private Schema schema; private LinkedHashMap customDefs; + public Input(){ + /** + * Added to support Input serialization + */ + } + public Input(String _name,LinkedHashMap _schemaDict,LinkedHashMap _customDefs) { name = _name; schema = new Schema(_name,_schemaDict); -- cgit 1.2.3-korg From dfd01d445981b858b2a215d98da27fd247c2bc40 Mon Sep 17 00:00:00 2001 From: ruty slominsky Date: Sun, 16 Jul 2017 14:07:42 +0300 Subject: [SDC-135] - no properties parent Change-Id: I529ce49a434f0e06cb94a6637736677765ba68b1 Signed-off-by: ruty slominsky --- .../sdc/toscaparser/api/elements/CapabilityTypeDef.java | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/CapabilityTypeDef.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/CapabilityTypeDef.java index 03e2c45..2994fa8 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/CapabilityTypeDef.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/CapabilityTypeDef.java @@ -49,13 +49,15 @@ public class CapabilityTypeDef extends StatefulEntityType { if(parentProperties != null) { 
for(Map.Entry me: parentProperties.entrySet()) { LinkedHashMap props = (LinkedHashMap)me.getValue(); - for(Map.Entry pe: props.entrySet()) { - String prop = pe.getKey(); - LinkedHashMap schema = (LinkedHashMap)pe.getValue(); - // add parent property if not overridden by children type - if(properties == null || properties.get(prop) == null) { - propsdefs.add(new PropertyDef(prop, null, schema)); - } + if (props != null) { + for(Map.Entry pe: props.entrySet()) { + String prop = pe.getKey(); + LinkedHashMap schema = (LinkedHashMap)pe.getValue(); + // add parent property if not overridden by children type + if(properties == null || properties.get(prop) == null) { + propsdefs.add(new PropertyDef(prop, null, schema)); + } + } } } } -- cgit 1.2.3-korg From 0810bec2516b1841bfb5500103329b1d578f443f Mon Sep 17 00:00:00 2001 From: Ester Rotstein Date: Wed, 19 Jul 2017 14:48:22 +0300 Subject: [SDC-142] nested node templates Change-Id: I602f84218825e59b0780d4786f1f1fe160cf5d44 Signed-off-by: Ester Rotstein --- .../sdc/toscaparser/api/NodeTemplate.java | 15 +-- .../sdc/toscaparser/api/ToscaTemplate.java | 103 +++++++++++---------- 2 files changed, 55 insertions(+), 63 deletions(-) diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/NodeTemplate.java b/src/main/java/org/openecomp/sdc/toscaparser/api/NodeTemplate.java index c8af559..11db32b 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/NodeTemplate.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/NodeTemplate.java @@ -4,7 +4,6 @@ import java.util.ArrayList; import java.util.LinkedHashMap; import java.util.Map; -import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; import org.openecomp.sdc.toscaparser.api.elements.*; import org.openecomp.sdc.toscaparser.api.utils.CopyUtils; import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; @@ -19,7 +18,6 @@ public class NodeTemplate extends EntityTemplate { private ArrayList relationshipTpl; private LinkedHashMap _relationships; 
private SubstitutionMappings subMappingToscaTemplate; - private SubstitutionMappings subMappingToscaTemplate2; private Metadata metadata; private static final String METADATA = "metadata"; @@ -42,7 +40,6 @@ public class NodeTemplate extends EntityTemplate { availableRelTypes = ntavailableRelTypes; _relationships = new LinkedHashMap(); subMappingToscaTemplate = null; - subMappingToscaTemplate2 = null; metadata = _metaData(); } @@ -440,7 +437,8 @@ public class NodeTemplate extends EntityTemplate { } // getter/setter - + + // multilevel nesting public SubstitutionMappings getSubMappingToscaTemplate() { return subMappingToscaTemplate; } @@ -449,15 +447,6 @@ public class NodeTemplate extends EntityTemplate { subMappingToscaTemplate = sm; } - // **experimental** (multilevel nesting) - public SubstitutionMappings getSubMappingToscaTemplate2() { - return subMappingToscaTemplate2; - } - - public void setSubMappingToscaTemplate2(SubstitutionMappings sm) { - subMappingToscaTemplate2 = sm; - } - public Metadata getMetaData() { return metadata; } diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/ToscaTemplate.java b/src/main/java/org/openecomp/sdc/toscaparser/api/ToscaTemplate.java index b13a2a5..76b86f5 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/ToscaTemplate.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/ToscaTemplate.java @@ -6,6 +6,7 @@ import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; import java.util.*; +import java.util.concurrent.ConcurrentHashMap; import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; import org.openecomp.sdc.toscaparser.api.common.JToscaException; @@ -81,7 +82,7 @@ public class ToscaTemplate extends Object { private ArrayList nodeTemplates; private ArrayList outputs; private ArrayList policies; - private LinkedHashMap nestedToscaTplsWithTopology; + private ConcurrentHashMap nestedToscaTplsWithTopology; private ArrayList nestedToscaTemplatesWithTopology; 
private ToscaGraph graph; private String csarTempDir; @@ -113,7 +114,7 @@ public class ToscaTemplate extends Object { path = null; tpl = null; csarTempDir = null; - nestedToscaTplsWithTopology = new LinkedHashMap(); + nestedToscaTplsWithTopology = new ConcurrentHashMap<>(); nestedToscaTemplatesWithTopology = new ArrayList(); if(_path != null && !_path.isEmpty()) { @@ -179,7 +180,8 @@ public class ToscaTemplate extends Object { this.nodeTemplates = _nodeTemplates(); this.outputs = _outputs(); this.policies = _policies(); - _handleNestedToscaTemplatesWithTopology(); +// _handleNestedToscaTemplatesWithTopology(); + _handleNestedToscaTemplatesWithTopology(topologyTemplate); graph = new ToscaGraph(nodeTemplates); } } @@ -361,10 +363,10 @@ public class ToscaTemplate extends Object { } } - // **experimental** (multi level nesting) RECURSIVE - BEWARE OF INIFINITE LOOPS... - private void _handleNestedToscaTemplatesWithTopology2(TopologyTemplate tt) { + // multi level nesting - RECURSIVE + private void _handleNestedToscaTemplatesWithTopology(TopologyTemplate tt) { if(++nestingLoopCounter > 10) { - log.error("ToscaTemplate - _handleNestedToscaTemplatesWithTopology2 - Nested Topologies Loop: too many levels, aborting"); + log.error("ToscaTemplate - _handleNestedToscaTemplatesWithTopology - Nested Topologies Loop: too many levels, aborting"); return; } for(Map.Entry me: nestedToscaTplsWithTopology.entrySet()) { @@ -372,13 +374,14 @@ public class ToscaTemplate extends Object { LinkedHashMap toscaTpl = (LinkedHashMap)me.getValue(); for(NodeTemplate nt: tt.getNodeTemplates()) { - if(_isSubMappedNode2(nt,toscaTpl)) { + if(_isSubMappedNode(nt,toscaTpl)) { parsedParams = _getParamsForNestedTemplate(nt); + ArrayList alim = (ArrayList)toscaTpl.get(IMPORTS); LinkedHashMap topologyTpl = (LinkedHashMap)toscaTpl.get(TOPOLOGY_TEMPLATE); TopologyTemplate topologyWithSubMapping = new TopologyTemplate(topologyTpl, - _getAllCustomDefs(null), + _getAllCustomDefs(alim), relationshipTypes, 
parsedParams, nt); @@ -386,44 +389,44 @@ public class ToscaTemplate extends Object { // Record nested topology templates in top level template //nestedToscaTemplatesWithTopology.add(topologyWithSubMapping); // Set substitution mapping object for mapped node - nt.setSubMappingToscaTemplate2( - topologyWithSubMapping.getSubstitutionMappings()); - _handleNestedToscaTemplatesWithTopology2(topologyWithSubMapping); - } - } - } - } - } - - private void _handleNestedToscaTemplatesWithTopology() { - for(Map.Entry me: nestedToscaTplsWithTopology.entrySet()) { - String fname = me.getKey(); - LinkedHashMap toscaTpl = - (LinkedHashMap)me.getValue(); - for(NodeTemplate nt: nodeTemplates) { - if(_isSubMappedNode(nt,toscaTpl)) { - parsedParams = _getParamsForNestedTemplate(nt); - ArrayList alim = (ArrayList)toscaTpl.get(IMPORTS); - LinkedHashMap topologyTpl = - (LinkedHashMap)toscaTpl.get(TOPOLOGY_TEMPLATE); - TopologyTemplate topologyWithSubMapping = - new TopologyTemplate(topologyTpl, - //_getAllCustomDefs(null), - _getAllCustomDefs(alim), - relationshipTypes, - parsedParams, - nt); - if(topologyWithSubMapping.getSubstitutionMappings() != null) { - // Record nested topology templates in top level template - nestedToscaTemplatesWithTopology.add(topologyWithSubMapping); - // Set substitution mapping object for mapped node nt.setSubMappingToscaTemplate( topologyWithSubMapping.getSubstitutionMappings()); + _handleNestedToscaTemplatesWithTopology(topologyWithSubMapping); } } } } } + +// private void _handleNestedToscaTemplatesWithTopology() { +// for(Map.Entry me: nestedToscaTplsWithTopology.entrySet()) { +// String fname = me.getKey(); +// LinkedHashMap toscaTpl = +// (LinkedHashMap)me.getValue(); +// for(NodeTemplate nt: nodeTemplates) { +// if(_isSubMappedNode(nt,toscaTpl)) { +// parsedParams = _getParamsForNestedTemplate(nt); +// ArrayList alim = (ArrayList)toscaTpl.get(IMPORTS); +// LinkedHashMap topologyTpl = +// (LinkedHashMap)toscaTpl.get(TOPOLOGY_TEMPLATE); +// 
TopologyTemplate topologyWithSubMapping = +// new TopologyTemplate(topologyTpl, +// //_getAllCustomDefs(null), +// _getAllCustomDefs(alim), +// relationshipTypes, +// parsedParams, +// nt); +// if(topologyWithSubMapping.getSubstitutionMappings() != null) { +// // Record nested topology templates in top level template +// nestedToscaTemplatesWithTopology.add(topologyWithSubMapping); +// // Set substitution mapping object for mapped node +// nt.setSubMappingToscaTemplate( +// topologyWithSubMapping.getSubstitutionMappings()); +// } +// } +// } +// } +// } private void _validateField() { String sVersion = _tplVersion(); @@ -575,7 +578,17 @@ public class ToscaTemplate extends Object { return metaProperties.get(propertiesFile); } - private boolean _isSubMappedNode(NodeTemplate nt,LinkedHashMap toscaTpl) { +// private boolean _isSubMappedNode(NodeTemplate nt,LinkedHashMap toscaTpl) { +// // Return True if the nodetemple is substituted +// if(nt != null && nt.getSubMappingToscaTemplate() == null && +// getSubMappingNodeType(toscaTpl).equals(nt.getType()) && +// nt.getInterfaces().size() < 1) { +// return true; +// } +// return false; +// } + + private boolean _isSubMappedNode(NodeTemplate nt, LinkedHashMap toscaTpl) { // Return True if the nodetemple is substituted if(nt != null && nt.getSubMappingToscaTemplate() == null && getSubMappingNodeType(toscaTpl).equals(nt.getType()) && @@ -585,16 +598,6 @@ public class ToscaTemplate extends Object { return false; } - private boolean _isSubMappedNode2(NodeTemplate nt,LinkedHashMap toscaTpl) { - // Return True if the nodetemple is substituted - if(nt != null && nt.getSubMappingToscaTemplate2() == null && - getSubMappingNodeType(toscaTpl).equals(nt.getType()) && - nt.getInterfaces().size() < 1) { - return true; - } - return false; - } - private LinkedHashMap _getParamsForNestedTemplate(NodeTemplate nt) { // Return total params for nested_template LinkedHashMap pparams; -- cgit 1.2.3-korg From 
eaabcf9464592d1481d8c5d56b1b9802481debf9 Mon Sep 17 00:00:00 2001 From: Ester Rotstein Date: Sun, 23 Jul 2017 12:59:51 +0300 Subject: [SDC-146] support get_input in all list cases Change-Id: I6a4b867de15908e6d06e6c7393ed710f65fc244f Signed-off-by: Ester Rotstein --- .../sdc/toscaparser/api/functions/Function.java | 83 ++++++++++++---------- .../sdc/toscaparser/api/functions/GetInput.java | 10 ++- 2 files changed, 53 insertions(+), 40 deletions(-) diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/functions/Function.java b/src/main/java/org/openecomp/sdc/toscaparser/api/functions/Function.java index 9c39b30..0d16092 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/functions/Function.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/functions/Function.java @@ -1,6 +1,7 @@ package org.openecomp.sdc.toscaparser.api.functions; import java.util.ArrayList; +import java.util.Arrays; import java.util.HashMap; import java.util.LinkedHashMap; @@ -91,47 +92,53 @@ public abstract class Function { // :param raw_function: The raw function as dict. // :return: Template function as Function instance or the raw_function if // parsing was unsuccessful. 
- - if(isFunction(rawFunctionObj)) { - if(rawFunctionObj instanceof LinkedHashMap) { - LinkedHashMap rawFunction = (LinkedHashMap)rawFunctionObj; - String funcName = (new ArrayList(rawFunction.keySet())).get(0); - if(functionMappings.keySet().contains(funcName)) { - String funcType = functionMappings.get(funcName); - Object oargs = (new ArrayList(rawFunction.values())).get(0); - ArrayList funcArgs; - if(oargs instanceof ArrayList) { - funcArgs = (ArrayList)oargs; - } - else { - funcArgs = new ArrayList<>(); - funcArgs.add(oargs); - } - - if(funcType.equals("GetInput")) { - return new GetInput(ttpl,context,funcName,funcArgs); - } - else if(funcType.equals("GetAttribute")) { - return new GetAttribute(ttpl,context,funcName,funcArgs); - } - else if(funcType.equals("GetProperty")) { - return new GetProperty(ttpl,context,funcName,funcArgs); - } - else if(funcType.equals("GetOperationOutput")) { - return new GetOperationOutput(ttpl,context,funcName,funcArgs); - } - else if(funcType.equals("Concat")) { - return new Concat(ttpl,context,funcName,funcArgs); - } - else if(funcType.equals("Token")) { - return new Token(ttpl,context,funcName,funcArgs); - } - } - } - } + if (rawFunctionObj instanceof LinkedHashMap) { + return getFunctionForObjectItem(ttpl, context, rawFunctionObj); + } else if (rawFunctionObj instanceof ArrayList) { + ArrayList rawFunctionObjList = new ArrayList<>(); + for (Object rawFunctionObjItem: (ArrayList) rawFunctionObj) { + rawFunctionObjList.add(getFunctionForObjectItem(ttpl, context, rawFunctionObjItem)); + } + return rawFunctionObjList; + } + return rawFunctionObj; } + private static Object getFunctionForObjectItem(TopologyTemplate ttpl, Object context, Object rawFunctionObjItem) { + if(isFunction(rawFunctionObjItem)) { + LinkedHashMap rawFunction = (LinkedHashMap) rawFunctionObjItem; + String funcName = (new ArrayList(rawFunction.keySet())).get(0); + if (functionMappings.keySet().contains(funcName)) { + String funcType = 
functionMappings.get(funcName); + Object oargs = (new ArrayList(rawFunction.values())).get(0); + ArrayList funcArgs; + if (oargs instanceof ArrayList) { + funcArgs = (ArrayList) oargs; + } else { + funcArgs = new ArrayList<>(); + funcArgs.add(oargs); + } + + if (funcType.equals("GetInput")) { + return new GetInput(ttpl, context, funcName, funcArgs); + } else if (funcType.equals("GetAttribute")) { + return new GetAttribute(ttpl, context, funcName, funcArgs); + } else if (funcType.equals("GetProperty")) { + return new GetProperty(ttpl, context, funcName, funcArgs); + } else if (funcType.equals("GetOperationOutput")) { + return new GetOperationOutput(ttpl, context, funcName, funcArgs); + } else if (funcType.equals("Concat")) { + return new Concat(ttpl, context, funcName, funcArgs); + } else if (funcType.equals("Token")) { + return new Token(ttpl, context, funcName, funcArgs); + } + } + } + + return rawFunctionObjItem; + } + @Override public String toString() { String argsStr = args.size() > 1 ? 
args.toString() : args.get(0).toString(); diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetInput.java b/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetInput.java index 62f2b39..dd6c05c 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetInput.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetInput.java @@ -48,9 +48,15 @@ public class GetInput extends Function { LinkedHashMap ttinp = (LinkedHashMap)toscaTpl.getTpl().get("inputs"); LinkedHashMap ttinpinp = (LinkedHashMap)ttinp.get(getInputName()); String type = (String)ttinpinp.get("type"); - - return DataEntity.validateDatatype( + + Object value = DataEntity.validateDatatype( type, toscaTpl.getParsedParams().get(getInputName()),null,null,null); + + if (value instanceof ArrayList && args.size() == 2 && args.get(1) instanceof Integer) { + return ((ArrayList) value).get((Integer)args.get(1)); + } + + return value; } Input inputDef = null; -- cgit 1.2.3-korg From 9c34952c0941653fb370960df216ec47ee2c9724 Mon Sep 17 00:00:00 2001 From: Ester Rotstein Date: Thu, 3 Aug 2017 12:30:14 +0300 Subject: [SDC-182] support nested properties with function Change-Id: Ibeeb4afc5cb5540e1fa902ae27f5e8b6d277b2db Signed-off-by: Ester Rotstein --- .../sdc/toscaparser/api/functions/Function.java | 33 ++++++++++++++++------ 1 file changed, 25 insertions(+), 8 deletions(-) diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/functions/Function.java b/src/main/java/org/openecomp/sdc/toscaparser/api/functions/Function.java index 0d16092..85fa62e 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/functions/Function.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/functions/Function.java @@ -1,9 +1,6 @@ package org.openecomp.sdc.toscaparser.api.functions; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.HashMap; -import java.util.LinkedHashMap; +import java.util.*; import 
org.openecomp.sdc.toscaparser.api.TopologyTemplate; @@ -92,12 +89,32 @@ public abstract class Function { // :param raw_function: The raw function as dict. // :return: Template function as Function instance or the raw_function if // parsing was unsuccessful. - if (rawFunctionObj instanceof LinkedHashMap) { - return getFunctionForObjectItem(ttpl, context, rawFunctionObj); - } else if (rawFunctionObj instanceof ArrayList) { + + + // iterate over leaves of the properties's tree and convert function leaves to function object, + // support List and Map nested, + // assuming that leaf value of function is always map type contains 1 item (e.g. my_leaf: {get_input: xxx}). + + if (rawFunctionObj instanceof LinkedHashMap) { // In map type case + LinkedHashMap rawFunction = ((LinkedHashMap) rawFunctionObj); + if(rawFunction.size() == 1) { // End point + return getFunctionForObjectItem(ttpl, context, rawFunction); + } else { + // iterate over map nested properties in recursion, convert leaves to function, + // and collect them in the same hierarchy as the original map. + LinkedHashMap rawFunctionObjMap = new LinkedHashMap(); + for (Object rawFunctionObjItem: rawFunction.entrySet()) { + Object itemValue = getFunction(ttpl, context, ((Map.Entry)rawFunctionObjItem).getValue()); + rawFunctionObjMap.put(((Map.Entry)rawFunctionObjItem).getKey(), itemValue); + } + return rawFunctionObjMap; + } + } else if (rawFunctionObj instanceof ArrayList) { // In list type case + // iterate over list properties in recursion, convert leaves to function, + // and collect them in the same hierarchy as the original list. 
ArrayList rawFunctionObjList = new ArrayList<>(); for (Object rawFunctionObjItem: (ArrayList) rawFunctionObj) { - rawFunctionObjList.add(getFunctionForObjectItem(ttpl, context, rawFunctionObjItem)); + rawFunctionObjList.add(getFunction(ttpl, context, rawFunctionObjItem)); } return rawFunctionObjList; } -- cgit 1.2.3-korg From 4149de4df046df1f3ee334eff211a0f979cf4cc7 Mon Sep 17 00:00:00 2001 From: Pavel Aharoni Date: Mon, 21 Aug 2017 17:22:08 +0300 Subject: [SDC-242] jtosca resolve get_input Change-Id: Ia0624cb00df47770af0e4514e6c2a35038151667 Signed-off-by: Pavel Aharoni --- pom.xml | 68 +++++++++++----------- .../sdc/toscaparser/api/TopologyTemplate.java | 22 ++++--- .../sdc/toscaparser/api/ToscaTemplate.java | 28 +++++++-- .../sdc/toscaparser/api/extensions/ExtTools.java | 4 -- .../sdc/toscaparser/api/functions/Function.java | 40 +++++++------ .../sdc/toscaparser/api/functions/GetProperty.java | 4 +- 6 files changed, 97 insertions(+), 69 deletions(-) diff --git a/pom.xml b/pom.xml index d092e57..6485d2b 100644 --- a/pom.xml +++ b/pom.xml @@ -1,10 +1,10 @@ - - 4.0.0 - - org.openecomp.sdc.jtosca - jtosca - 1.1.1-SNAPSHOT + + 4.0.0 + + org.openecomp.sdc.jtosca + jtosca + 1.1.3-SNAPSHOT @@ -28,31 +28,31 @@ releases - - - - - - org.yaml - snakeyaml - 1.14 - compile - - - - org.slf4j - slf4j-api - 1.7.25 - - - - - - junit - junit - 4.12 - + + + + + + org.yaml + snakeyaml + 1.14 + compile + + + + org.slf4j + slf4j-api + 1.7.25 + + + + + + junit + junit + 4.12 + @@ -207,6 +207,6 @@ ecomp-site dav:${nexus.proxy}${sitePath} - - + + \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/TopologyTemplate.java b/src/main/java/org/openecomp/sdc/toscaparser/api/TopologyTemplate.java index 25f118b..709dc81 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/TopologyTemplate.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/TopologyTemplate.java @@ -48,13 +48,15 @@ public class TopologyTemplate { private String description; 
private ToscaGraph graph; private SubstitutionMappings substitutionMappings; + private boolean resolveGetInput; public TopologyTemplate( LinkedHashMap _template, LinkedHashMap _customDefs, LinkedHashMap _relTypes,//TYPE LinkedHashMap _parsedParams, - NodeTemplate _subMappedNodeTemplate) { + NodeTemplate _subMappedNodeTemplate, + boolean _resolveGetInput) { tpl = _template; if(tpl != null) { @@ -63,6 +65,7 @@ public class TopologyTemplate { customDefs = _customDefs; relTypes = _relTypes; parsedParams = _parsedParams; + resolveGetInput = _resolveGetInput; _validateField(); description = _tplDescription(); inputs = _inputs(); @@ -400,14 +403,14 @@ public class TopologyTemplate { if(nodeTemplates != null) { for(NodeTemplate nt: nodeTemplates) { for(Property prop: nt.getPropertiesObjects()) { - prop.setValue(Function.getFunction(this,nt,prop.getValue())); + prop.setValue(Function.getFunction(this,nt,prop.getValue(), resolveGetInput)); } for(InterfacesDef ifd: nt.getInterfaces()) { LinkedHashMap ifin = ifd.getInputs(); if(ifin != null) { for(Map.Entry me: ifin.entrySet()) { String name = me.getKey(); - Object value = Function.getFunction(this,nt,me.getValue()); + Object value = Function.getFunction(this,nt,me.getValue(), resolveGetInput); ifd.setInput(name,value); } } @@ -438,7 +441,7 @@ public class TopologyTemplate { (LinkedHashMap)rel.get("properties"); for(String key: relprops.keySet()) { Object value = relprops.get(key); - Object func = Function.getFunction(this,req,value); + Object func = Function.getFunction(this,req,value, resolveGetInput); relprops.put(key,func); } } @@ -448,7 +451,7 @@ public class TopologyTemplate { for(Capability cap: nt.getCapabilitiesObjects()) { if(cap.getPropertiesObjects() != null) { for(Property prop: cap.getPropertiesObjects()) { - Object propvalue = Function.getFunction(this,nt,prop.getValue()); + Object propvalue = Function.getFunction(this,nt,prop.getValue(), resolveGetInput); if(propvalue instanceof GetInput) { propvalue = 
((GetInput)propvalue).result(); for(String p: cap.getProperties().keySet()) { @@ -475,7 +478,8 @@ public class TopologyTemplate { Object func = Function.getFunction( this, relTpl, - value); + value, + resolveGetInput); iface.setInput(name,func); } } @@ -486,7 +490,7 @@ public class TopologyTemplate { } } for(Output output: outputs) { - Object func = Function.getFunction(this,outputs,output.getValue()); + Object func = Function.getFunction(this,outputs,output.getValue(), resolveGetInput); if(func instanceof GetAttribute) { output.setAttr(Output.VALUE,func); } @@ -542,6 +546,10 @@ public class TopologyTemplate { public LinkedHashMap getParsedParams() { return parsedParams; } + + public boolean getResolveGetInput() { + return resolveGetInput; + } } /*python diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/ToscaTemplate.java b/src/main/java/org/openecomp/sdc/toscaparser/api/ToscaTemplate.java index 76b86f5..5d5cb87 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/ToscaTemplate.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/ToscaTemplate.java @@ -69,6 +69,7 @@ public class ToscaTemplate extends Object { private String path; private String inputPath; private LinkedHashMap parsedParams; + private boolean resolveGetInput; private LinkedHashMap tpl; private String version; private ArrayList imports; @@ -89,11 +90,25 @@ public class ToscaTemplate extends Object { private int nestingLoopCounter; private LinkedHashMap> metaProperties; - @SuppressWarnings("unchecked") public ToscaTemplate(String _path, - LinkedHashMap _parsedParams, + LinkedHashMap _parsedParams, + boolean aFile, + LinkedHashMap yamlDictTpl) throws JToscaException { + init(_path, _parsedParams, aFile, yamlDictTpl, true); + } + + public ToscaTemplate(String _path, + LinkedHashMap _parsedParams, boolean aFile, - LinkedHashMap yamlDictTpl) throws JToscaException { + LinkedHashMap yamlDictTpl, boolean resolveGetInput) throws JToscaException { + init(_path, _parsedParams, aFile, 
yamlDictTpl, resolveGetInput); + } + + @SuppressWarnings("unchecked") + private void init(String _path, + LinkedHashMap _parsedParams, + boolean aFile, + LinkedHashMap yamlDictTpl, boolean _resolveGetInput) throws JToscaException { ThreadLocalsHolder.setCollector(new ExceptionCollector(_path)); @@ -116,6 +131,7 @@ public class ToscaTemplate extends Object { csarTempDir = null; nestedToscaTplsWithTopology = new ConcurrentHashMap<>(); nestedToscaTemplatesWithTopology = new ArrayList(); + resolveGetInput = _resolveGetInput; if(_path != null && !_path.isEmpty()) { // save the original input path @@ -206,7 +222,8 @@ public class ToscaTemplate extends Object { _getAllCustomDefs(imports), relationshipTypes, parsedParams, - null); + null, + resolveGetInput); } private ArrayList _inputs() { @@ -384,7 +401,8 @@ public class ToscaTemplate extends Object { _getAllCustomDefs(alim), relationshipTypes, parsedParams, - nt); + nt, + resolveGetInput); if(topologyWithSubMapping.getSubstitutionMappings() != null) { // Record nested topology templates in top level template //nestedToscaTemplatesWithTopology.add(topologyWithSubMapping); diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/extensions/ExtTools.java b/src/main/java/org/openecomp/sdc/toscaparser/api/extensions/ExtTools.java index 6403d6e..90aa35c 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/extensions/ExtTools.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/extensions/ExtTools.java @@ -37,10 +37,6 @@ public class ExtTools { // for all folders in extdir File extDir = new File(extdir); File extDirList[] = extDir.listFiles(); - if (extDirList == null) { - String a = "aaaa"; - - } if (extDirList != null) { for(File f: extDirList) { if(f.isDirectory()) { diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/functions/Function.java b/src/main/java/org/openecomp/sdc/toscaparser/api/functions/Function.java index 85fa62e..7615a00 100644 --- 
a/src/main/java/org/openecomp/sdc/toscaparser/api/functions/Function.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/functions/Function.java @@ -3,6 +3,7 @@ package org.openecomp.sdc.toscaparser.api.functions; import java.util.*; import org.openecomp.sdc.toscaparser.api.TopologyTemplate; +import org.openecomp.sdc.toscaparser.api.ToscaTemplate; public abstract class Function { @@ -77,7 +78,7 @@ public abstract class Function { } @SuppressWarnings("unchecked") - public static Object getFunction(TopologyTemplate ttpl,Object context,Object rawFunctionObj) { + public static Object getFunction(TopologyTemplate ttpl,Object context,Object rawFunctionObj, boolean resolveGetInput) { // Gets a Function instance representing the provided template function. // If the format provided raw_function format is not relevant for template @@ -98,13 +99,13 @@ public abstract class Function { if (rawFunctionObj instanceof LinkedHashMap) { // In map type case LinkedHashMap rawFunction = ((LinkedHashMap) rawFunctionObj); if(rawFunction.size() == 1) { // End point - return getFunctionForObjectItem(ttpl, context, rawFunction); + return getFunctionForObjectItem(ttpl, context, rawFunction, resolveGetInput); } else { // iterate over map nested properties in recursion, convert leaves to function, // and collect them in the same hierarchy as the original map. LinkedHashMap rawFunctionObjMap = new LinkedHashMap(); for (Object rawFunctionObjItem: rawFunction.entrySet()) { - Object itemValue = getFunction(ttpl, context, ((Map.Entry)rawFunctionObjItem).getValue()); + Object itemValue = getFunction(ttpl, context, ((Map.Entry)rawFunctionObjItem).getValue(), resolveGetInput); rawFunctionObjMap.put(((Map.Entry)rawFunctionObjItem).getKey(), itemValue); } return rawFunctionObjMap; @@ -114,7 +115,7 @@ public abstract class Function { // and collect them in the same hierarchy as the original list. 
ArrayList rawFunctionObjList = new ArrayList<>(); for (Object rawFunctionObjItem: (ArrayList) rawFunctionObj) { - rawFunctionObjList.add(getFunction(ttpl, context, rawFunctionObjItem)); + rawFunctionObjList.add(getFunction(ttpl, context, rawFunctionObjItem, resolveGetInput)); } return rawFunctionObjList; } @@ -122,7 +123,7 @@ public abstract class Function { return rawFunctionObj; } - private static Object getFunctionForObjectItem(TopologyTemplate ttpl, Object context, Object rawFunctionObjItem) { + private static Object getFunctionForObjectItem(TopologyTemplate ttpl, Object context, Object rawFunctionObjItem, boolean resolveGetInput) { if(isFunction(rawFunctionObjItem)) { LinkedHashMap rawFunction = (LinkedHashMap) rawFunctionObjItem; String funcName = (new ArrayList(rawFunction.keySet())).get(0); @@ -137,18 +138,23 @@ public abstract class Function { funcArgs.add(oargs); } - if (funcType.equals("GetInput")) { - return new GetInput(ttpl, context, funcName, funcArgs); - } else if (funcType.equals("GetAttribute")) { - return new GetAttribute(ttpl, context, funcName, funcArgs); - } else if (funcType.equals("GetProperty")) { - return new GetProperty(ttpl, context, funcName, funcArgs); - } else if (funcType.equals("GetOperationOutput")) { - return new GetOperationOutput(ttpl, context, funcName, funcArgs); - } else if (funcType.equals("Concat")) { - return new Concat(ttpl, context, funcName, funcArgs); - } else if (funcType.equals("Token")) { - return new Token(ttpl, context, funcName, funcArgs); + switch (funcType) { + case "GetInput": + if (resolveGetInput) { + GetInput input = new GetInput(ttpl, context, funcName, funcArgs); + return input.result(); + } + return new GetInput(ttpl, context, funcName, funcArgs); + case "GetAttribute": + return new GetAttribute(ttpl, context, funcName, funcArgs); + case "GetProperty": + return new GetProperty(ttpl, context, funcName, funcArgs); + case "GetOperationOutput": + return new GetOperationOutput(ttpl, context, funcName, 
funcArgs); + case "Concat": + return new Concat(ttpl, context, funcName, funcArgs); + case "Token": + return new Token(ttpl, context, funcName, funcArgs); } } } diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetProperty.java b/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetProperty.java index 3550542..71420e8 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetProperty.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetProperty.java @@ -58,7 +58,7 @@ public class GetProperty extends Function { } Object prop = foundProp.getValue(); if(prop instanceof Function) { - Function.getFunction(toscaTpl,context, prop); + Function.getFunction(toscaTpl,context, prop, toscaTpl.getResolveGetInput()); } } else if(args.size() >= 3) { @@ -336,7 +336,7 @@ public class GetProperty extends Function { if(propertyValue instanceof Function) { return ((Function)propertyValue).result(); } - return Function.getFunction(toscaTpl,context,propertyValue); + return Function.getFunction(toscaTpl,context,propertyValue, toscaTpl.getResolveGetInput()); } public String getNodeTemplateName() { -- cgit 1.2.3-korg From 47cd117386e199c6d4b021b7fcc9e2727ef9bf05 Mon Sep 17 00:00:00 2001 From: Pavel Aharoni Date: Mon, 21 Aug 2017 21:28:48 +0300 Subject: [SDC-243] jtosca for port mirroring Change-Id: I4218eb219a2c7d5061a52753f285474bdba0b6cf Signed-off-by: Pavel Aharoni --- pom.xml | 421 ++++++++++----------- .../openecomp/sdc/toscaparser/api/Capability.java | 121 ------ .../sdc/toscaparser/api/CapabilityAssignment.java | 146 +++++++ .../sdc/toscaparser/api/CapabilityAssignments.java | 51 +++ .../sdc/toscaparser/api/EntityTemplate.java | 72 ++-- .../sdc/toscaparser/api/NodeTemplate.java | 61 ++- .../sdc/toscaparser/api/RequirementAssignment.java | 85 +++++ .../toscaparser/api/RequirementAssignments.java | 39 ++ .../sdc/toscaparser/api/SubstitutionMappings.java | 23 +- .../sdc/toscaparser/api/TopologyTemplate.java | 43 +-- 
.../sdc/toscaparser/api/elements/Metadata.java | 32 +- .../toscaparser/api/functions/GetAttribute.java | 49 +-- .../sdc/toscaparser/api/functions/GetProperty.java | 71 ++-- src/main/resources/TOSCA_definition_1_0.yaml | 6 +- .../extensions/nfv/TOSCA_nfv_definition_1_0.yaml | 6 +- 15 files changed, 697 insertions(+), 529 deletions(-) delete mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/Capability.java create mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/CapabilityAssignment.java create mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/CapabilityAssignments.java create mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/RequirementAssignment.java create mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/RequirementAssignments.java diff --git a/pom.xml b/pom.xml index 6485d2b..31c6b64 100644 --- a/pom.xml +++ b/pom.xml @@ -1,212 +1,211 @@ - - 4.0.0 - - org.openecomp.sdc.jtosca - jtosca - 1.1.3-SNAPSHOT - - - - - - - UTF-8 - - - - - - - - true - ${project.basedir} - ${project.basedir}/target/jacoco.exec - https://nexus.onap.org - /content/sites/site/org/openecomp/sdc/jtosca/${project.version} - snapshots - releases - - - - - - - - org.yaml - snakeyaml - 1.14 - compile - - - - org.slf4j - slf4j-api - 1.7.25 - - - - - - junit - junit - 4.12 - - - - - - - org.apache.maven.plugins - maven-javadoc-plugin - 2.10.4 - - false - org.umlgraph.doclet.UmlGraphDoc - - org.umlgraph - umlgraph - 5.6 - - -views - true - - - - - - - - - org.apache.maven.plugins - maven-site-plugin - 3.4 - - - org.apache.maven.wagon - wagon-webdav-jackrabbit - 2.10 - - - - - - org.jacoco - jacoco-maven-plugin - 0.7.8 - - - - prepare-agent - - prepare-agent - - - ${sonar.jacoco.reportPath} - - - - - - - - org.sonatype.plugins - nexus-staging-maven-plugin - 1.6.7 - true - - ${nexus.proxy} - ${staging.profile.id} - ecomp-staging - - - - - org.apache.maven.plugins - maven-compiler-plugin - 2.5.1 - true - - 1.8 - 1.8 - - - - org.apache.maven.plugins - 
maven-javadoc-plugin - 2.10.3 - - - - org.codehaus.mojo - license-maven-plugin - 1.10 - - false - ============LICENSE_START======================================================= - ============LICENSE_END========================================================= - ================================================================================ - apache_v2 - 2017 - AT&T Intellectual Property. All rights - reserved. - jtosca - true - true - true - true - false - - **/*.java - - - - - first - - update-file-header - - - - - - - - - - - central - Official Maven repository - http://repo2.maven.org/maven2/ - - - ecomp-releases - Release Repository - ${nexus.proxy}/content/repositories/releases/ - - - ecomp-staging - Staging Repository - ${nexus.proxy}/content/repositories/staging/ - - - - - - ecomp-releases - Release Repository - ${nexus.proxy}/content/repositories/${releases.path}/ - - - ecomp-snapshots - Snapshot Repository - ${nexus.proxy}/content/repositories/${snapshots.path}/ - - - ecomp-site - dav:${nexus.proxy}${sitePath} - - - + + 4.0.0 + + org.openecomp.sdc.jtosca + jtosca + 1.1.10-SNAPSHOT + + + + + + UTF-8 + + + + + + + + true + ${project.basedir} + ${project.basedir}/target/jacoco.exec + https://nexus.onap.org + /content/sites/site/org/openecomp/sdc/jtosca/${project.version} + snapshots + releases + + + + + + + + org.yaml + snakeyaml + 1.14 + compile + + + + org.slf4j + slf4j-api + 1.7.25 + + + + + + junit + junit + 4.12 + + + + + + + org.apache.maven.plugins + maven-javadoc-plugin + 2.10.4 + + false + org.umlgraph.doclet.UmlGraphDoc + + org.umlgraph + umlgraph + 5.6 + + -views + true + + + + + + + + + org.apache.maven.plugins + maven-site-plugin + 3.4 + + + org.apache.maven.wagon + wagon-webdav-jackrabbit + 2.10 + + + + + + org.jacoco + jacoco-maven-plugin + 0.7.8 + + + + prepare-agent + + prepare-agent + + + ${sonar.jacoco.reportPath} + + + + + + + + org.sonatype.plugins + nexus-staging-maven-plugin + 1.6.7 + true + + ${nexus.proxy} + ${staging.profile.id} + 
ecomp-staging + + + + + org.apache.maven.plugins + maven-compiler-plugin + 2.5.1 + true + + 1.8 + 1.8 + + + + org.apache.maven.plugins + maven-javadoc-plugin + 2.10.3 + + + + org.codehaus.mojo + license-maven-plugin + 1.10 + + false + ============LICENSE_START======================================================= + ============LICENSE_END========================================================= + ================================================================================ + apache_v2 + 2017 + AT&T Intellectual Property. All rights + reserved. + jtosca + true + true + true + true + false + + **/*.java + + + + + first + + update-file-header + + + + + + + + + + + central + Official Maven repository + http://repo2.maven.org/maven2/ + + + ecomp-releases + Release Repository + ${nexus.proxy}/content/repositories/releases/ + + + ecomp-staging + Staging Repository + ${nexus.proxy}/content/repositories/staging/ + + + + + + ecomp-releases + Release Repository + ${nexus.proxy}/content/repositories/${releases.path}/ + + + ecomp-snapshots + Snapshot Repository + ${nexus.proxy}/content/repositories/${snapshots.path}/ + + + ecomp-site + dav:${nexus.proxy}${sitePath} + + + \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/Capability.java b/src/main/java/org/openecomp/sdc/toscaparser/api/Capability.java deleted file mode 100644 index 09571db..0000000 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/Capability.java +++ /dev/null @@ -1,121 +0,0 @@ -package org.openecomp.sdc.toscaparser.api; - -import java.util.ArrayList; -import java.util.LinkedHashMap; -import java.util.Map; - -import org.openecomp.sdc.toscaparser.api.elements.CapabilityTypeDef; -import org.openecomp.sdc.toscaparser.api.elements.PropertyDef; - -public class Capability { - - private String name; - private LinkedHashMap _properties; - private CapabilityTypeDef _definition; - - public Capability(String cname, - LinkedHashMap cproperties, - CapabilityTypeDef cdefinition) 
{ - name = cname; - _properties = cproperties; - _definition = cdefinition; - } - - public ArrayList getPropertiesObjects() { - // Return a list of property objects - ArrayList properties = new ArrayList(); - LinkedHashMap props = _properties; - if(props != null) { - for(Map.Entry me: props.entrySet()) { - String pname = me.getKey(); - Object pvalue = me.getValue(); - - LinkedHashMap propsDef = _definition.getPropertiesDef(); - if(propsDef != null) { - PropertyDef pd = (PropertyDef)propsDef.get(pname); - if(pd != null) { - properties.add(new Property(pname,pvalue,pd.getSchema(),null)); - } - } - } - } - return properties; - } - - public LinkedHashMap getProperties() { - // Return a dictionary of property name-object pairs - LinkedHashMap npps = new LinkedHashMap<>(); - for(Property p: getPropertiesObjects()) { - npps.put(p.getName(),p); - } - return npps; - } - - public Object getPropertyValue(String pname) { - // Return the value of a given property name - LinkedHashMap props = getProperties(); - if(props != null && props.get(pname) != null) { - return props.get(name).getValue(); - } - return null; - } - - public String getName() { - return name; - } - - public CapabilityTypeDef getDefinition() { - return _definition; - } - - // setter - public void setProperty(String pname,Object pvalue) { - _properties.put(pname,pvalue); - } - - @Override - public String toString() { - return "Capability{" + - "name='" + name + '\'' + - ", _properties=" + _properties + - ", _definition=" + _definition + - '}'; - } -} - -/*python - -from toscaparser.properties import Property - - -class Capability(object): - '''TOSCA built-in capabilities type.''' - - def __init__(self, name, properties, definition): - self.name = name - self._properties = properties - self.definition = definition - - def get_properties_objects(self): - '''Return a list of property objects.''' - properties = [] - props = self._properties - if props: - for name, value in props.items(): - props_def = 
self.definition.get_properties_def() - if props_def and name in props_def: - properties.append(Property(name, value, - props_def[name].schema)) - return properties - - def get_properties(self): - '''Return a dictionary of property name-object pairs.''' - return {prop.name: prop - for prop in self.get_properties_objects()} - - def get_property_value(self, name): - '''Return the value of a given property name.''' - props = self.get_properties() - if props and name in props: - return props[name].value -*/ diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/CapabilityAssignment.java b/src/main/java/org/openecomp/sdc/toscaparser/api/CapabilityAssignment.java new file mode 100644 index 0000000..0eaa099 --- /dev/null +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/CapabilityAssignment.java @@ -0,0 +1,146 @@ +package org.openecomp.sdc.toscaparser.api; + +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.Map; + +import org.openecomp.sdc.toscaparser.api.elements.CapabilityTypeDef; +import org.openecomp.sdc.toscaparser.api.elements.PropertyDef; + +public class CapabilityAssignment { + + private String name; + private LinkedHashMap _properties; + private CapabilityTypeDef _definition; + + public CapabilityAssignment(String cname, + LinkedHashMap cproperties, + CapabilityTypeDef cdefinition) { + name = cname; + _properties = cproperties; + _definition = cdefinition; + } + + /** + * Get the properties list for capability + * @return list of property objects for capability + */ + public ArrayList getPropertiesObjects() { + // Return a list of property objects + ArrayList properties = new ArrayList(); + LinkedHashMap props = _properties; + if(props != null) { + for(Map.Entry me: props.entrySet()) { + String pname = me.getKey(); + Object pvalue = me.getValue(); + + LinkedHashMap propsDef = _definition.getPropertiesDef(); + if(propsDef != null) { + PropertyDef pd = (PropertyDef)propsDef.get(pname); + if(pd != null) { + properties.add(new 
Property(pname,pvalue,pd.getSchema(),null)); + } + } + } + } + return properties; + } + + /** + * Get the map of properties + * @return map of all properties contains dictionary of property name and property object + */ + public LinkedHashMap getProperties() { + // Return a dictionary of property name-object pairs + LinkedHashMap npps = new LinkedHashMap<>(); + for(Property p: getPropertiesObjects()) { + npps.put(p.getName(),p); + } + return npps; + } + + /** + * Get the property value by name + * @param pname - the property name for capability + * @return the property value for this name + */ + public Object getPropertyValue(String pname) { + // Return the value of a given property name + LinkedHashMap props = getProperties(); + if(props != null && props.get(pname) != null) { + return props.get(name).getValue(); + } + return null; + } + + /** + * Get the name for capability + * @return the name for capability + */ + public String getName() { + return name; + } + + /** + * Get the definition for capability + * @return CapabilityTypeDef - contain definition for capability + */ + public CapabilityTypeDef getDefinition() { + return _definition; + } + + /** + * Set the property for capability + * @param pname - the property name for capability to set + * @param pvalue - the property valiue for capability to set + */ + public void setProperty(String pname,Object pvalue) { + _properties.put(pname,pvalue); + } + + @Override + public String toString() { + return "CapabilityAssignment{" + + "name='" + name + '\'' + + ", _properties=" + _properties + + ", _definition=" + _definition + + '}'; + } +} + +/*python + +from toscaparser.properties import Property + + +class CapabilityAssignment(object): + '''TOSCA built-in capabilities type.''' + + def __init__(self, name, properties, definition): + self.name = name + self._properties = properties + self.definition = definition + + def get_properties_objects(self): + '''Return a list of property objects.''' + properties = [] + 
props = self._properties + if props: + for name, value in props.items(): + props_def = self.definition.get_properties_def() + if props_def and name in props_def: + properties.append(Property(name, value, + props_def[name].schema)) + return properties + + def get_properties(self): + '''Return a dictionary of property name-object pairs.''' + return {prop.name: prop + for prop in self.get_properties_objects()} + + def get_property_value(self, name): + '''Return the value of a given property name.''' + props = self.get_properties() + if props and name in props: + return props[name].value +*/ diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/CapabilityAssignments.java b/src/main/java/org/openecomp/sdc/toscaparser/api/CapabilityAssignments.java new file mode 100644 index 0000000..3397960 --- /dev/null +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/CapabilityAssignments.java @@ -0,0 +1,51 @@ +package org.openecomp.sdc.toscaparser.api; + +import org.openecomp.sdc.toscaparser.api.CapabilityAssignment; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +public class CapabilityAssignments { + + private Map capabilityAssignments; + + public CapabilityAssignments(Map capabilityAssignments) { + this.capabilityAssignments = capabilityAssignments != null ? new HashMap<>(capabilityAssignments) : new HashMap<>(); + } + + /** + * Get all capability assignments for node template.
+ * This object can be either the original one, holding all capability assignments for this node template,or a filtered one, holding a filtered subset.
+ * @return list of capability assignments for the node template.
+ * If there are no capability assignments, empty list is returned. + */ + public List getAll() { + return new ArrayList<>(capabilityAssignments.values()); + } + + /** + * Filter capability assignments by capability tosca type. + * @param type - The tosca type of capability assignments. + * @return CapabilityAssignments object, containing capability assignments of this type.
+ * If no such found, filtering will result in an empty collection. + */ + public CapabilityAssignments getCapabilitiesByType(String type) { + Map capabilityAssignmentsMap = capabilityAssignments.entrySet().stream() + .filter(cap -> cap.getValue().getDefinition().getType().equals(type)).collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + + return new CapabilityAssignments(capabilityAssignmentsMap); + } + + /** + * Get capability assignment by capability name. + * @param name - The name of capability assignment + * @return capability assignment with this name, or null if no such capability assignment was found. + */ + public CapabilityAssignment getCapabilityByName(String name) { + return capabilityAssignments.get(name); + } + +} diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/EntityTemplate.java b/src/main/java/org/openecomp/sdc/toscaparser/api/EntityTemplate.java index e896905..9220dac 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/EntityTemplate.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/EntityTemplate.java @@ -2,9 +2,9 @@ package org.openecomp.sdc.toscaparser.api; import java.util.ArrayList; import java.util.LinkedHashMap; +import java.util.List; import java.util.Map; -import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; import org.openecomp.sdc.toscaparser.api.elements.*; import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; @@ -47,8 +47,8 @@ public abstract class EntityTemplate { protected StatefulEntityType typeDefinition; private ArrayList _properties; private ArrayList _interfaces; - private ArrayList _requirements; - private ArrayList _capabilities; + private ArrayList _requirements; + private ArrayList _capabilities; // dummy constructor for subclasses that don't want super public EntityTemplate() { @@ -151,18 +151,40 @@ public abstract class EntityTemplate { } @SuppressWarnings("unchecked") - public ArrayList getRequirements() { + public RequirementAssignments 
getRequirements() { if(_requirements == null) { - _requirements = new ArrayList(); - Object ob = ((EntityType)typeDefinition).getValue(REQUIREMENTS,entityTpl,false); - if(ob != null) { - _requirements.addAll((ArrayList)ob); - } - + _requirements = _createRequirements(); } - return _requirements; + return new RequirementAssignments(_requirements); } + private ArrayList _createRequirements() { + ArrayList reqs = new ArrayList<>(); + ArrayList> requirements = (ArrayList>) + typeDefinition.getValue(REQUIREMENTS,entityTpl,false); + if(requirements == null) { + requirements = new ArrayList<>(); + } + for (Map req: requirements) { + for(String reqName: req.keySet()) { + Object reqItem = req.get(reqName); + if(reqItem instanceof LinkedHashMap) { + Object rel = ((LinkedHashMap)reqItem).get("relationship"); +// LinkedHashMap relationship = rel instanceof LinkedHashMap ? (LinkedHashMap) rel : null; + String nodeName = ((LinkedHashMap)reqItem).get("node").toString(); + Object capability = ((LinkedHashMap)reqItem).get("capability"); + String capabilityString = capability != null ? 
capability.toString() : null; + + reqs.add(new RequirementAssignment(reqName, nodeName, capabilityString, rel)); + } else if (reqItem instanceof String) { //short notation + String nodeName = String.valueOf(reqItem); + reqs.add(new RequirementAssignment(reqName, nodeName)); + } + } + } + return reqs; + } + public ArrayList getPropertiesObjects() { // Return properties objects for this template if(_properties ==null) { @@ -192,7 +214,7 @@ public abstract class EntityTemplate { return _interfaces; } - public ArrayList getCapabilitiesObjects() { + public ArrayList getCapabilitiesObjects() { // Return capabilities objects for this template if(_capabilities == null) { _capabilities = _createCapabilities(); @@ -201,12 +223,12 @@ public abstract class EntityTemplate { } - public LinkedHashMap getCapabilities() { - LinkedHashMap caps = new LinkedHashMap(); - for(Capability cap: getCapabilitiesObjects()) { + public CapabilityAssignments getCapabilities() { + LinkedHashMap caps = new LinkedHashMap(); + for(CapabilityAssignment cap: getCapabilitiesObjects()) { caps.put(cap.getName(),cap); } - return caps; + return new CapabilityAssignments(caps); } public boolean isDerivedFrom(String typeStr) { @@ -226,8 +248,8 @@ public abstract class EntityTemplate { } @SuppressWarnings("unchecked") - private ArrayList _createCapabilities() { - ArrayList capability = new ArrayList(); + private ArrayList _createCapabilities() { + ArrayList capability = new ArrayList(); LinkedHashMap caps = (LinkedHashMap) ((EntityType)typeDefinition).getValue(CAPABILITIES,entityTpl,true); if(caps != null) { @@ -257,7 +279,7 @@ public abstract class EntityTemplate { if(pp != null) { properties.putAll(pp); } - Capability cap = new Capability(name, properties, c); + CapabilityAssignment cap = new CapabilityAssignment(name, properties, c); capability.add(cap); } } @@ -292,7 +314,7 @@ public abstract class EntityTemplate { for(Map.Entry me: capabilities.entrySet()) { String cap = me.getKey(); LinkedHashMap props 
= (LinkedHashMap)me.getValue(); - Capability capability = getCapability(cap); + CapabilityAssignment capability = getCapability(cap); if(capability == null) { continue; } @@ -485,15 +507,11 @@ public abstract class EntityTemplate { return interfaces; } - public Capability getCapability(String name) { + public CapabilityAssignment getCapability(String name) { // Provide named capability // :param name: name of capability // :return: capability object if found, None otherwise - LinkedHashMap caps = getCapabilities(); - if(caps != null) { - return caps.get(name); - } - return null; + return getCapabilities().getCapabilityByName(name); } // getter @@ -666,7 +684,7 @@ class EntityTemplate(object): if 'properties' in props and props['properties']: properties.update(props['properties']) - cap = Capability(name, properties, c) + cap = CapabilityAssignment(name, properties, c) capability.append(cap) return capability diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/NodeTemplate.java b/src/main/java/org/openecomp/sdc/toscaparser/api/NodeTemplate.java index 11db32b..6606068 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/NodeTemplate.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/NodeTemplate.java @@ -2,6 +2,7 @@ package org.openecomp.sdc.toscaparser.api; import java.util.ArrayList; import java.util.LinkedHashMap; +import java.util.List; import java.util.Map; import org.openecomp.sdc.toscaparser.api.elements.*; @@ -46,17 +47,14 @@ public class NodeTemplate extends EntityTemplate { @SuppressWarnings("unchecked") public LinkedHashMap getRelationships() { if(_relationships.isEmpty()) { - ArrayList requires = getRequirements(); - if(requires != null && requires instanceof ArrayList) { - for(Object ro: requires) { - LinkedHashMap r = (LinkedHashMap)ro; - for(Map.Entry me: r.entrySet()) { - LinkedHashMap explicit = _getExplicitRelationship(r,me.getValue()); - if(explicit != null) { - // _relationships.putAll(explicit)... 
- for(Map.Entry ee: explicit.entrySet()) { - _relationships.put(ee.getKey(), ee.getValue()); - } + List requires = getRequirements().getAll(); + if(requires != null && requires instanceof List) { + for(RequirementAssignment r: requires) { + LinkedHashMap explicit = _getExplicitRelationship(r); + if(explicit != null) { + // _relationships.putAll(explicit)... + for(Map.Entry ee: explicit.entrySet()) { + _relationships.put(ee.getKey(), ee.getValue()); } } } @@ -66,7 +64,7 @@ public class NodeTemplate extends EntityTemplate { } @SuppressWarnings("unchecked") - private LinkedHashMap _getExplicitRelationship(LinkedHashMap req,Object value) { + private LinkedHashMap _getExplicitRelationship(RequirementAssignment req) { // Handle explicit relationship // For example, @@ -75,13 +73,7 @@ public class NodeTemplate extends EntityTemplate { // relationship: tosca.relationships.HostedOn LinkedHashMap explicitRelation = new LinkedHashMap(); - String node; - if(value instanceof LinkedHashMap) { - node = (String)((LinkedHashMap)value).get("node"); - } - else { - node = (String)value; - } + String node = req.getNodeTemplateName(); if(node != null && !node.isEmpty()) { //msg = _('Lookup by TOSCA types is not supported. 
' @@ -105,35 +97,33 @@ public class NodeTemplate extends EntityTemplate { return null; } NodeTemplate relatedTpl = new NodeTemplate(node,templates,customDef,null,null); - Object relationship = null; + Object relationship = req.getRelationship(); String relationshipString = null; - if(value instanceof LinkedHashMap) { - relationship = ((LinkedHashMap)value).get("relationship"); - // here relationship can be a string or a LHM with 'type': - } - // check if its type has relationship defined +// // here relationship can be a string or a LHM with 'type': + + // check if its type has relationship defined if(relationship == null) { ArrayList parentReqs = ((NodeType)typeDefinition).getAllRequirements(); if(parentReqs == null) { ThreadLocalsHolder.getCollector().appendException("ValidationError: parent_req is null"); } else { - for(String key: req.keySet()) { - boolean bFoundRel = false; +// for(String key: req.keySet()) { +// boolean bFoundRel = false; for(Object rdo: parentReqs) { LinkedHashMap reqDict = (LinkedHashMap)rdo; - LinkedHashMap relDict = (LinkedHashMap)reqDict.get(key); + LinkedHashMap relDict = (LinkedHashMap)reqDict.get(req.getName()); if(relDict != null) { relationship = relDict.get("relationship"); //BUG-python??? need to break twice? 
- bFoundRel = true; +// bFoundRel = true; break; } } - if(bFoundRel) { - break; - } - } +// if(bFoundRel) { +// break; +// } +// } } } @@ -208,8 +198,9 @@ public class NodeTemplate extends EntityTemplate { } @SuppressWarnings("unchecked") - private void _addRelationshipTemplate(LinkedHashMap requirement, String rtype, NodeTemplate source) { - LinkedHashMap req = (LinkedHashMap)CopyUtils.copyLhmOrAl(requirement); + private void _addRelationshipTemplate(RequirementAssignment requirement, String rtype, NodeTemplate source) { + LinkedHashMap req = new LinkedHashMap<>(); + req.put("relationship", CopyUtils.copyLhmOrAl(requirement.getRelationship())); req.put("type",rtype); RelationshipTemplate tpl = new RelationshipTemplate(req, rtype, customDef, this, source); relationshipTpl.add(tpl); diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/RequirementAssignment.java b/src/main/java/org/openecomp/sdc/toscaparser/api/RequirementAssignment.java new file mode 100644 index 0000000..799a8ee --- /dev/null +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/RequirementAssignment.java @@ -0,0 +1,85 @@ +package org.openecomp.sdc.toscaparser.api; + +import java.util.Map; + +public class RequirementAssignment { + + private String name; + private String nodeName; + private String capabilityName; + private Object relationship; + + public RequirementAssignment(String reqName, String nodeName) { + this.name = reqName; + this.nodeName = nodeName; + } + + public RequirementAssignment(String reqName, String nodeName, String capabilityName) { + this.name = reqName; + this.nodeName = nodeName; + this.capabilityName = capabilityName; + } + + public RequirementAssignment(String reqName, String nodeName, String capabilityName, Object relationship) { + this.name = reqName; + this.nodeName = nodeName; + this.capabilityName = capabilityName; + this.relationship = relationship; + } + + /** + * Get the name for requirement assignment. + * @return the name for requirement assignment. 
+ */ + public String getName() { + return name; + } + + /** + * Set the name for requirement + * @param name - the name for requirement to set + */ + public void setName(String name) { + this.name = name; + } + + /** + * Get the node name for requirement assignment. + * @return the node name for requirement + */ + public String getNodeTemplateName() { + return nodeName; + } + + /** + * Set the node name for requirement + * @param nodeName - the node name for requirement to set + */ + public void setNodeTemplateName(String nodeName) { + this.nodeName = nodeName; + } + + /** + * Get the capability name for requirement assignment. + * @return the capability name for requirement + */ + public String getCapabilityName() { + return capabilityName; + } + + /** + * Set the capability name for requirement assignment. + * @param capabilityName - the capability name for requirement to set + */ + public void setCapabilityName(String capabilityName) { + this.capabilityName = capabilityName; + } + + /** + * Get the relationship object for requirement + * @return the relationship object for requirement + */ + public Object getRelationship() { + return relationship; + } +} diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/RequirementAssignments.java b/src/main/java/org/openecomp/sdc/toscaparser/api/RequirementAssignments.java new file mode 100644 index 0000000..7991f3c --- /dev/null +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/RequirementAssignments.java @@ -0,0 +1,39 @@ +package org.openecomp.sdc.toscaparser.api; + +import org.openecomp.sdc.toscaparser.api.RequirementAssignment; + +import java.util.ArrayList; +import java.util.List; +import java.util.stream.Collectors; + +public class RequirementAssignments { + + private List requirementAssignmentList; + + public RequirementAssignments(List requirementAssignments) { + this.requirementAssignmentList = requirementAssignments != null ? 
new ArrayList<>(requirementAssignments) : new ArrayList<>(); + } + + /** + * Get all requirement assignments for Node Template.
+ * This object can be either the original one, holding all requirement assignments for this node template,or a filtered one, holding a filtered subset.
+ * @return list of requirement assignments for the node template.
+ * If there are no requirement assignments, empty list is returned. + */ + public List getAll() { + return new ArrayList<>(requirementAssignmentList); + } + + /** + * Filter requirement assignments by requirement name. + * @param reqName - The name of requirement + * @return RequirementAssignments object, containing requirement assignments of this type.
+ * If no such found, filtering will result in an empty collection. + */ + public RequirementAssignments getRequirementsByName(String reqName) { + List requirementAssignments = requirementAssignmentList.stream() + .filter(req -> req.getName().equals(reqName)).collect(Collectors.toList()); + + return new RequirementAssignments(requirementAssignments); + } +} diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/SubstitutionMappings.java b/src/main/java/org/openecomp/sdc/toscaparser/api/SubstitutionMappings.java index b9c2238..a68f9fb 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/SubstitutionMappings.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/SubstitutionMappings.java @@ -1,10 +1,7 @@ package org.openecomp.sdc.toscaparser.api; -import java.util.ArrayList; -import java.util.HashSet; -import java.util.LinkedHashMap; +import java.util.*; -import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; import org.openecomp.sdc.toscaparser.api.elements.NodeType; import org.openecomp.sdc.toscaparser.api.elements.PropertyDef; import org.openecomp.sdc.toscaparser.api.parameters.Input; @@ -217,13 +214,13 @@ public class SubstitutionMappings { // The capabilities must be in node template which be mapped. 
LinkedHashMap tplsCapabilities = (LinkedHashMap)subMappingDef.get(CAPABILITIES); - LinkedHashMap nodeCapabilities = null; + List nodeCapabilities = null; if(subMappedNodeTemplate != null) { - nodeCapabilities = subMappedNodeTemplate.getCapabilities(); + nodeCapabilities = subMappedNodeTemplate.getCapabilities().getAll(); } if(nodeCapabilities != null) { - for(String cap: nodeCapabilities.keySet()) { - if(tplsCapabilities != null && tplsCapabilities.get(cap) == null) { + for(CapabilityAssignment cap: nodeCapabilities) { + if(tplsCapabilities != null && tplsCapabilities.get(cap.getName()) == null) { ; //pass // ExceptionCollector.appendException( // UnknownFieldError(what='SubstitutionMappings', @@ -241,15 +238,13 @@ public class SubstitutionMappings { // The requirements must be in node template which be mapped. LinkedHashMap tplsRequirements = (LinkedHashMap)subMappingDef.get(REQUIREMENTS); - ArrayList nodeRequirements = null; + List nodeRequirements = null; if(subMappedNodeTemplate != null) { - nodeRequirements = subMappedNodeTemplate.getRequirements(); + nodeRequirements = subMappedNodeTemplate.getRequirements().getAll(); } if(nodeRequirements != null) { - for(Object ro: nodeRequirements) { - ArrayList al = new ArrayList( - ((LinkedHashMap)ro).keySet()); - String cap = al.get(0); + for(RequirementAssignment ro: nodeRequirements) { + String cap = ro.getName(); if(tplsRequirements != null && tplsRequirements.get(cap) == null) { ; //pass // ExceptionCollector.appendException( diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/TopologyTemplate.java b/src/main/java/org/openecomp/sdc/toscaparser/api/TopologyTemplate.java index 709dc81..afedfdb 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/TopologyTemplate.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/TopologyTemplate.java @@ -1,11 +1,7 @@ package org.openecomp.sdc.toscaparser.api; -import java.util.ArrayList; -import java.util.HashSet; -import java.util.LinkedHashMap; -import 
java.util.Map; +import java.util.*; -import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; import org.openecomp.sdc.toscaparser.api.elements.InterfacesDef; import org.openecomp.sdc.toscaparser.api.elements.NodeType; import org.openecomp.sdc.toscaparser.api.elements.RelationshipType; @@ -415,27 +411,20 @@ public class TopologyTemplate { } } } - if(nt.getRequirements() != null && - nt.getRequirements() instanceof ArrayList) { - for(Object oreq: nt.getRequirements()) { - LinkedHashMap req = (LinkedHashMap)oreq; - LinkedHashMap rel = req; - for(String reqName: req.keySet()) { - Object reqItem = req.get(reqName); - if(reqItem instanceof LinkedHashMap) { - Object t = ((LinkedHashMap)reqItem).get("relationship"); - // it can be a string or a LHM... - if(t instanceof LinkedHashMap) { - rel = (LinkedHashMap)t; - } - else { - // we set it to null to fail the next test - // and avoid the get("proprties") - rel = null; - } - break; - } - } + if(nt.getRequirements() != null) { + for(RequirementAssignment req: nt.getRequirements().getAll()) { + LinkedHashMap rel; + Object t = req.getRelationship(); + // it can be a string or a LHM... 
+ if(t instanceof LinkedHashMap) { + rel = (LinkedHashMap)t; + } + else { + // we set it to null to fail the next test + // and avoid the get("proprties") + rel = null; + } + if(rel != null && rel.get("properties") != null) { LinkedHashMap relprops = (LinkedHashMap)rel.get("properties"); @@ -448,7 +437,7 @@ public class TopologyTemplate { } } if(nt.getCapabilitiesObjects() != null) { - for(Capability cap: nt.getCapabilitiesObjects()) { + for(CapabilityAssignment cap: nt.getCapabilitiesObjects()) { if(cap.getPropertiesObjects() != null) { for(Property prop: cap.getPropertiesObjects()) { Object propvalue = Function.getFunction(this,nt,prop.getValue(), resolveGetInput); diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/Metadata.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/Metadata.java index 6cf84a5..b153876 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/Metadata.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/Metadata.java @@ -1,36 +1,34 @@ package org.openecomp.sdc.toscaparser.api.elements; +import java.util.AbstractMap; import java.util.HashMap; import java.util.Map; +import java.util.stream.Collectors; public class Metadata { private final Map metadataMap; public Metadata(Map metadataMap) { - this.metadataMap = metadataMap; + this.metadataMap = metadataMap != null ? metadataMap : new HashMap<>(); } public String getValue(String key) { - return !isEmpty() ? 
String.valueOf(this.metadataMap.get(key)) : null; - } - - public Map getPropertyMap() { - if(metadataMap == null){ - return null; - } - return new HashMap<>(metadataMap); - } - - public void setValue(String key, Object value) { - if (!isEmpty()) { - this.metadataMap.put(key, value); + + Object obj = this.metadataMap.get(key); + if (obj != null){ + return String.valueOf(obj); } + return null; } - - private boolean isEmpty() { - return this.metadataMap == null || this.metadataMap.size() == 0; + /** + * Get all properties of a Metadata object.
+ * This object represents the "metadata" section of some entity. + * @return all properties of this Metadata, as a key-value. + */ + public Map getAllProperties() { + return metadataMap.entrySet().stream().map(e-> new AbstractMap.SimpleEntry(e.getKey(), String.valueOf(e.getValue()))).collect(Collectors.toMap(Map.Entry::getKey,Map.Entry::getValue)); } @Override diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetAttribute.java b/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetAttribute.java index 549073b..8a3d0b6 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetAttribute.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetAttribute.java @@ -4,7 +4,6 @@ import java.util.ArrayList; import java.util.LinkedHashMap; import org.openecomp.sdc.toscaparser.api.*; -import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; import org.openecomp.sdc.toscaparser.api.elements.AttributeDef; import org.openecomp.sdc.toscaparser.api.elements.CapabilityTypeDef; import org.openecomp.sdc.toscaparser.api.elements.DataType; @@ -164,22 +163,17 @@ public class GetAttribute extends Function { if(nodeTemplate != null) { LinkedHashMap hostedOnRel = (LinkedHashMap)EntityType.TOSCA_DEF.get(HOSTED_ON); - for(Object ro: nodeTemplate.getRequirements()) { - if(ro != null && ro instanceof LinkedHashMap) { - LinkedHashMap r = (LinkedHashMap)ro; - for(String requirement: r.keySet()) { - String targetName = (String)r.get(requirement); - NodeTemplate targetNode = _findNodeTemplate(targetName); - NodeType targetType = (NodeType)targetNode.getTypeDefinition(); - for(CapabilityTypeDef capability: targetType.getCapabilitiesObjects()) { + for(RequirementAssignment r: nodeTemplate.getRequirements().getAll()) { + String targetName = r.getNodeTemplateName(); + NodeTemplate targetNode = _findNodeTemplate(targetName); + NodeType targetType = (NodeType)targetNode.getTypeDefinition(); + for(CapabilityTypeDef capability: 
targetType.getCapabilitiesObjects()) { // if(((ArrayList)hostedOnRel.get("valid_target_types")).contains(capability.getType())) { - if(capability.inheritsFrom((ArrayList)hostedOnRel.get("valid_target_types"))) { - if(_attributeExistsInType(targetType)) { - return targetNode; - } - return _findHostContainingAttribute(targetName); - } + if(capability.inheritsFrom((ArrayList)hostedOnRel.get("valid_target_types"))) { + if(_attributeExistsInType(targetType)) { + return targetNode; } + return _findHostContainingAttribute(targetName); } } } @@ -246,16 +240,11 @@ public class GetAttribute extends Function { NodeTemplate nodeTpl = _findNodeTemplate((String)args.get(0)); // Find attribute in node template's requirements - for(Object ro: nodeTpl.getRequirements()) { - if(ro != null && ro instanceof LinkedHashMap) { - LinkedHashMap r = (LinkedHashMap)ro; - for(String req: r.keySet()) { - String nodeName = (String)r.get(req); - if(req.equals(reqOrCap)) { - NodeTemplate nodeTemplate = _findNodeTemplate(nodeName); - return _getCapabilityAttribute(nodeTemplate,req,attrName); - } - } + for(RequirementAssignment r: nodeTpl.getRequirements().getAll()) { + String nodeName = r.getNodeTemplateName(); + if(r.getName().equals(reqOrCap)) { + NodeTemplate nodeTemplate = _findNodeTemplate(nodeName); + return _getCapabilityAttribute(nodeTemplate,r.getName(),attrName); } } // If requirement was not found, look in node template's capabilities @@ -266,9 +255,9 @@ public class GetAttribute extends Function { String capabilityName, String attrName) { // Gets a node template capability attribute - LinkedHashMap caps = nodeTemplate.getCapabilities(); - if(caps != null && caps.keySet().contains(capabilityName)) { - Capability cap = caps.get(capabilityName); + CapabilityAssignment cap = nodeTemplate.getCapabilities().getCapabilityByName(capabilityName); + + if(cap != null) { AttributeDef attribute = null; LinkedHashMap attrs = cap.getDefinition().getAttributesDef(); @@ -283,7 +272,7 @@ public class 
GetAttribute extends Function { return attribute; } String msg = String.format( - "Requirement/Capability \"%s\" referenced from node template \"%s\" was not found in node template \"%s\"", + "Requirement/CapabilityAssignment \"%s\" referenced from node template \"%s\" was not found in node template \"%s\"", capabilityName,((NodeTemplate)context).getName(),nodeTemplate.getName()); ThreadLocalsHolder.getCollector().appendException("KeyError: " + msg); return null; @@ -518,7 +507,7 @@ def _get_capability_attribute(self, 'ntpl1': node_template.name, 'ntpl2': self.context.name})) return attribute - msg = _('Requirement/Capability "{0}" referenced from node template ' + msg = _('Requirement/CapabilityAssignment "{0}" referenced from node template ' '"{1}" was not found in node template "{2}".').format( capability_name, self.context.name, diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetProperty.java b/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetProperty.java index 71420e8..41495bc 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetProperty.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetProperty.java @@ -4,7 +4,6 @@ import java.util.ArrayList; import java.util.LinkedHashMap; import org.openecomp.sdc.toscaparser.api.*; -import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; import org.openecomp.sdc.toscaparser.api.elements.CapabilityTypeDef; import org.openecomp.sdc.toscaparser.api.elements.EntityType; import org.openecomp.sdc.toscaparser.api.elements.NodeType; @@ -105,17 +104,12 @@ public class GetProperty extends Function { return null; } // look for property in node template's requirements - for(Object r: nodeTpl.getRequirements()) { - if(r instanceof LinkedHashMap) { - LinkedHashMap rlist = (LinkedHashMap)r; - for(String req: rlist.keySet()) { - String nodeName = (String)rlist.get(req); - if(req.equals(reqOrCap)) { - NodeTemplate nodeTemplate = 
_findNodeTemplate(nodeName); - return _getCapabilityProperty(nodeTemplate,req,propertyName,true); - } - } - } + for(RequirementAssignment req: nodeTpl.getRequirements().getAll()) { + String nodeName = req.getNodeTemplateName(); + if(req.getName().equals(reqOrCap)) { + NodeTemplate nodeTemplate = _findNodeTemplate(nodeName); + return _getCapabilityProperty(nodeTemplate,req.getName(),propertyName,true); + } } // If requirement was not found, look in node template's capabilities return _getCapabilityProperty(nodeTpl,reqOrCap,propertyName,true); @@ -128,9 +122,8 @@ public class GetProperty extends Function { // Gets a node template capability property Object property = null; - LinkedHashMap caps = nodeTemplate.getCapabilities(); - if(caps != null && caps.get(capabilityName) != null) { - Capability cap = caps.get(capabilityName); + CapabilityAssignment cap = nodeTemplate.getCapabilities().getCapabilityByName(capabilityName); + if(cap != null) { LinkedHashMap props = cap.getProperties(); if(props != null && props.get(propertyName) != null) { property = ((Property)props.get(propertyName)).getValue(); @@ -144,7 +137,7 @@ public class GetProperty extends Function { } if(throwErrors) { ThreadLocalsHolder.getCollector().appendException(String.format( - "KeyError: Requirement/Capability \"%s\" referenced from node template \"%s\" was not found in node template \"%s\"", + "KeyError: Requirement/CapabilityAssignment \"%s\" referenced from node template \"%s\" was not found in node template \"%s\"", capabilityName,((NodeTemplate)context).getName(),nodeTemplate.getName())); } @@ -262,30 +255,26 @@ public class GetProperty extends Function { NodeTemplate nodeTemplate = _findNodeTemplate(nodeTemplateName); LinkedHashMap hostedOnRel = (LinkedHashMap) EntityType.TOSCA_DEF.get(HOSTED_ON); - for(Object r: nodeTemplate.getRequirements()) { - if(r instanceof LinkedHashMap) { - LinkedHashMap rlist = (LinkedHashMap)r; - for(String requirement: rlist.keySet()) { - String targetName = 
(String)rlist.get(requirement); - NodeTemplate targetNode = _findNodeTemplate(targetName); - NodeType targetType = (NodeType)targetNode.getTypeDefinition(); - for(CapabilityTypeDef capDef: targetType.getCapabilitiesObjects()) { - if(capDef.inheritsFrom((ArrayList)hostedOnRel.get("valid_target_types"))) { - if(_propertyExistsInType(targetType)) { - return targetNode; - } - // If requirement was not found, look in node - // template's capabilities - if(args.size() > 2 && - _getCapabilityProperty(targetNode,(String)args.get(1),(String)args.get(2),false) != null) { - return targetNode; - } - - return _findHostContainingProperty(targetName); - } - } - } - } + for(RequirementAssignment requirement: nodeTemplate.getRequirements().getAll()) { + String targetName = requirement.getNodeTemplateName(); + NodeTemplate targetNode = _findNodeTemplate(targetName); + NodeType targetType = (NodeType)targetNode.getTypeDefinition(); + for(CapabilityTypeDef capDef: targetType.getCapabilitiesObjects()) { + if(capDef.inheritsFrom((ArrayList)hostedOnRel.get("valid_target_types"))) { + if(_propertyExistsInType(targetType)) { + return targetNode; + } + // If requirement was not found, look in node + // template's capabilities + if(args.size() > 2 && + _getCapabilityProperty(targetNode,(String)args.get(1),(String)args.get(2),false) != null) { + return targetNode; + } + + return _findHostContainingProperty(targetName); + } + } + } return null; } @@ -466,7 +455,7 @@ def _get_capability_property(self, 'ntpl1': node_template.name, 'ntpl2': self.context.name})) return property - msg = _('Requirement/Capability "{0}" referenced from node template ' + msg = _('Requirement/CapabilityAssignment "{0}" referenced from node template ' '"{1}" was not found in node template "{2}".').format( capability_name, self.context.name, diff --git a/src/main/resources/TOSCA_definition_1_0.yaml b/src/main/resources/TOSCA_definition_1_0.yaml index 554b7b6..c5a4d0f 100644 --- 
a/src/main/resources/TOSCA_definition_1_0.yaml +++ b/src/main/resources/TOSCA_definition_1_0.yaml @@ -13,7 +13,7 @@ ########################################################################## # The content of this file reflects TOSCA Simple Profile in YAML version # 1.0.0. It describes the definition for TOSCA types including Node Type, -# Relationship Type, Capability Type and Interfaces. +# Relationship Type, CapabilityAssignment Type and Interfaces. ########################################################################## tosca_definitions_version: tosca_simple_yaml_1_0 @@ -489,8 +489,8 @@ relationship_types: valid_target_types: [ tosca.capabilities.network.Bindable ] ########################################################################## -# Capability Type. -# A Capability Type is a reusable entity that describes a kind of +# CapabilityAssignment Type. +# A CapabilityAssignment Type is a reusable entity that describes a kind of # capability that a Node Type can declare to expose. ########################################################################## capability_types: diff --git a/src/main/resources/extensions/nfv/TOSCA_nfv_definition_1_0.yaml b/src/main/resources/extensions/nfv/TOSCA_nfv_definition_1_0.yaml index 365d70e..8b08837 100644 --- a/src/main/resources/extensions/nfv/TOSCA_nfv_definition_1_0.yaml +++ b/src/main/resources/extensions/nfv/TOSCA_nfv_definition_1_0.yaml @@ -13,7 +13,7 @@ ########################################################################## # The content of this file reflects TOSCA NFV Profile in YAML version # 1.0.0. It describes the definition for TOSCA NFV types including Node Type, -# Relationship Type, Capability Type and Interfaces. +# Relationship Type, CapabilityAssignment Type and Interfaces. 
########################################################################## tosca_definitions_version: tosca_simple_profile_for_nfv_1_0_0 @@ -139,8 +139,8 @@ relationship_types: valid_target_types: [ tosca.capabilities.nfv.Forwarder] ########################################################################## -# Capability Type. -# A Capability Type is a reusable entity that describes a kind of +# CapabilityAssignment Type. +# A CapabilityAssignment Type is a reusable entity that describes a kind of # capability that a Node Type can declare to expose. ########################################################################## -- cgit 1.2.3-korg From a58e476f038345be859276db45a50a3f3bdcb57e Mon Sep 17 00:00:00 2001 From: maopengzhang Date: Wed, 30 Aug 2017 18:29:10 +0800 Subject: Resolve the Nullpoint issue add protect for null point issue Change-Id: Ic27231ca7d82158cd103af91f229c85e5edfcdc2 Issue-ID: SDC-261 Signed-off-by: maopengzhang --- .../sdc/toscaparser/api/utils/ValidateUtils.java | 86 +++++++++++++--------- 1 file changed, 51 insertions(+), 35 deletions(-) diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/utils/ValidateUtils.java b/src/main/java/org/openecomp/sdc/toscaparser/api/utils/ValidateUtils.java index 291316f..9909685 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/utils/ValidateUtils.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/utils/ValidateUtils.java @@ -33,45 +33,55 @@ public class ValidateUtils { } public static Object validateNumeric(Object value) { - if(!(value instanceof Number)) { - ThreadLocalsHolder.getCollector().appendException(String.format( - "ValueError: \"%s\" is not a numeric",value.toString())); + if(value != null) { + if (!(value instanceof Number)) { + ThreadLocalsHolder.getCollector().appendException(String.format( + "ValueError: \"%s\" is not a numeric", value.toString())); + } } return value; } public static Object validateInteger(Object value) { - if(!(value instanceof Integer)) { - // allow 
"true" and "false" - if(value instanceof Boolean) { - return (Boolean)value ? 1 : 0; + if(value != null) { + if (!(value instanceof Integer)) { + // allow "true" and "false" + if (value instanceof Boolean) { + return (Boolean) value ? 1 : 0; + } + ThreadLocalsHolder.getCollector().appendException(String.format( + "ValueError: \"%s\" is not an integer", value.toString())); } - ThreadLocalsHolder.getCollector().appendException(String.format( - "ValueError: \"%s\" is not an integer",value.toString())); } return value; } public static Object validateFloat(Object value) { - if(!(value instanceof Float || value instanceof Double)) { - ThreadLocalsHolder.getCollector().appendException(String.format( - "ValueError: \"%s\" is not a float",value.toString())); + if(value != null) { + if (!(value instanceof Float || value instanceof Double)) { + ThreadLocalsHolder.getCollector().appendException(String.format( + "ValueError: \"%s\" is not a float", value.toString())); + } } return value; } public static Object validateString(Object value) { - if(!(value instanceof String)) { - ThreadLocalsHolder.getCollector().appendException(String.format( - "ValueError: \'%s\' is not a string",value.toString())); + if(value != null) { + if (!(value instanceof String)) { + ThreadLocalsHolder.getCollector().appendException(String.format( + "ValueError: \'%s\' is not a string", value.toString())); + } } return value; } public static Object validateList(Object value) { - if(!(value instanceof ArrayList)) { - ThreadLocalsHolder.getCollector().appendException(String.format( - "ValueError: \"%s\" is not a list",value.toString())); + if(value != null) { + if (!(value instanceof ArrayList)) { + ThreadLocalsHolder.getCollector().appendException(String.format( + "ValueError: \"%s\" is not a list", value.toString())); + } } return value; } @@ -209,25 +219,29 @@ public class ValidateUtils { } public static Object validateMap(Object ob) { - if(!(ob instanceof LinkedHashMap)) { - 
ThreadLocalsHolder.getCollector().appendException(String.format( - "ValueError\"%s\" is not a map.",ob.toString())); + if(ob != null) { + if (!(ob instanceof LinkedHashMap)) { + ThreadLocalsHolder.getCollector().appendException(String.format( + "ValueError\"%s\" is not a map.", ob.toString())); + } } return ob; } public static Object validateBoolean(Object value) { - if(value instanceof Boolean) { - return value; - } - if(value instanceof String) { - String normalized = ((String)value).toLowerCase(); - if(normalized.equals("true") || normalized.equals("false")) { - return normalized.equals("true"); + if(value != null) { + if (value instanceof Boolean) { + return value; + } + if (value instanceof String) { + String normalized = ((String) value).toLowerCase(); + if (normalized.equals("true") || normalized.equals("false")) { + return normalized.equals("true"); + } } + ThreadLocalsHolder.getCollector().appendException(String.format( + "ValueError: \"%s\" is not a boolean", value.toString())); } - ThreadLocalsHolder.getCollector().appendException(String.format( - "ValueError: \"%s\" is not a boolean",value.toString())); return value; } @@ -248,11 +262,13 @@ public class ValidateUtils { */ // timestamps are loaded as Date objects by the YAML parser - if(!(value instanceof Date)) { - ThreadLocalsHolder.getCollector().appendException(String.format( - "ValueError: \"%s\" is not a valid timestamp", - value.toString())); - + if(value != null) { + if (!(value instanceof Date)) { + ThreadLocalsHolder.getCollector().appendException(String.format( + "ValueError: \"%s\" is not a valid timestamp", + value.toString())); + + } } return value; } -- cgit 1.2.3-korg From 3d50c4939ae90c6b99fbe66695c4d4f2c1c24132 Mon Sep 17 00:00:00 2001 From: maopengzhang Date: Thu, 7 Sep 2017 23:29:07 +0800 Subject: JTOSCA Build failed add profileid in the POM.xml Change-Id: Ibfed4f2a912f9392dcfcc3927d1fd93601245a44 Issue-ID: SDC-292 Signed-off-by: maopengzhang --- pom.xml | 2 +- 1 file changed, 1 
insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 31c6b64..cb7777a 100644 --- a/pom.xml +++ b/pom.xml @@ -25,7 +25,7 @@ /content/sites/site/org/openecomp/sdc/jtosca/${project.version} snapshots releases - + 176c31dfe190a -- cgit 1.2.3-korg From bb51c301ac6902ee7d3b78bebc6ab82399a9a6a9 Mon Sep 17 00:00:00 2001 From: Jessica Wagantall Date: Fri, 8 Sep 2017 13:05:56 -0700 Subject: Cleanup project's name in Sonar The name parameter in the root pom.xml should match the project name in gerrit to reflect consistency in Sonar. Change-Id: Iead6a2840047a3af5018d91a229b388cf538ac4b Issue-id: CIMAN-65 Signed-off-by: Jessica Wagantall --- pom.xml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index cb7777a..69fb8b1 100644 --- a/pom.xml +++ b/pom.xml @@ -5,6 +5,7 @@ org.openecomp.sdc.jtosca jtosca 1.1.10-SNAPSHOT + sdc-jtosca @@ -208,4 +209,4 @@ - \ No newline at end of file + -- cgit 1.2.3-korg From b0686a9ab5db28b0ee1fc3fea270667023626e5f Mon Sep 17 00:00:00 2001 From: Pavel Aharoni Date: Sun, 24 Sep 2017 16:35:27 +0300 Subject: [SDC-380] jtosca errors cutomization Change-Id: I18513daa0c3cdf99b5cf5b6732c7c5c57486c7d0 Signed-off-by: Pavel Aharoni --- pom.xml | 16 ++- .../openecomp/sdc/toscaparser/api/DataEntity.java | 31 +++--- .../sdc/toscaparser/api/DataEntity.java.orig | 2 +- .../sdc/toscaparser/api/EntityTemplate.java | 50 ++++----- .../org/openecomp/sdc/toscaparser/api/Group.java | 11 +- .../sdc/toscaparser/api/ImportsLoader.java | 101 ++++++++--------- .../sdc/toscaparser/api/NodeTemplate.java | 56 +++++----- .../org/openecomp/sdc/toscaparser/api/Policy.java | 11 +- .../openecomp/sdc/toscaparser/api/Repository.java | 23 ++-- .../sdc/toscaparser/api/SubstitutionMappings.java | 60 +++++----- .../sdc/toscaparser/api/TopologyTemplate.java | 26 +++-- .../sdc/toscaparser/api/TopologyTemplate.java.orig | 16 +-- .../sdc/toscaparser/api/ToscaTemplate.java | 78 ++++++------- .../openecomp/sdc/toscaparser/api/Triggers.java | 17 
+-- .../sdc/toscaparser/api/UnsupportedType.java | 11 +- .../toscaparser/api/common/ExceptionCollector.java | 122 --------------------- .../toscaparser/api/common/JToscaException.java | 12 +- .../api/common/JToscaValidationIssue.java | 35 ++++++ .../api/common/ValidationIssueCollector.java | 35 ++++++ .../sdc/toscaparser/api/elements/EntityType.java | 6 +- .../sdc/toscaparser/api/elements/GroupType.java | 23 ++-- .../toscaparser/api/elements/InterfacesDef.java | 11 +- .../sdc/toscaparser/api/elements/NodeType.java | 12 +- .../sdc/toscaparser/api/elements/PolicyType.java | 29 ++--- .../sdc/toscaparser/api/elements/PortSpec.java | 17 +-- .../sdc/toscaparser/api/elements/PropertyDef.java | 18 +-- .../toscaparser/api/elements/RelationshipType.java | 13 +-- .../sdc/toscaparser/api/elements/ScalarUnit.java | 22 ++-- .../api/elements/StatefulEntityType.java | 16 ++- .../toscaparser/api/elements/TypeValidation.java | 17 +-- .../api/elements/constraints/Constraint.java | 24 ++-- .../api/elements/constraints/GreaterOrEqual.java | 5 +- .../api/elements/constraints/GreaterThan.java | 7 +- .../api/elements/constraints/InRange.java | 15 +-- .../api/elements/constraints/Length.java | 7 +- .../api/elements/constraints/LessOrEqual.java | 7 +- .../api/elements/constraints/LessThan.java | 7 +- .../api/elements/constraints/MaxLength.java | 7 +- .../api/elements/constraints/MinLength.java | 7 +- .../api/elements/constraints/Pattern.java | 15 +-- .../api/elements/constraints/Schema.java | 19 ++-- .../api/elements/constraints/Schema.java.orig | 2 +- .../api/elements/constraints/ValidValues.java | 2 +- .../sdc/toscaparser/api/functions/Concat.java | 9 +- .../sdc/toscaparser/api/functions/Function.java | 2 +- .../toscaparser/api/functions/GetAttribute.java | 74 +++++++------ .../sdc/toscaparser/api/functions/GetInput.java | 14 +-- .../api/functions/GetOperationOutput.java | 51 ++++----- .../sdc/toscaparser/api/functions/GetProperty.java | 72 ++++++------ 
.../sdc/toscaparser/api/functions/Token.java | 23 ++-- .../sdc/toscaparser/api/parameters/Input.java | 17 +-- .../sdc/toscaparser/api/parameters/Output.java | 21 ++-- .../openecomp/sdc/toscaparser/api/prereq/CSAR.java | 56 +++++----- .../sdc/toscaparser/api/prereq/CSAR.java.orig | 2 +- .../toscaparser/api/utils/JToscaErrorCodes.java | 16 +-- .../api/utils/TOSCAVersionProperty.java | 22 ++-- .../toscaparser/api/utils/ThreadLocalsHolder.java | 10 +- .../sdc/toscaparser/api/utils/UrlUtils.java | 16 +-- .../sdc/toscaparser/api/utils/ValidateUtils.java | 96 ++++++++-------- .../sdc/toscaparser/api/GetValidationIssues.java | 81 ++++++++++++++ 60 files changed, 836 insertions(+), 767 deletions(-) delete mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/common/ExceptionCollector.java create mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/common/JToscaValidationIssue.java create mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/common/ValidationIssueCollector.java create mode 100644 src/test/java/org/openecomp/sdc/toscaparser/api/GetValidationIssues.java diff --git a/pom.xml b/pom.xml index 69fb8b1..ba462d9 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ org.openecomp.sdc.jtosca jtosca - 1.1.10-SNAPSHOT + 1.1.11-SNAPSHOT sdc-jtosca @@ -53,6 +53,20 @@ junit 4.12 + + + com.opencsv + opencsv + 3.10 + + + + + org.apache.commons + commons-io + 1.3.2 + + diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/DataEntity.java b/src/main/java/org/openecomp/sdc/toscaparser/api/DataEntity.java index 350068b..08e154f 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/DataEntity.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/DataEntity.java @@ -1,11 +1,10 @@ -package org.openecomp.sdc.toscaparser.api; +package org.openecomp.sdc.toscaparser.api; import java.util.ArrayList; -import java.util.Arrays; import java.util.LinkedHashMap; import java.util.List; -import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; 
+import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; import org.openecomp.sdc.toscaparser.api.elements.*; import org.openecomp.sdc.toscaparser.api.elements.constraints.Constraint; import org.openecomp.sdc.toscaparser.api.elements.constraints.Schema; @@ -50,9 +49,9 @@ public class DataEntity { else { if(!(value instanceof LinkedHashMap)) { //ERROR under investigation - ThreadLocalsHolder.getCollector().appendWarning(String.format( + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE001", String.format( "TypeMismatchError: \"%s\" is not a map. The type is \"%s\"", - value.toString(),dataType.getType())); + value.toString(),dataType.getType()))); if (value instanceof List && ((List) value).size() > 0) { value = ((List) value).get(0); @@ -86,9 +85,9 @@ public class DataEntity { for(String valueKey: valueDict.keySet()) { //1710 devlop JSON validation if(!("json").equals(dataType.getType()) && !allowedProps.contains(valueKey)) { - ThreadLocalsHolder.getCollector().appendException(String.format( - "UnknownFieldError: Data value of type \"%s\" contains unknown field \"%s\"", - dataType.getType(),valueKey)); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE100", String.format( + "UnknownFieldError: Data value of type \"%s\" contains unknown field \"%s\"", + dataType.getType(),valueKey))); } } @@ -109,9 +108,9 @@ public class DataEntity { } } if(missingProp.size() > 0) { - ThreadLocalsHolder.getCollector().appendWarning(String.format( + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE003",String.format( "MissingRequiredFieldError: Data value of type \"%s\" is missing required field(s) \"%s\"", - dataType.getType(),missingProp.toString())); + dataType.getType(),missingProp.toString()))); } // check every field @@ -169,9 +168,9 @@ public class DataEntity { } else if (type == null) { //NOT ANALYZED - 
ThreadLocalsHolder.getCollector().appendWarning(String.format( + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE002", String.format( "MissingType: Type is missing for value \"%s\"", - value.toString())); + value.toString()))); return value; } else if(type.equals(Schema.STRING)) { @@ -277,7 +276,7 @@ public class DataEntity { /*python -from toscaparser.common.exception import ExceptionCollector +from toscaparser.common.exception import ValidationIssueCollector from toscaparser.common.exception import MissingRequiredFieldError from toscaparser.common.exception import TypeMismatchError from toscaparser.common.exception import UnknownFieldError @@ -318,7 +317,7 @@ class DataEntity(object): # If the datatype has 'properties' definition else: if not isinstance(self.value, dict): - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( TypeMismatchError(what=self.value, type=self.datatype.type)) allowed_props = [] @@ -335,7 +334,7 @@ class DataEntity(object): # check allowed field for value_key in list(self.value.keys()): if value_key not in allowed_props: - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( UnknownFieldError(what=(_('Data value of type "%s"') % self.datatype.type), field=value_key)) @@ -351,7 +350,7 @@ class DataEntity(object): if req_key not in list(self.value.keys()): missingprop.append(req_key) if missingprop: - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( MissingRequiredFieldError( what=(_('Data value of type "%s"') % self.datatype.type), required=missingprop)) diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/DataEntity.java.orig b/src/main/java/org/openecomp/sdc/toscaparser/api/DataEntity.java.orig index c3f8fb5..2c6d923 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/DataEntity.java.orig +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/DataEntity.java.orig @@ -5,7 +5,7 @@ import 
java.util.Arrays; import java.util.LinkedHashMap; import java.util.List; -import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; +import org.openecomp.sdc.toscaparser.api.common.ValidationIssueCollector; import org.openecomp.sdc.toscaparser.api.elements.*; import org.openecomp.sdc.toscaparser.api.elements.constraints.Constraint; import org.openecomp.sdc.toscaparser.api.elements.constraints.Schema; diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/EntityTemplate.java b/src/main/java/org/openecomp/sdc/toscaparser/api/EntityTemplate.java index 9220dac..32de069 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/EntityTemplate.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/EntityTemplate.java @@ -1,13 +1,13 @@ package org.openecomp.sdc.toscaparser.api; +import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.openecomp.sdc.toscaparser.api.elements.*; +import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; + import java.util.ArrayList; import java.util.LinkedHashMap; -import java.util.List; import java.util.Map; -import org.openecomp.sdc.toscaparser.api.elements.*; -import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; - public abstract class EntityTemplate { // Base class for TOSCA templates @@ -93,8 +93,8 @@ public abstract class EntityTemplate { if(type == null) { //msg = (_('Policy definition of "%(pname)s" must have' // ' a "type" ''attribute.') % dict(pname=name)) - ThreadLocalsHolder.getCollector().appendException(String.format( - "ValidationError: Policy definition of \"%s\" must have a \"type\" attribute",name)); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE140", String.format( + "ValidationError: Policy definition of \"%s\" must have a \"type\" attribute",name))); } typeDefinition = new PolicyType(type, customDef); } @@ -335,9 +335,9 @@ public abstract class EntityTemplate { // '"default_instances" value is not between ' // 
'"min_instances" and "max_instances".' % // self.name) - ThreadLocalsHolder.getCollector().appendException(String.format( + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE141", String.format( "ValidationError: \"properties\" of template \"%s\": \"default_instances\" value is not between \"min_instances\" and \"max_instances\"", - name)); + name))); } } } @@ -366,17 +366,17 @@ public abstract class EntityTemplate { } // Required properties found without value or a default value if(!reqPropsNoValueOrDefault.isEmpty()) { - ThreadLocalsHolder.getCollector().appendWarning(String.format( + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE003", String.format( "MissingRequiredFieldError: properties of template \"%s\" are missing field(s): %s", - name,reqPropsNoValueOrDefault.toString())); + name,reqPropsNoValueOrDefault.toString()))); } } else { // Required properties in schema, but not in template if(!requiredProps.isEmpty()) { - ThreadLocalsHolder.getCollector().appendWarning(String.format( + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE004", String.format( "MissingRequiredFieldError2: properties of template \"%s\" are missing field(s): %s", - name,requiredProps.toString())); + name,requiredProps.toString()))); } } } @@ -384,8 +384,8 @@ public abstract class EntityTemplate { @SuppressWarnings("unchecked") private void _validateField(LinkedHashMap template) { if(!(template instanceof LinkedHashMap)) { - ThreadLocalsHolder.getCollector().appendException(String.format( - "MissingRequiredFieldError: Template \"%s\" is missing required field \"%s\"",name,TYPE)); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE142", String.format( + "MissingRequiredFieldError: Template \"%s\" is missing required field \"%s\"",name,TYPE))); return;//??? 
} boolean bBad = false; @@ -402,8 +402,8 @@ public abstract class EntityTemplate { bBad = (template.get(TYPE) == null); } if(bBad) { - ThreadLocalsHolder.getCollector().appendException(String.format( - "MissingRequiredFieldError: Template \"%s\" is missing required field \"%s\"",name,TYPE)); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE143", String.format( + "MissingRequiredFieldError: Template \"%s\" is missing required field \"%s\"",name,TYPE))); } } @@ -417,8 +417,8 @@ public abstract class EntityTemplate { } } if(!bFound) { - ThreadLocalsHolder.getCollector().appendException(String.format( - "UnknownFieldError: Section \"%s\" of template \"%s\" contains unknown field \"%s\"",section,name,sname)); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE144", String.format( + "UnknownFieldError: Section \"%s\" of template \"%s\" contains unknown field \"%s\"",section,name,sname))); } } @@ -585,7 +585,7 @@ class EntityTemplate(object): if not type: msg = (_('Policy definition of "%(pname)s" must have' ' a "type" ''attribute.') % dict(pname=name)) - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( ValidationError(msg)) self.type_definition = PolicyType(type, custom_def) @@ -729,7 +729,7 @@ class EntityTemplate(object): '"default_instances" value is not between ' '"min_instances" and "max_instances".' 
% self.name) - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( ValidationError(message=err_msg)) def _common_validate_properties(self, entitytype, properties): @@ -751,21 +751,21 @@ class EntityTemplate(object): req_props_no_value_or_default.append(r) # Required properties found without value or a default value if req_props_no_value_or_default: - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( MissingRequiredFieldError( what='"properties" of template "%s"' % self.name, required=req_props_no_value_or_default)) else: # Required properties in schema, but not in template if required_props: - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( MissingRequiredFieldError( what='"properties" of template "%s"' % self.name, required=required_props)) def _validate_field(self, template): if not isinstance(template, dict): - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( MissingRequiredFieldError( what='Template "%s"' % self.name, required=self.TYPE)) try: @@ -777,14 +777,14 @@ class EntityTemplate(object): else: template[self.TYPE] except KeyError: - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( MissingRequiredFieldError( what='Template "%s"' % self.name, required=self.TYPE)) def _common_validate_field(self, schema, allowedlist, section): for name in schema: if name not in allowedlist: - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( UnknownFieldError( what=('"%(section)s" of template "%(nodename)s"' % {'section': section, 'nodename': self.name}), diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/Group.java b/src/main/java/org/openecomp/sdc/toscaparser/api/Group.java index 8ed623f..d183ac7 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/Group.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/Group.java @@ -1,10 +1,11 @@ package 
org.openecomp.sdc.toscaparser.api; +import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; + import java.util.ArrayList; import java.util.LinkedHashMap; import java.util.Map; -import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; import org.openecomp.sdc.toscaparser.api.elements.Metadata; import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; import org.openecomp.sdc.toscaparser.api.utils.ValidateUtils; @@ -70,9 +71,9 @@ public class Group extends EntityTemplate { } } if(!bFound) { - ThreadLocalsHolder.getCollector().appendException(String.format( + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE183", String.format( "UnknownFieldError: Groups \"%s\" contains unknown field \"%s\"", - name,key)); + name,key))); } } } @@ -91,7 +92,7 @@ public class Group extends EntityTemplate { /*python -from toscaparser.common.exception import ExceptionCollector +from toscaparser.common.exception import ValidationIssueCollector from toscaparser.common.exception import UnknownFieldError from toscaparser.entity_template import EntityTemplate from toscaparser.utils import validateutils @@ -131,7 +132,7 @@ class Group(EntityTemplate): def _validate_keys(self): for key in self.entity_tpl.keys(): if key not in SECTIONS: - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( UnknownFieldError(what='Groups "%s"' % self.name, field=key)) */ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/ImportsLoader.java b/src/main/java/org/openecomp/sdc/toscaparser/api/ImportsLoader.java index a97a360..6794f9a 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/ImportsLoader.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/ImportsLoader.java @@ -1,6 +1,7 @@ package org.openecomp.sdc.toscaparser.api; -import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; +import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; + 
import org.openecomp.sdc.toscaparser.api.elements.TypeValidation; import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; import org.openecomp.sdc.toscaparser.api.utils.UrlUtils; @@ -42,7 +43,7 @@ public class ImportsLoader { if((_path == null || _path.isEmpty()) && tpl == null) { //msg = _('Input tosca template is not provided.') //log.warning(msg) - ThreadLocalsHolder.getCollector().appendException("ValidationError: Input tosca template is not provided"); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE184", "ValidationError: Input tosca template is not provided")); } this.path = _path; @@ -78,8 +79,8 @@ public class ImportsLoader { if(importslist == null) { //msg = _('"imports" keyname is defined without including templates.') //log.error(msg) - ThreadLocalsHolder.getCollector().appendException( - "ValidationError: \"imports\" keyname is defined without including templates"); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE185", + "ValidationError: \"imports\" keyname is defined without including templates")); return; } @@ -93,8 +94,8 @@ public class ImportsLoader { if(importNames.contains(importName)) { //msg = (_('Duplicate import name "%s" was found.') % import_name) //log.error(msg) - ThreadLocalsHolder.getCollector().appendException(String.format( - "ValidationError: Duplicate import name \"%s\" was found",importName)); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE186", String.format( + "ValidationError: Duplicate import name \"%s\" was found",importName))); } importNames.add(importName); //??? 
@@ -169,8 +170,8 @@ public class ImportsLoader { private void _validateImportKeys(String importName, LinkedHashMap importUri) { if(importUri.get(FILE) == null) { //log.warning(_('Missing keyname "file" in import "%(name)s".') % {'name': import_name}) - ThreadLocalsHolder.getCollector().appendException(String.format( - "MissingRequiredFieldError: Import of template \"%s\" is missing field %s",importName,FILE)); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE187", String.format( + "MissingRequiredFieldError: Import of template \"%s\" is missing field %s",importName,FILE))); } for(String key: importUri.keySet()) { boolean bFound = false; @@ -184,8 +185,8 @@ public class ImportsLoader { //log.warning(_('Unknown keyname "%(key)s" error in ' // 'imported definition "%(def)s".') // % {'key': key, 'def': import_name}) - ThreadLocalsHolder.getCollector().appendException(String.format( - "UnknownFieldError: Import of template \"%s\" has unknown fiels %s",importName,key)); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE188", String.format( + "UnknownFieldError: Import of template \"%s\" has unknown fiels %s",importName,key))); } } } @@ -221,9 +222,9 @@ public class ImportsLoader { repository = (String)((LinkedHashMap)importUriDef).get(REPOSITORY); if(repository != null) { if(!repositories.keySet().contains(repository)) { - ThreadLocalsHolder.getCollector().appendException(String.format( + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE189", String.format( "InvalidPropertyValueError: Repository \"%s\" not found in \"%s\"", - repository,repositories.keySet().toString())); + repository,repositories.keySet().toString()))); } } } @@ -238,8 +239,8 @@ public class ImportsLoader { // 'definition "%(import_name)s".') // % {'import_name': import_name}) //log.error(msg) - ThreadLocalsHolder.getCollector().appendException(String.format( - "ValidationError: A template file 
name is not provided with import definition \"%s\"",importName)); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE190", String.format( + "ValidationError: A template file name is not provided with import definition \"%s\"",importName))); al[0] = al[1] = null; return al; } @@ -253,8 +254,8 @@ public class ImportsLoader { return al; } catch(IOException e) { - ThreadLocalsHolder.getCollector().appendException(String.format( - "ImportError: \"%s\" loading YAML import from \"%s\"",e.getClass().getSimpleName(),fileName)); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE191", String.format( + "ImportError: \"%s\" loading YAML import from \"%s\"",e.getClass().getSimpleName(),fileName))); al[0] = al[1] = null; return al; } @@ -269,7 +270,7 @@ public class ImportsLoader { String msg = String.format( "ImportError: Absolute file name \"%s\" cannot be used in the URL-based input template \"%s\"", fileName,path); - ThreadLocalsHolder.getCollector().appendException(msg); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE192", msg)); al[0] = al[1] = null; return al; } @@ -312,7 +313,7 @@ public class ImportsLoader { //log.error(msg) String msg = String.format( "ValueError: \"%s\" is not a valid file",importTemplate); - ThreadLocalsHolder.getCollector().appendException(msg); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE193", msg)); log.debug("ImportsLoader - _loadImportTemplate - {}", msg); } } @@ -330,7 +331,7 @@ public class ImportsLoader { else { String msg = String.format( "Relative file name \"%s\" cannot be used in a pre-parsed input template",fileName); - ThreadLocalsHolder.getCollector().appendException("ImportError: " + msg); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE194", "ImportError: " + msg)); al[0] = al[1] = null; return al; } @@ -339,8 +340,8 @@ public class 
ImportsLoader { if(importTemplate == null || importTemplate.isEmpty()) { //log.error(_('Import "%(name)s" is not valid.') % // {'name': import_uri_def}) - ThreadLocalsHolder.getCollector().appendException(String.format( - "ImportError: Import \"%s\" is not valid",importUriDef)); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE195", String.format( + "ImportError: Import \"%s\" is not valid",importUriDef))); al[0] = al[1] = null; return al; } @@ -348,8 +349,8 @@ public class ImportsLoader { // for now, this must be a file if(!aFile) { log.error("ImportsLoader - _loadImportTemplate - Error!! Expected a file. importUriDef = {}, importTemplate = {}", importUriDef, importTemplate); - ThreadLocalsHolder.getCollector().appendException(String.format( - "ImportError: Import \"%s\" is not a file",importName)); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE196", String.format( + "ImportError: Import \"%s\" is not a file",importName))); al[0] = al[1] = null; return al; } @@ -361,14 +362,14 @@ public class ImportsLoader { return al; } catch(FileNotFoundException e) { - ThreadLocalsHolder.getCollector().appendException(String.format( - "ImportError: Failed to load YAML from \"%s\"",importName)); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE197", String.format( + "ImportError: Failed to load YAML from \"%s\"",importName))); al[0] = al[1] = null; return al; } catch(Exception e) { - ThreadLocalsHolder.getCollector().appendException(String.format( - "ImportError: Exception from SnakeYAML file = \"%s\"",importName)); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE198", String.format( + "ImportError: Exception from SnakeYAML file = \"%s\"",importName))); al[0] = al[1] = null; return al; } @@ -376,8 +377,8 @@ public class ImportsLoader { if(shortImportNotation) { //log.error(_('Import "%(name)s" is not valid.') % 
import_uri_def) - ThreadLocalsHolder.getCollector().appendException(String.format( - "ImportError: Import \"%s\" is not valid",importName)); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE199", String.format( + "ImportError: Import \"%s\" is not valid",importName))); al[0] = al[1] = null; return al; } @@ -410,7 +411,7 @@ public class ImportsLoader { String msg = String.format( "referenced repository \"%s\" in import definition \"%s\" not found", repository,importName); - ThreadLocalsHolder.getCollector().appendException("ImportError: " + msg); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE200", "ImportError: " + msg)); al[0] = al[1] = null; return al; } @@ -424,8 +425,8 @@ public class ImportsLoader { return al; } catch(IOException e) { - ThreadLocalsHolder.getCollector().appendException(String.format( - "ImportError: Exception loading YAML import from \"%s\"",fullUrl)); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE201", String.format( + "ImportError: Exception loading YAML import from \"%s\"",fullUrl))); al[0] = al[1] = null; return al; } @@ -434,13 +435,13 @@ public class ImportsLoader { String msg = String.format( "repository URL \"%s\" in import definition \"%s\" is not valid", repoUrl,importName); - ThreadLocalsHolder.getCollector().appendException("ImportError: " + msg); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE202", "ImportError: " + msg)); } // if we got here something is wrong with the flow... 
log.error("ImportsLoader - _loadImportTemplate - got to dead end (importName {})", importName); - ThreadLocalsHolder.getCollector().appendException(String.format( - "ImportError: _loadImportTemplate got to dead end (importName %s)\n",importName)); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE203", String.format( + "ImportError: _loadImportTemplate got to dead end (importName %s)\n",importName))); al[0] = al[1] = null; return al; } @@ -464,7 +465,7 @@ public class ImportsLoader { import logging import os -from toscaparser.common.exception import ExceptionCollector +from toscaparser.common.exception import ValidationIssueCollector from toscaparser.common.exception import InvalidPropertyValueError from toscaparser.common.exception import MissingRequiredFieldError from toscaparser.common.exception import UnknownFieldError @@ -491,7 +492,7 @@ class ImportsLoader(object): if not path and not tpl: msg = _('Input tosca template is not provided.') log.warning(msg) - ExceptionCollector.appendException(ValidationError(message=msg)) + ValidationIssueCollector.appendException(ValidationError(message=msg)) self.path = path self.repositories = {} if tpl and tpl.get('repositories'): @@ -514,7 +515,7 @@ class ImportsLoader(object): msg = _('"imports" keyname is defined without including ' 'templates.') log.error(msg) - ExceptionCollector.appendException(ValidationError(message=msg)) + ValidationIssueCollector.appendException(ValidationError(message=msg)) return for import_def in self.importslist: @@ -524,7 +525,7 @@ class ImportsLoader(object): msg = (_('Duplicate import name "%s" was found.') % import_name) log.error(msg) - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( ValidationError(message=msg)) imports_names.add(import_name) @@ -568,7 +569,7 @@ class ImportsLoader(object): if self.FILE not in import_uri_def.keys(): log.warning(_('Missing keyname "file" in import "%(name)s".') % {'name': import_name}) - 
ExceptionCollector.appendException( + ValidationIssueCollector.appendException( MissingRequiredFieldError( what='Import of template "%s"' % import_name, required=self.FILE)) @@ -577,7 +578,7 @@ class ImportsLoader(object): log.warning(_('Unknown keyname "%(key)s" error in ' 'imported definition "%(def)s".') % {'key': key, 'def': import_name}) - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( UnknownFieldError( what='Import of template "%s"' % import_name, field=key)) @@ -610,7 +611,7 @@ class ImportsLoader(object): repos = self.repositories.keys() if repository is not None: if repository not in repos: - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( InvalidPropertyValueError( what=_('Repository is not found in "%s"') % repos)) else: @@ -623,7 +624,7 @@ class ImportsLoader(object): 'definition "%(import_name)s".') % {'import_name': import_name}) log.error(msg) - ExceptionCollector.appendException(ValidationError(message=msg)) + ValidationIssueCollector.appendException(ValidationError(message=msg)) return if toscaparser.utils.urlutils.UrlUtils.validate_url(file_name): @@ -638,7 +639,7 @@ class ImportsLoader(object): '"%(template)s".') % {'name': file_name, 'template': self.path}) log.error(msg) - ExceptionCollector.appendException(ImportError(msg)) + ValidationIssueCollector.appendException(ImportError(msg)) return import_template = toscaparser.utils.urlutils.UrlUtils.\ join_url(self.path, file_name) @@ -670,7 +671,7 @@ class ImportsLoader(object): % {'import_template': import_template}) log.error(msg) - ExceptionCollector.appendException + ValidationIssueCollector.appendException (ValueError(msg)) else: # template is pre-parsed if os.path.isabs(file_name) and os.path.isfile(file_name): @@ -681,13 +682,13 @@ class ImportsLoader(object): 'in a pre-parsed input template.') % {'name': file_name}) log.error(msg) - ExceptionCollector.appendException(ImportError(msg)) + 
ValidationIssueCollector.appendException(ImportError(msg)) return if not import_template: log.error(_('Import "%(name)s" is not valid.') % {'name': import_uri_def}) - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( ImportError(_('Import "%s" is not valid.') % import_uri_def)) return @@ -695,7 +696,7 @@ class ImportsLoader(object): if short_import_notation: log.error(_('Import "%(name)s" is not valid.') % import_uri_def) - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( ImportError(_('Import "%s" is not valid.') % import_uri_def)) return @@ -714,7 +715,7 @@ class ImportsLoader(object): 'definition "%(tpl)s" not found.') % {'n_uri': repository, 'tpl': import_name}) log.error(msg) - ExceptionCollector.appendException(ImportError(msg)) + ValidationIssueCollector.appendException(ImportError(msg)) return if toscaparser.utils.urlutils.UrlUtils.validate_url(full_url): @@ -724,5 +725,5 @@ class ImportsLoader(object): 'definition "%(tpl)s".') % {'n_uri': repo_url, 'tpl': import_name}) log.error(msg) - ExceptionCollector.appendException(ImportError(msg)) + ValidationIssueCollector.appendException(ImportError(msg)) */ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/NodeTemplate.java b/src/main/java/org/openecomp/sdc/toscaparser/api/NodeTemplate.java index 6606068..1e97572 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/NodeTemplate.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/NodeTemplate.java @@ -1,5 +1,7 @@ package org.openecomp.sdc.toscaparser.api; +import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; + import java.util.ArrayList; import java.util.LinkedHashMap; import java.util.List; @@ -86,14 +88,14 @@ public class NodeTemplate extends EntityTemplate { } } if(bFound || customDef.get(node) != null) { - ThreadLocalsHolder.getCollector().appendException(String.format( + 
ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE205", String.format( "NotImplementedError: Lookup by TOSCA types is not supported. Requirement for \"%s\" can not be full-filled", - getName())); + getName()))); return null; } if(templates.get(node) == null) { - ThreadLocalsHolder.getCollector().appendException(String.format( - "KeyError: Node template \"%s\" was not found",node)); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE206", String.format( + "KeyError: Node template \"%s\" was not found",node))); return null; } NodeTemplate relatedTpl = new NodeTemplate(node,templates,customDef,null,null); @@ -105,7 +107,7 @@ public class NodeTemplate extends EntityTemplate { if(relationship == null) { ArrayList parentReqs = ((NodeType)typeDefinition).getAllRequirements(); if(parentReqs == null) { - ThreadLocalsHolder.getCollector().appendException("ValidationError: parent_req is null"); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE207", "ValidationError: parent_req is null")); } else { // for(String key: req.keySet()) { @@ -165,9 +167,9 @@ public class NodeTemplate extends EntityTemplate { } } else { - ThreadLocalsHolder.getCollector().appendException(String.format( + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE208", String.format( "MissingRequiredFieldError: \"relationship\" used in template \"%s\" is missing required field \"type\"", - relatedTpl.getName())); + relatedTpl.getName()))); } } for(RelationshipType rtype: ((NodeType)typeDefinition).getRelationship().keySet()) { @@ -275,8 +277,8 @@ public class NodeTemplate extends EntityTemplate { ArrayList requires = (ArrayList)((NodeType)typeDefinition).getValue(REQUIREMENTS, entityTpl, false); if(requires != null) { if(!(requires instanceof ArrayList)) { - ThreadLocalsHolder.getCollector().appendException(String.format( - "TypeMismatchError: \"requirements\" of 
template \"%s\" are not of type \"list\"",name)); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE209", String.format( + "TypeMismatchError: \"requirements\" of template \"%s\" are not of type \"list\"",name))); } else { for(Object ro: requires) { @@ -320,8 +322,8 @@ public class NodeTemplate extends EntityTemplate { if(occurrences.size() != 2 || !(0 <= (int)occurrences.get(0) && (int)occurrences.get(0) <= (int)occurrences.get(1)) || (int)occurrences.get(1) == 0) { - ThreadLocalsHolder.getCollector().appendException(String.format( - "InvalidPropertyValueError: property has invalid value %s",occurrences.toString())); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE210", String.format( + "InvalidPropertyValueError: property has invalid value %s",occurrences.toString()))); } } @@ -335,8 +337,8 @@ public class NodeTemplate extends EntityTemplate { } } if(!bFound) { - ThreadLocalsHolder.getCollector().appendException(String.format( - "UnknownFieldError: \"requirements\" of template \"%s\" contains unknown field \"%s\"",name,key)); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE211", String.format( + "UnknownFieldError: \"requirements\" of template \"%s\" contains unknown field \"%s\"",name,key))); } } } @@ -369,8 +371,8 @@ public class NodeTemplate extends EntityTemplate { _commonValidateField(value,_collectCustomIfaceOperations(iname),"interfaces"); } else { - ThreadLocalsHolder.getCollector().appendException(String.format( - "UnknownFieldError: \"interfaces\" of template \"%s\" contains unknown field %s",name,iname)); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE212", String.format( + "UnknownFieldError: \"interfaces\" of template \"%s\" contains unknown field %s",name,iname))); } } } @@ -421,8 +423,8 @@ public class NodeTemplate extends EntityTemplate { } if(!bFound) { - 
ThreadLocalsHolder.getCollector().appendException(String.format( - "UnknownFieldError: Node template \"%s\" has unknown field \"%s\"",name,ntname)); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE213", String.format( + "UnknownFieldError: Node template \"%s\" has unknown field \"%s\"",name,ntname))); } } } @@ -455,7 +457,7 @@ public class NodeTemplate extends EntityTemplate { /*python -from toscaparser.common.exception import ExceptionCollector +from toscaparser.common.exception import ValidationIssueCollector from toscaparser.common.exception import InvalidPropertyValueError from toscaparser.common.exception import MissingRequiredFieldError from toscaparser.common.exception import TypeMismatchError @@ -524,11 +526,11 @@ class NodeTemplate(EntityTemplate): 'Requirement for "%s" can not be full-filled.') % self.name if (node in list(self.type_definition.TOSCA_DEF.keys()) or node in self.custom_def): - ExceptionCollector.appendException(NotImplementedError(msg)) + ValidationIssueCollector.appendException(NotImplementedError(msg)) return if node not in self.templates: - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( KeyError(_('Node template "%s" was not found.') % node)) return @@ -539,7 +541,7 @@ class NodeTemplate(EntityTemplate): if not relationship: parent_reqs = self.type_definition.get_all_requirements() if parent_reqs is None: - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( ValidationError(message='parent_req is ' + str(parent_reqs))) else: @@ -574,7 +576,7 @@ class NodeTemplate(EntityTemplate): elif not relationship.startswith(rel_prfx): relationship = rel_prfx + relationship else: - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( MissingRequiredFieldError( what=_('"relationship" used in template ' '"%s"') % related_tpl.name, @@ -645,7 +647,7 @@ class NodeTemplate(EntityTemplate): self.entity_tpl) if requires: if not 
isinstance(requires, list): - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( TypeMismatchError( what='"requirements" of template "%s"' % self.name, type='list')) @@ -675,13 +677,13 @@ class NodeTemplate(EntityTemplate): DataEntity.validate_datatype('integer', value) if len(occurrences) != 2 or not (0 <= occurrences[0] <= occurrences[1]) \ or occurrences[1] == 0: - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( InvalidPropertyValueError(what=(occurrences))) def _validate_requirements_keys(self, requirement): for key in requirement.keys(): if key not in self.REQUIREMENTS_SECTION: - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( UnknownFieldError( what='"requirements" of template "%s"' % self.name, field=key)) @@ -707,7 +709,7 @@ class NodeTemplate(EntityTemplate): self._collect_custom_iface_operations(name), 'interfaces') else: - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( UnknownFieldError( what='"interfaces" of template "%s"' % self.name, field=name)) @@ -730,6 +732,6 @@ class NodeTemplate(EntityTemplate): def _validate_fields(self, nodetemplate): for name in nodetemplate.keys(): if name not in self.SECTIONS and name not in self.SPECIAL_SECTIONS: - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( UnknownFieldError(what='Node template "%s"' % self.name, field=name))*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/Policy.java b/src/main/java/org/openecomp/sdc/toscaparser/api/Policy.java index a59d9d5..26805bd 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/Policy.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/Policy.java @@ -1,10 +1,11 @@ package org.openecomp.sdc.toscaparser.api; +import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; + import java.util.ArrayList; import java.util.LinkedHashMap; import java.util.Map; 
-import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; import org.openecomp.sdc.toscaparser.api.utils.ValidateUtils; @@ -101,9 +102,9 @@ public class Policy extends EntityTemplate { } } if(!bFound) { - ThreadLocalsHolder.getCollector().appendException(String.format( + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE219", String.format( "UnknownFieldError: Policy \"%s\" contains unknown field \"%s\"", - name,key)); + name,key))); } } } @@ -122,7 +123,7 @@ public class Policy extends EntityTemplate { /*python -from toscaparser.common.exception import ExceptionCollector +from toscaparser.common.exception import ValidationIssueCollector from toscaparser.common.exception import UnknownFieldError from toscaparser.entity_template import EntityTemplate from toscaparser.triggers import Triggers @@ -181,7 +182,7 @@ class Policy(EntityTemplate): def _validate_keys(self): for key in self.entity_tpl.keys(): if key not in SECTIONS: - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( UnknownFieldError(what='Policy "%s"' % self.name, field=key)) */ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/Repository.java b/src/main/java/org/openecomp/sdc/toscaparser/api/Repository.java index 92a90af..3ede22c 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/Repository.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/Repository.java @@ -1,8 +1,9 @@ package org.openecomp.sdc.toscaparser.api; +import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; + import java.util.LinkedHashMap; -import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; import org.openecomp.sdc.toscaparser.api.utils.UrlUtils; @@ -24,9 +25,9 @@ public class Repository { if(reposit instanceof LinkedHashMap) { url = 
(String)((LinkedHashMap)reposit).get("url"); if(url == null) { - ThreadLocalsHolder.getCollector().appendException(String.format( + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE229", String.format( "MissingRequiredFieldError: Repository \"%s\" is missing required field \"url\"", - name)); + name))); } } loadAndValidate(name,reposit); @@ -45,9 +46,9 @@ public class Repository { } } if(!bFound) { - ThreadLocalsHolder.getCollector().appendException(String.format( + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE230", String.format( "UnknownFieldError: repositories \"%s\" contains unknown field \"%s\"", - keyname,key)); + keyname,key))); } } @@ -55,8 +56,8 @@ public class Repository { if(repositUrl != null) { boolean urlVal = UrlUtils.validateUrl(repositUrl); if(!urlVal) { - ThreadLocalsHolder.getCollector().appendException(String.format( - "URLException: repsositories \"%s\" Invalid Url",keyname)); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE231", String.format( + "URLException: repsositories \"%s\" Invalid Url",keyname))); } } } @@ -74,7 +75,7 @@ public class Repository { /*python -from toscaparser.common.exception import ExceptionCollector +from toscaparser.common.exception import ValidationIssueCollector from toscaparser.common.exception import MissingRequiredFieldError from toscaparser.common.exception import UnknownFieldError from toscaparser.common.exception import URLException @@ -91,7 +92,7 @@ class Repository(object): self.reposit = values if isinstance(self.reposit, dict): if 'url' not in self.reposit.keys(): - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( MissingRequiredFieldError(what=_('Repository "%s"') % self.name, required='url')) self.url = self.reposit['url'] @@ -102,7 +103,7 @@ class Repository(object): if isinstance(reposit_def, dict): for key in reposit_def.keys(): if key not in SECTIONS: - 
ExceptionCollector.appendException( + ValidationIssueCollector.appendException( UnknownFieldError(what=_('repositories "%s"') % self.keyname, field=key)) @@ -111,7 +112,7 @@ class Repository(object): url_val = toscaparser.utils.urlutils.UrlUtils.\ validate_url(reposit_url) if url_val is not True: - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( URLException(what=_('repsositories "%s" Invalid Url') % self.keyname)) */ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/SubstitutionMappings.java b/src/main/java/org/openecomp/sdc/toscaparser/api/SubstitutionMappings.java index a68f9fb..e5e9d9a 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/SubstitutionMappings.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/SubstitutionMappings.java @@ -1,13 +1,17 @@ package org.openecomp.sdc.toscaparser.api; -import java.util.*; - +import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; import org.openecomp.sdc.toscaparser.api.elements.NodeType; import org.openecomp.sdc.toscaparser.api.elements.PropertyDef; import org.openecomp.sdc.toscaparser.api.parameters.Input; import org.openecomp.sdc.toscaparser.api.parameters.Output; import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; +import java.util.ArrayList; +import java.util.HashSet; +import java.util.LinkedHashMap; +import java.util.List; + public class SubstitutionMappings { // SubstitutionMappings class declaration @@ -126,9 +130,9 @@ public class SubstitutionMappings { } } if(!bFound) { - ThreadLocalsHolder.getCollector().appendException(String.format( + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE232", String.format( "UnknownFieldError: SubstitutionMappings contain unknown field \"%s\"", - key)); + key))); } } } @@ -137,14 +141,14 @@ public class SubstitutionMappings { // validate the node_type of substitution mappings String nodeType = 
(String)subMappingDef.get(NODE_TYPE); if(nodeType == null) { - ThreadLocalsHolder.getCollector().appendException(String.format( + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE233", String.format( "MissingRequiredFieldError: SubstitutionMappings used in topology_template is missing required field \"%s\"", - NODE_TYPE)); + NODE_TYPE))); } Object nodeTypeDef = customDefs.get(nodeType); if(nodeTypeDef == null) { - ThreadLocalsHolder.getCollector().appendException(String.format( - "InvalidNodeTypeError: \"%s\" is invalid",nodeType)); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE234", String.format( + "InvalidNodeTypeError: \"%s\" is invalid",nodeType))); } } @@ -170,9 +174,9 @@ public class SubstitutionMappings { for(String property: requiredProperties) { // Check property which is 'required' and has no 'default' value if(!allInputs.contains(property)) { - ThreadLocalsHolder.getCollector().appendException(String.format( + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE235", String.format( "MissingRequiredInputError: SubstitutionMappings with node_type \"%s\" is missing required input \"%s\"", - getNodeType(),property)); + getNodeType(),property))); } } // If the optional properties of node type need to be customized by @@ -188,9 +192,9 @@ public class SubstitutionMappings { diffset.removeAll(allInputs); for(String parameter: diffset) { if(allProperties.contains(parameter)) { - ThreadLocalsHolder.getCollector().appendException(String.format( + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE236", String.format( "MissingRequiredInputError: SubstitutionMappings with node_type \"%s\" is missing required input \"%s\"", - getNodeType(),parameter)); + getNodeType(),parameter))); } } // Additional inputs are not in the properties of node type must @@ -201,9 +205,9 @@ public class SubstitutionMappings { diffset = allInputs; 
diffset.removeAll(allProperties); if(diffset.contains(inp.getName()) && inp.getDefault() == null) { - ThreadLocalsHolder.getCollector().appendException(String.format( + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE237", String.format( "MissingRequiredInputError: SubstitutionMappings with node_type \"%s\" is missing rquired input \"%s\"", - getNodeType(),inp.getName())); + getNodeType(),inp.getName()))); } } } @@ -222,7 +226,7 @@ public class SubstitutionMappings { for(CapabilityAssignment cap: nodeCapabilities) { if(tplsCapabilities != null && tplsCapabilities.get(cap.getName()) == null) { ; //pass - // ExceptionCollector.appendException( + // ValidationIssueCollector.appendException( // UnknownFieldError(what='SubstitutionMappings', // field=cap)) } @@ -247,7 +251,7 @@ public class SubstitutionMappings { String cap = ro.getName(); if(tplsRequirements != null && tplsRequirements.get(cap) == null) { ; //pass - // ExceptionCollector.appendException( + // ValidationIssueCollector.appendException( // UnknownFieldError(what='SubstitutionMappings', // field=cap)) } @@ -272,9 +276,9 @@ public class SubstitutionMappings { for(Output output: outputs) { Object ado = getNodeDefinition().getAttributesDef(); if(ado != null && ((LinkedHashMap)ado).get(output.getName()) == null) { - ThreadLocalsHolder.getCollector().appendException(String.format( + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE238", String.format( "UnknownOutputError: Unknown output \"%s\" in SubstitutionMappings with node_type \"%s\"", - output.getName(),getNodeType())); + output.getName(),getNodeType()))); } } } @@ -313,7 +317,7 @@ public class SubstitutionMappings { /*python -from toscaparser.common.exception import ExceptionCollector +from toscaparser.common.exception import ValidationIssueCollector from toscaparser.common.exception import InvalidNodeTypeError from toscaparser.common.exception import MissingDefaultValueError from 
toscaparser.common.exception import MissingRequiredFieldError @@ -392,7 +396,7 @@ class SubstitutionMappings(object): """validate the keys of substitution mappings.""" for key in self.sub_mapping_def.keys(): if key not in self.SECTIONS: - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( UnknownFieldError(what=_('SubstitutionMappings'), field=key)) @@ -400,14 +404,14 @@ class SubstitutionMappings(object): """validate the node_type of substitution mappings.""" node_type = self.sub_mapping_def.get(self.NODE_TYPE) if not node_type: - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( MissingRequiredFieldError( what=_('SubstitutionMappings used in topology_template'), required=self.NODE_TYPE)) node_type_def = self.custom_defs.get(node_type) if not node_type_def: - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( InvalidNodeTypeError(what=node_type)) def _validate_inputs(self): @@ -428,7 +432,7 @@ class SubstitutionMappings(object): for property in required_properties: # Check property which is 'required' and has no 'default' value if property not in all_inputs: - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( MissingRequiredInputError( what=_('SubstitutionMappings with node_type ') + self.node_type, @@ -443,7 +447,7 @@ class SubstitutionMappings(object): all_properties = set(self.node_definition.get_properties_def()) for parameter in customized_parameters - all_inputs: if parameter in all_properties: - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( MissingRequiredInputError( what=_('SubstitutionMappings with node_type ') + self.node_type, @@ -456,7 +460,7 @@ class SubstitutionMappings(object): for input in self.inputs: if input.name in all_inputs - all_properties \ and input.default is None: - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( MissingDefaultValueError( 
what=_('SubstitutionMappings with node_type ') + self.node_type, @@ -473,7 +477,7 @@ class SubstitutionMappings(object): if (tpls_capabilities and cap not in list(tpls_capabilities.keys())): pass - # ExceptionCollector.appendException( + # ValidationIssueCollector.appendException( # UnknownFieldError(what='SubstitutionMappings', # field=cap)) @@ -488,7 +492,7 @@ class SubstitutionMappings(object): if (tpls_requirements and req not in list(tpls_requirements.keys())): pass - # ExceptionCollector.appendException( + # ValidationIssueCollector.appendException( # UnknownFieldError(what='SubstitutionMappings', # field=req)) @@ -508,7 +512,7 @@ class SubstitutionMappings(object): # has properties, the specification will be amended? for output in self.outputs: if output.name not in self.node_definition.get_attributes_def(): - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( UnknownOutputError( where=_('SubstitutionMappings with node_type ') + self.node_type, diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/TopologyTemplate.java b/src/main/java/org/openecomp/sdc/toscaparser/api/TopologyTemplate.java index afedfdb..c19623f 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/TopologyTemplate.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/TopologyTemplate.java @@ -1,7 +1,6 @@ package org.openecomp.sdc.toscaparser.api; -import java.util.*; - +import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; import org.openecomp.sdc.toscaparser.api.elements.InterfacesDef; import org.openecomp.sdc.toscaparser.api.elements.NodeType; import org.openecomp.sdc.toscaparser.api.elements.RelationshipType; @@ -12,6 +11,11 @@ import org.openecomp.sdc.toscaparser.api.parameters.Input; import org.openecomp.sdc.toscaparser.api.parameters.Output; import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; +import java.util.ArrayList; +import java.util.HashSet; +import java.util.LinkedHashMap; +import java.util.Map; + 
public class TopologyTemplate { private static final String DESCRIPTION = "description"; @@ -232,9 +236,9 @@ public class TopologyTemplate { DataEntity.validateDatatype("list", memberNames,null,null,null); if(memberNames.size() < 1 || (new HashSet(memberNames)).size() != memberNames.size()) { - ThreadLocalsHolder.getCollector().appendWarning(String.format( + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE005",String.format( "InvalidGroupTargetException: Member nodes \"%s\" should be >= 1 and not repeated", - memberNames.toString())); + memberNames.toString()))); } else { memberNodes = _getGroupMembers(memberNames); @@ -281,8 +285,8 @@ public class TopologyTemplate { } for(String member: members) { if(!nodeNames.contains(member)) { - ThreadLocalsHolder.getCollector().appendException(String.format( - "InvalidGroupTargetException: Target member \"%s\" is not found in \"nodeTemplates\"",member)); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE239", String.format( + "InvalidGroupTargetException: Target member \"%s\" is not found in \"nodeTemplates\"",member))); } } } @@ -383,8 +387,8 @@ public class TopologyTemplate { } } if(!bFound) { - ThreadLocalsHolder.getCollector().appendException(String.format( - "UnknownFieldError: TopologyTemplate contains unknown field \"%s\"",name)); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE240", String.format( + "UnknownFieldError: TopologyTemplate contains unknown field \"%s\"",name))); } } } @@ -697,7 +701,7 @@ class TopologyTemplate(object): DataEntity.validate_datatype('list', member_names) if len(member_names) < 1 or \ len(member_names) != len(set(member_names)): - exception.ExceptionCollector.appendException( + exception.ValidationIssueCollector.appendException( exception.InvalidGroupTargetException( message=_('Member nodes "%s" should be >= 1 ' 'and not repeated') % member_names)) @@ -732,7 +736,7 @@ class 
TopologyTemplate(object): node_names.append(node.name) for member in members: if member not in node_names: - exception.ExceptionCollector.appendException( + exception.ValidationIssueCollector.appendException( exception.InvalidGroupTargetException( message=_('Target member "%s" is not found in ' 'node_templates') % member)) @@ -780,7 +784,7 @@ class TopologyTemplate(object): def _validate_field(self): for name in self.tpl: if name not in SECTIONS: - exception.ExceptionCollector.appendException( + exception.ValidationIssueCollector.appendException( exception.UnknownFieldError(what='Template', field=name)) def _process_intrinsic_functions(self): diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/TopologyTemplate.java.orig b/src/main/java/org/openecomp/sdc/toscaparser/api/TopologyTemplate.java.orig index 3af4b34..7e5f4af 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/TopologyTemplate.java.orig +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/TopologyTemplate.java.orig @@ -1,11 +1,13 @@ package org.openecomp.sdc.toscaparser.api; +import org.openecomp.sdc.toscaparser.api.common.JToscaError; + import java.util.ArrayList; import java.util.HashSet; import java.util.LinkedHashMap; import java.util.Map; -import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; +import org.openecomp.sdc.toscaparser.api.common.ValidationIssueCollector; import org.openecomp.sdc.toscaparser.api.elements.InterfacesDef; import org.openecomp.sdc.toscaparser.api.elements.NodeType; import org.openecomp.sdc.toscaparser.api.elements.RelationshipType; @@ -233,9 +235,9 @@ public class TopologyTemplate { DataEntity.validateDatatype("list", memberNames,null,null,null); if(memberNames.size() < 1 || (new HashSet(memberNames)).size() != memberNames.size()) { - ThreadLocalsHolder.getCollector().appendException(String.format( + ThreadLocalsHolder.getCollector().appendError(new JToscaError("JE241", String.format( "InvalidGroupTargetException: Member nodes \"%s\" should be 
>= 1 and not repeated", - memberNames.toString())); + memberNames.toString()))); } else { memberNodes = _getGroupMembers(memberNames); @@ -282,8 +284,8 @@ public class TopologyTemplate { } for(String member: members) { if(!nodeNames.contains(member)) { - ThreadLocalsHolder.getCollector().appendException(String.format( - "InvalidGroupTargetException: Target member \"%s\" is not found in \"nodeTemplates\"",member)); + ThreadLocalsHolder.getCollector().appendError(new JToscaError("JE242", String.format( + "InvalidGroupTargetException: Target member \"%s\" is not found in \"nodeTemplates\"",member))); } } } @@ -384,8 +386,8 @@ public class TopologyTemplate { } } if(!bFound) { - ThreadLocalsHolder.getCollector().appendException(String.format( - "UnknownFieldError: TopologyTemplate contains unknown field \"%s\"",name)); + ThreadLocalsHolder.getCollector().appendError(new JToscaError("JE243", String.format( + "UnknownFieldError: TopologyTemplate contains unknown field \"%s\"",name))); } } } diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/ToscaTemplate.java b/src/main/java/org/openecomp/sdc/toscaparser/api/ToscaTemplate.java index 5d5cb87..f5902c4 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/ToscaTemplate.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/ToscaTemplate.java @@ -1,5 +1,7 @@ package org.openecomp.sdc.toscaparser.api; +import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; + import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; @@ -8,7 +10,7 @@ import java.io.InputStream; import java.util.*; import java.util.concurrent.ConcurrentHashMap; -import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; +import org.openecomp.sdc.toscaparser.api.common.ValidationIssueCollector; import org.openecomp.sdc.toscaparser.api.common.JToscaException; import org.openecomp.sdc.toscaparser.api.elements.EntityType; import org.openecomp.sdc.toscaparser.api.elements.Metadata; @@ 
-110,7 +112,7 @@ public class ToscaTemplate extends Object { boolean aFile, LinkedHashMap yamlDictTpl, boolean _resolveGetInput) throws JToscaException { - ThreadLocalsHolder.setCollector(new ExceptionCollector(_path)); + ThreadLocalsHolder.setCollector(new ValidationIssueCollector()); VALID_TEMPLATE_VERSIONS = new ArrayList<>(); VALID_TEMPLATE_VERSIONS.add("tosca_simple_yaml_1_0"); @@ -174,8 +176,8 @@ public class ToscaTemplate extends Object { tpl = yamlDictTpl; } else { - ThreadLocalsHolder.getCollector().appendException( - "ValueError: No path or yaml_dict_tpl was provided. There is nothing to parse"); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE244", + "ValueError: No path or yaml_dict_tpl was provided. There is nothing to parse")); log.debug("ToscaTemplate ValueError: No path or yaml_dict_tpl was provided. There is nothing to parse"); } @@ -449,8 +451,8 @@ public class ToscaTemplate extends Object { private void _validateField() { String sVersion = _tplVersion(); if(sVersion == null) { - ThreadLocalsHolder.getCollector().appendException(String.format( - "MissingRequiredField: Template is missing required field \"%s\"",DEFINITION_VERSION)); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE245", String.format( + "MissingRequiredField: Template is missing required field \"%s\"",DEFINITION_VERSION))); } else { _validateVersion(sVersion); @@ -473,9 +475,9 @@ public class ToscaTemplate extends Object { } } if(!bFound) { - ThreadLocalsHolder.getCollector().appendException(String.format( + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE246", String.format( "UnknownFieldError: Template contains unknown field \"%s\"", - sKey)); + sKey))); } } } @@ -489,9 +491,9 @@ public class ToscaTemplate extends Object { } } if(!bFound) { - ThreadLocalsHolder.getCollector().appendException(String.format( + ThreadLocalsHolder.getCollector().appendValidationIssue(new 
JToscaValidationIssue("JE247", String.format( "InvalidTemplateVersion: \"%s\" is invalid. Valid versions are %s", - sVersion,VALID_TEMPLATE_VERSIONS.toString())); + sVersion,VALID_TEMPLATE_VERSIONS.toString()))); } else if(!sVersion.equals("tosca_simple_yaml_1_0")) { EntityType.updateDefinitions(sVersion); @@ -521,39 +523,25 @@ public class ToscaTemplate extends Object { } } else { - ThreadLocalsHolder.getCollector().appendException("ValueError: " + _path + " is not a valid file"); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE248", "ValueError: " + _path + " is not a valid file")); return null; } return null; } private void verifyTemplate() throws JToscaException { - ThreadLocalsHolder.getCollector().setWantTrace(false); - - //Warnings - int warningsCount = ThreadLocalsHolder.getCollector().warningsCaught(); - if (warningsCount > 0) { - List warningsStrings = ThreadLocalsHolder.getCollector().getWarningsReport(); - log.warn("####################################################################################################"); - log.warn("CSAR Warnings found! CSAR name - {}", inputPath); - log.warn("ToscaTemplate - verifyTemplate - {} Parsing Warning{} occurred...", warningsCount, (warningsCount > 1 ? "s" : "")); - for (String s : warningsStrings) { - log.warn("{}. CSAR name - {}", s, inputPath); - } - log.warn("####################################################################################################"); - } - //Criticals - int criticalsCount = ThreadLocalsHolder.getCollector().criticalsCaught(); - if (criticalsCount > 0) { - List criticalStrings = ThreadLocalsHolder.getCollector().getCriticalsReport(); - log.error("####################################################################################################"); - log.error("ToscaTemplate - verifyTemplate - {} Parsing Critical{} occurred...", criticalsCount, (criticalsCount > 1 ? "s" : "")); - for (String s : criticalStrings) { - log.error("{}. 
CSAR name - {}", s, inputPath); + int validationIssuesCaught = ThreadLocalsHolder.getCollector().validationIssuesCaught(); + if (validationIssuesCaught > 0) { + List validationIssueStrings = ThreadLocalsHolder.getCollector().getValidationIssueReport(); + log.trace("####################################################################################################"); + log.trace("ToscaTemplate - verifyTemplate - {} Parsing Critical{} occurred...", validationIssuesCaught, (validationIssuesCaught > 1 ? "s" : "")); + for (String s : validationIssueStrings) { + log.trace("{}. CSAR name - {}", s, inputPath); } - throw new JToscaException(String.format("CSAR Validation Failed. CSAR name - {}. Please check logs for details.", inputPath), JToscaErrorCodes.CSAR_TOSCA_VALIDATION_ERROR.getValue()); + log.trace("####################################################################################################"); } + } public String getPath() { @@ -697,7 +685,7 @@ import logging import os from copy import deepcopy -from toscaparser.common.exception import ExceptionCollector.collector +from toscaparser.common.exception import ValidationIssueCollector.collector from toscaparser.common.exception import InvalidTemplateVersion from toscaparser.common.exception import MissingRequiredFieldError from toscaparser.common.exception import UnknownFieldError @@ -749,7 +737,7 @@ class ToscaTemplate(object): def __init__(self, path=None, parsed_params=None, a_file=True, yaml_dict_tpl=None): - ExceptionCollector.collector.start() + ValidationIssueCollector.collector.start() self.a_file = a_file self.input_path = None self.path = None @@ -770,7 +758,7 @@ class ToscaTemplate(object): if yaml_dict_tpl: self.tpl = yaml_dict_tpl else: - ExceptionCollector.collector.appendException( + ValidationIssueCollector.collector.appendException( ValueError(_('No path or yaml_dict_tpl was provided. 
' 'There is nothing to parse.'))) @@ -790,7 +778,7 @@ class ToscaTemplate(object): self._handle_nested_tosca_templates_with_topology() self.graph = ToscaGraph(self.nodetemplates) - ExceptionCollector.collector.stop() + ValidationIssueCollector.collector.stop() self.verify_template() def _topology_template(self): @@ -926,7 +914,7 @@ class ToscaTemplate(object): def _validate_field(self): version = self._tpl_version() if not version: - ExceptionCollector.collector.appendException( + ValidationIssueCollector.collector.appendException( MissingRequiredFieldError(what='Template', required=DEFINITION_VERSION)) else: @@ -936,12 +924,12 @@ class ToscaTemplate(object): for name in self.tpl: if (name not in SECTIONS and name not in self.ADDITIONAL_SECTIONS.get(version, ())): - ExceptionCollector.collector.appendException( + ValidationIssueCollector.collector.appendException( UnknownFieldError(what='Template', field=name)) def _validate_version(self, version): if version not in self.VALID_TEMPLATE_VERSIONS: - ExceptionCollector.collector.appendException( + ValidationIssueCollector.collector.appendException( InvalidTemplateVersion( what=version, valid_versions=', '. 
join(self.VALID_TEMPLATE_VERSIONS))) @@ -960,23 +948,23 @@ class ToscaTemplate(object): self.a_file = True # the file has been decompressed locally return os.path.join(csar.temp_dir, csar.get_main_template()) else: - ExceptionCollector.collector.appendException( + ValidationIssueCollector.collector.appendException( ValueError(_('"%(path)s" is not a valid file.') % {'path': path})) def verify_template(self): - if ExceptionCollector.collector.exceptionsCaught(): + if ValidationIssueCollector.collector.exceptionsCaught(): if self.input_path: raise ValidationError( message=(_('\nThe input "%(path)s" failed validation with ' 'the following error(s): \n\n\t') % {'path': self.input_path}) + - '\n\t'.join(ExceptionCollector.collector.getExceptionsReport())) + '\n\t'.join(ValidationIssueCollector.collector.getExceptionsReport())) else: raise ValidationError( message=_('\nThe pre-parsed input failed validation with ' 'the following error(s): \n\n\t') + - '\n\t'.join(ExceptionCollector.collector.getExceptionsReport())) + '\n\t'.join(ValidationIssueCollector.collector.getExceptionsReport())) else: if self.input_path: msg = (_('The input "%(path)s" successfully passed ' diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/Triggers.java b/src/main/java/org/openecomp/sdc/toscaparser/api/Triggers.java index 0ec0b5a..1e82a6c 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/Triggers.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/Triggers.java @@ -1,8 +1,9 @@ package org.openecomp.sdc.toscaparser.api; +import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; + import java.util.LinkedHashMap; -import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; import org.openecomp.sdc.toscaparser.api.utils.ValidateUtils; @@ -77,9 +78,9 @@ public class Triggers extends EntityTemplate { } } if(!bFound) { - ThreadLocalsHolder.getCollector().appendException(String.format( + 
ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE249", String.format( "UnknownFieldError: Triggers \"%s\" contains unknown field \"%s\"", - name,key)); + name,key))); } } } @@ -94,9 +95,9 @@ public class Triggers extends EntityTemplate { } } if(!bFound) { - ThreadLocalsHolder.getCollector().appendException(String.format( + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE250", String.format( "UnknownFieldError: Triggers \"%s\" contains unknown field \"%s\"", - name,key)); + name,key))); } } } @@ -127,7 +128,7 @@ public class Triggers extends EntityTemplate { /*python -from toscaparser.common.exception import ExceptionCollector +from toscaparser.common.exception import ValidationIssueCollector from toscaparser.common.exception import UnknownFieldError from toscaparser.entity_template import EntityTemplate @@ -170,14 +171,14 @@ class Triggers(EntityTemplate): def _validate_keys(self): for key in self.trigger_tpl.keys(): if key not in SECTIONS: - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( UnknownFieldError(what='Triggers "%s"' % self.name, field=key)) def _validate_condition(self): for key in self.get_condition(): if key not in CONDITION_KEYNAMES: - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( UnknownFieldError(what='Triggers "%s"' % self.name, field=key)) */ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/UnsupportedType.java b/src/main/java/org/openecomp/sdc/toscaparser/api/UnsupportedType.java index 2bd0197..73858c6 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/UnsupportedType.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/UnsupportedType.java @@ -1,6 +1,7 @@ package org.openecomp.sdc.toscaparser.api; -import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; +import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; + import 
org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; public class UnsupportedType { @@ -27,8 +28,8 @@ public class UnsupportedType { public static boolean validateType(String entityType) { for(String ust: unsupportedTypes) { if(ust.equals(entityType)) { - ThreadLocalsHolder.getCollector().appendException(String.format( - "UnsupportedTypeError: Entity type \"%s\" is not supported",entityType)); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE251", String.format( + "UnsupportedTypeError: Entity type \"%s\" is not supported",entityType))); return true; } } @@ -38,7 +39,7 @@ public class UnsupportedType { /*python -from toscaparser.common.exception import ExceptionCollector +from toscaparser.common.exception import ValidationIssueCollector from toscaparser.common.exception import UnsupportedTypeError from toscaparser.utils.gettextutils import _ @@ -69,7 +70,7 @@ class UnsupportedType(object): @staticmethod def validate_type(entitytype): if entitytype in UnsupportedType.un_supported_types: - ExceptionCollector.appendException(UnsupportedTypeError( + ValidationIssueCollector.appendException(UnsupportedTypeError( what=_('%s') % entitytype)) return True diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/common/ExceptionCollector.java b/src/main/java/org/openecomp/sdc/toscaparser/api/common/ExceptionCollector.java deleted file mode 100644 index fa65ae4..0000000 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/common/ExceptionCollector.java +++ /dev/null @@ -1,122 +0,0 @@ -package org.openecomp.sdc.toscaparser.api.common; - -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -// Perfectly good enough... 
- -public class ExceptionCollector { - - private static Logger log = LoggerFactory.getLogger(ExceptionCollector.class.getName()); - - private Map notAnalyzedExceptions = new HashMap<>(); - private Map criticalExceptions = new HashMap<>(); - private Map warningExceptions = new HashMap<>(); - - private boolean bWantTrace = true; - private String filePath; - - public enum ReportType {WARNING, CRITICAL, NOT_ANALYZED} - - public ExceptionCollector(String filePath) { - this.filePath = filePath; - } - - public void appendException(String exception) { - - addException(exception, ReportType.NOT_ANALYZED); - } - - public void appendCriticalException(String exception) { - - addException(exception, ReportType.CRITICAL); - } - - public void appendWarning(String exception) { - - addException(exception, ReportType.WARNING); - } - - private void addException(String exception, ReportType type) { - - Map exceptions = getExceptionCollection(type); - - if (!exceptions.containsKey(exception)) { - // get stack trace - StackTraceElement[] ste = Thread.currentThread().getStackTrace(); - StringBuilder sb = new StringBuilder(); - // skip the last 2 (getStackTrace and this) - for (int i = 2; i < ste.length; i++) { - sb.append(String.format(" %s(%s:%d)%s", ste[i].getClassName(), ste[i].getFileName(), - ste[i].getLineNumber(), i == ste.length - 1 ? 
" " : "\n")); - } - exceptions.put(exception, sb.toString()); - } - } - - public List getCriticalsReport() { - - return getReport(ReportType.CRITICAL); - } - - public List getNotAnalyzedExceptionsReport() { - - return getReport(ReportType.NOT_ANALYZED); - } - - public List getWarningsReport() { - - return getReport(ReportType.WARNING); - } - - private List getReport(ReportType type) { - Map collectedExceptions = getExceptionCollection(type); - - List report = new ArrayList<>(); - if (collectedExceptions.size() > 0) { - for (Map.Entry exception : collectedExceptions.entrySet()) { - report.add(exception.getKey()); - if (bWantTrace) { - report.add(exception.getValue()); - } - } - } - - return report; - } - - private Map getExceptionCollection(ReportType type) { - switch (type) { - case WARNING: - return warningExceptions; - case CRITICAL: - return criticalExceptions; - case NOT_ANALYZED: - return notAnalyzedExceptions; - default: - return notAnalyzedExceptions; - } - } - - public int errorsNotAnalyzedCaught() { - return notAnalyzedExceptions.size(); - } - - public int criticalsCaught() { - return criticalExceptions.size(); - } - - public int warningsCaught() { - return warningExceptions.size(); - } - - public void setWantTrace(boolean b) { - bWantTrace = b; - } - -} diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/common/JToscaException.java b/src/main/java/org/openecomp/sdc/toscaparser/api/common/JToscaException.java index 6cd5872..f97e6ad 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/common/JToscaException.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/common/JToscaException.java @@ -18,10 +18,10 @@ public class JToscaException extends Exception { this.code = code; } - //JT1001 - Meta file missing - //JT1002 - Invalid yaml content - //JT1003 - Entry-Definition not defined in meta file - //JT1004 - Entry-Definition file missing - //JT1005 - General Error - //JT1006 - General Error/Path not valid + //JE1001 - Meta file missing + 
//JE1002 - Invalid yaml content + //JE1003 - Entry-Definition not defined in meta file + //JE1004 - Entry-Definition file missing + //JE1005 - General Error + //JE1006 - General Error/Path not valid } diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/common/JToscaValidationIssue.java b/src/main/java/org/openecomp/sdc/toscaparser/api/common/JToscaValidationIssue.java new file mode 100644 index 0000000..5caba26 --- /dev/null +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/common/JToscaValidationIssue.java @@ -0,0 +1,35 @@ +package org.openecomp.sdc.toscaparser.api.common; + +public class JToscaValidationIssue { + + private String code; + private String message; + + + public JToscaValidationIssue(String code, String message) { + super(); + this.code = code; + this.message = message; + } + + public String getMessage() { + return message; + } + + public void setMessage(String message) { + this.message = message; + } + + public String getCode() { + return code; + } + + public void setCode(String code) { + this.code = code; + } + + @Override + public String toString() { + return "JToscaError [code=" + code + ", message=" + message + "]"; + } +} diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/common/ValidationIssueCollector.java b/src/main/java/org/openecomp/sdc/toscaparser/api/common/ValidationIssueCollector.java new file mode 100644 index 0000000..6e2961a --- /dev/null +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/common/ValidationIssueCollector.java @@ -0,0 +1,35 @@ +package org.openecomp.sdc.toscaparser.api.common; + +import java.util.*; + +// Perfectly good enough... 
+ +public class ValidationIssueCollector { + + private Map validationIssues = new HashMap(); + public void appendValidationIssue(JToscaValidationIssue issue) { + + validationIssues.put(issue.getMessage(),issue); + + } + + public List getValidationIssueReport() { + List report = new ArrayList<>(); + if (!validationIssues.isEmpty()) { + for (JToscaValidationIssue exception : validationIssues.values()) { + report.add("["+exception.getCode()+"]: "+ exception.getMessage()); + } + } + + return report; + } + public Map getValidationIssues() { + return validationIssues; + } + + + public int validationIssuesCaught() { + return validationIssues.size(); + } + +} diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/EntityType.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/EntityType.java index 650166d..70f7ae7 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/EntityType.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/EntityType.java @@ -203,7 +203,7 @@ public class EntityType { // doesn't do anything except emit an exception anyway //if not hasattr(self, 'defs'): // defs = None - // ExceptionCollector.appendException( + // ValidationIssueCollector.appendException( // ValidationError(message="defs is " + str(defs))) //else: // defs = self.defs @@ -266,7 +266,7 @@ public class EntityType { /*python -from toscaparser.common.exception import ExceptionCollector +from toscaparser.common.exception import ValidationIssueCollector from toscaparser.common.exception import ValidationError from toscaparser.extensions.exttools import ExtTools import org.openecomp.sdc.toscaparser.api.utils.yamlparser @@ -384,7 +384,7 @@ class EntityType(object): value = None if not hasattr(self, 'defs'): defs = None - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( ValidationError(message="defs is " + str(defs))) else: defs = self.defs diff --git 
a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/GroupType.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/GroupType.java index d226b78..0f6ae6d 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/GroupType.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/GroupType.java @@ -1,8 +1,9 @@ package org.openecomp.sdc.toscaparser.api.elements; +import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; + import java.util.LinkedHashMap; -import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; public class GroupType extends StatefulEntityType { @@ -92,9 +93,9 @@ public class GroupType extends StatefulEntityType { } } if(!bFound) { - ThreadLocalsHolder.getCollector().appendException(String.format( + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE120", String.format( "UnknownFieldError: Group Type \"%s\" contains unknown field \"%s\"", - groupType,name)); + groupType,name))); } } } @@ -104,18 +105,18 @@ public class GroupType extends StatefulEntityType { private void _validateMetadata(LinkedHashMap metadata) { String mtt = (String) metadata.get("type"); if(mtt != null && !mtt.equals("map") && !mtt.equals("tosca:map")) { - ThreadLocalsHolder.getCollector().appendException(String.format( + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE121", String.format( "InvalidTypeError: \"%s\" defined in group for metadata is invalid", - mtt)); + mtt))); } for(String entrySchema: metadata.keySet()) { Object estob = metadata.get(entrySchema); if(estob instanceof LinkedHashMap) { String est = (String)((LinkedHashMap)estob).get("type"); if(!est.equals("string")) { - ThreadLocalsHolder.getCollector().appendException(String.format( + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE122", String.format( "InvalidTypeError: \"%s\" 
defined in group for metadata \"%s\" is invalid", - est,entrySchema)); + est,entrySchema))); } } } @@ -130,7 +131,7 @@ public class GroupType extends StatefulEntityType { /*python -from toscaparser.common.exception import ExceptionCollector +from toscaparser.common.exception import ValidationIssueCollector from toscaparser.common.exception import InvalidTypeError from toscaparser.common.exception import UnknownFieldError from toscaparser.elements.statefulentitytype import StatefulEntityType @@ -195,19 +196,19 @@ class GroupType(StatefulEntityType): if self.defs: for name in self.defs.keys(): if name not in self.SECTIONS: - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( UnknownFieldError(what='Group Type %s' % self.grouptype, field=name)) def _validate_metadata(self, meta_data): if not meta_data.get('type') in ['map', 'tosca:map']: - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( InvalidTypeError(what='"%s" defined in group for ' 'metadata' % (meta_data.get('type')))) for entry_schema, entry_schema_type in meta_data.items(): if isinstance(entry_schema_type, dict) and not \ entry_schema_type.get('type') == 'string': - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( InvalidTypeError(what='"%s" defined in group for ' 'metadata "%s"' % (entry_schema_type.get('type'), diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/InterfacesDef.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/InterfacesDef.java index 8a2b4dd..c13fd96 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/InterfacesDef.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/InterfacesDef.java @@ -1,11 +1,12 @@ package org.openecomp.sdc.toscaparser.api.elements; +import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; + import java.util.ArrayList; import java.util.LinkedHashMap; import java.util.Map; import 
org.openecomp.sdc.toscaparser.api.EntityTemplate; -import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; public class InterfacesDef extends StatefulEntityType { @@ -91,9 +92,9 @@ public class InterfacesDef extends StatefulEntityType { inputs = (LinkedHashMap)me.getValue(); } else { - ThreadLocalsHolder.getCollector().appendException(String.format( + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE123", String.format( "UnknownFieldError: \"interfaces\" of template \"%s\" contain unknown field \"%s\"", - nodeTemplate.getName(),me.getKey())); + nodeTemplate.getName(),me.getKey()))); } } } @@ -150,7 +151,7 @@ public class InterfacesDef extends StatefulEntityType { # License for the specific language governing permissions and limitations # under the License. -from toscaparser.common.exception import ExceptionCollector +from toscaparser.common.exception import ValidationIssueCollector from toscaparser.common.exception import UnknownFieldError from toscaparser.elements.statefulentitytype import StatefulEntityType @@ -203,7 +204,7 @@ class InterfacesDef(StatefulEntityType): else: what = ('"interfaces" of template "%s"' % self.node_template.name) - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( UnknownFieldError(what=what, field=i)) else: self.implementation = value diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/NodeType.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/NodeType.java index d5f1a18..157e016 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/NodeType.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/NodeType.java @@ -1,11 +1,11 @@ package org.openecomp.sdc.toscaparser.api.elements; +import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; + import java.util.ArrayList; import java.util.LinkedHashMap; import java.util.Map; 
-import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; -import org.openecomp.sdc.toscaparser.api.elements.InterfacesDef; import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; public class NodeType extends StatefulEntityType { @@ -315,8 +315,8 @@ public class NodeType extends StatefulEntityType { } } if(!bFound) { - ThreadLocalsHolder.getCollector().appendException(String.format( - "UnknownFieldError: Nodetype \"%s\" has unknown field \"%s\"",ntype,key)); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE124", String.format( + "UnknownFieldError: Nodetype \"%s\" has unknown field \"%s\"",ntype,key))); } } } @@ -326,7 +326,7 @@ public class NodeType extends StatefulEntityType { /*python -from toscaparser.common.exception import ExceptionCollector +from toscaparser.common.exception import ValidationIssueCollector from toscaparser.common.exception import UnknownFieldError from toscaparser.elements.capabilitytype import CapabilityTypeDef import org.openecomp.sdc.toscaparser.api.elements.interfaces as ifaces @@ -517,7 +517,7 @@ class NodeType(StatefulEntityType): if self.defs: for key in self.defs.keys(): if key not in self.SECTIONS: - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( UnknownFieldError(what='Nodetype"%s"' % self.ntype, field=key)) */ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/PolicyType.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/PolicyType.java index c60bed1..b046a48 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/PolicyType.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/PolicyType.java @@ -1,9 +1,10 @@ package org.openecomp.sdc.toscaparser.api.elements; +import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; + import java.util.ArrayList; import java.util.LinkedHashMap; -import 
org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; import org.openecomp.sdc.toscaparser.api.utils.TOSCAVersionProperty; import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; @@ -135,9 +136,9 @@ public class PolicyType extends StatefulEntityType { } } if(!bFound) { - ThreadLocalsHolder.getCollector().appendException(String.format( + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE125", String.format( "UnknownFieldError: Policy \"%s\" contains unknown field \"%s\"", - type,key)); + type,key))); } } } @@ -146,9 +147,9 @@ public class PolicyType extends StatefulEntityType { LinkedHashMap _customDef) { for(String nodetype: _targetsList) { if(_customDef.get(nodetype) == null) { - ThreadLocalsHolder.getCollector().appendException(String.format( + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE126", String.format( "InvalidTypeError: \"%s\" defined in targets for policy \"%s\"", - nodetype,type)); + nodetype,type))); } } @@ -157,9 +158,9 @@ public class PolicyType extends StatefulEntityType { private void _validateMetadata(LinkedHashMap _metaData) { String mtype = (String)_metaData.get("type"); if(mtype != null && !mtype.equals("map") && !mtype.equals("tosca:map")) { - ThreadLocalsHolder.getCollector().appendException(String.format( + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE127", String.format( "InvalidTypeError: \"%s\" defined in policy for metadata", - mtype)); + mtype))); } for(String entrySchema: metaData.keySet()) { Object estob = metaData.get(entrySchema); @@ -167,9 +168,9 @@ public class PolicyType extends StatefulEntityType { String est = (String) ((LinkedHashMap)estob).get("type"); if(!est.equals("string")) { - ThreadLocalsHolder.getCollector().appendException(String.format( + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE128", String.format( "InvalidTypeError: \"%s\" defined in policy 
for metadata \"%s\"", - est,entrySchema)); + est,entrySchema))); } } } @@ -179,7 +180,7 @@ public class PolicyType extends StatefulEntityType { /*python -from toscaparser.common.exception import ExceptionCollector +from toscaparser.common.exception import ValidationIssueCollector from toscaparser.common.exception import InvalidTypeError from toscaparser.common.exception import UnknownFieldError from toscaparser.elements.statefulentitytype import StatefulEntityType @@ -262,27 +263,27 @@ class PolicyType(StatefulEntityType): def _validate_keys(self): for key in self.defs.keys(): if key not in self.SECTIONS: - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( UnknownFieldError(what='Policy "%s"' % self.type, field=key)) def _validate_targets(self, targets_list, custom_def): for nodetype in targets_list: if nodetype not in custom_def: - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( InvalidTypeError(what='"%s" defined in targets for ' 'policy "%s"' % (nodetype, self.type))) def _validate_metadata(self, meta_data): if not meta_data.get('type') in ['map', 'tosca:map']: - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( InvalidTypeError(what='"%s" defined in policy for ' 'metadata' % (meta_data.get('type')))) for entry_schema, entry_schema_type in meta_data.items(): if isinstance(entry_schema_type, dict) and not \ entry_schema_type.get('type') == 'string': - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( InvalidTypeError(what='"%s" defined in policy for ' 'metadata "%s"' % (entry_schema_type.get('type'), diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/PortSpec.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/PortSpec.java index 8d490ee..1a6745c 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/PortSpec.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/PortSpec.java @@ -1,9 
+1,10 @@ package org.openecomp.sdc.toscaparser.api.elements; +import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; + import java.util.LinkedHashMap; import org.openecomp.sdc.toscaparser.api.DataEntity; -import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; import org.openecomp.sdc.toscaparser.api.utils.ValidateUtils; @@ -53,9 +54,9 @@ public class PortSpec { // verify one of the specified values is set if(source == null && sourceRange == null && target == null && targetRange == null) { - ThreadLocalsHolder.getCollector().appendException(String.format( + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE129", String.format( "InvalidTypeAdditionalRequirementsError: Additional requirements for type \"%s\" not met", - TYPE_URI)); + TYPE_URI))); } // Validate source value is in specified range if(source != null && sourceRange != null) { @@ -75,9 +76,9 @@ public class PortSpec { } } catch(Exception e) { - ThreadLocalsHolder.getCollector().appendException(String.format( + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE130", String.format( "ValueError: \"%s\" do not meet requirements for type \"%s\"", - _properties.toString(),SHORTNAME)); + _properties.toString(),SHORTNAME))); } } @@ -85,7 +86,7 @@ public class PortSpec { /*python -from toscaparser.common.exception import ExceptionCollector +from toscaparser.common.exception import ValidationIssueCollector from toscaparser.common.exception import InvalidTypeAdditionalRequirementsError from toscaparser.utils.gettextutils import _ import org.openecomp.sdc.toscaparser.api.utils.validateutils as validateutils @@ -132,7 +133,7 @@ class PortSpec(object): # verify one of the specified values is set if source is None and source_range is None and \ target is None and target_range is None: - ExceptionCollector.appendException( + 
ValidationIssueCollector.appendException( InvalidTypeAdditionalRequirementsError( type=PortSpec.TYPE_URI)) # Validate source value is in specified range @@ -155,6 +156,6 @@ class PortSpec(object): msg = _('"%(value)s" do not meet requirements ' 'for type "%(type)s".') \ % {'value': properties, 'type': PortSpec.SHORTNAME} - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( ValueError(msg)) */ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/PropertyDef.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/PropertyDef.java index c139eb6..bdd376b 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/PropertyDef.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/PropertyDef.java @@ -3,7 +3,7 @@ package org.openecomp.sdc.toscaparser.api.elements; import java.util.LinkedHashMap; import java.util.Map; -import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; +import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; public class PropertyDef { @@ -46,8 +46,8 @@ public class PropertyDef { if(schema.get("type") == null) { //msg = (_('Schema definition of "%(pname)s" must have a "type" ' // 'attribute.') % dict(pname=self.name)) - ThreadLocalsHolder.getCollector().appendException(String.format( - "InvalidSchemaError: Schema definition of \"%s\" must have a \"type\" attribute",name)); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE131", String.format( + "InvalidSchemaError: Schema definition of \"%s\" must have a \"type\" attribute",name))); } _loadRequiredAttrFromSchema(); _loadStatusAttrFromSchema(); @@ -84,9 +84,9 @@ public class PropertyDef { // attr, // value, // valid_values) - ThreadLocalsHolder.getCollector().appendException(String.format( + ThreadLocalsHolder.getCollector().appendValidationIssue(new 
JToscaValidationIssue("JE132", String.format( "Schema definition of \"%s\" has \"required\" attribute with an invalid value", - name)); + name))); } } } @@ -117,9 +117,9 @@ public class PropertyDef { // attr, // value, // valid_values) - ThreadLocalsHolder.getCollector().appendWarning(String.format( + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE006", String.format( "Schema definition of \"%s\" has \"status\" attribute with an invalid value", - name)); + name))); } } } @@ -140,7 +140,7 @@ public class PropertyDef { } /*python -from toscaparser.common.exception import ExceptionCollector +from toscaparser.common.exception import ValidationIssueCollector from toscaparser.common.exception import InvalidSchemaError from toscaparser.common.exception import TOSCAException from toscaparser.utils.gettextutils import _ @@ -176,7 +176,7 @@ class PropertyDef(object): except KeyError: msg = (_('Schema definition of "%(pname)s" must have a "type" ' 'attribute.') % dict(pname=self.name)) - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( InvalidSchemaError(message=msg)) if self.schema: diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/RelationshipType.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/RelationshipType.java index 3903941..3a56840 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/RelationshipType.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/RelationshipType.java @@ -1,10 +1,9 @@ package org.openecomp.sdc.toscaparser.api.elements; +import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; + import java.util.LinkedHashMap; -import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; -import org.openecomp.sdc.toscaparser.api.elements.EntityType; -import org.openecomp.sdc.toscaparser.api.elements.StatefulEntityType; import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; public class 
RelationshipType extends StatefulEntityType { @@ -54,8 +53,8 @@ public class RelationshipType extends StatefulEntityType { } } if(!bFound) { - ThreadLocalsHolder.getCollector().appendException(String.format( - "UnknownFieldError: Relationshiptype \"%s\" has unknown field \"%s\"",type,key)); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE133", String.format( + "UnknownFieldError: Relationshiptype \"%s\" has unknown field \"%s\"",type,key))); } } } @@ -63,7 +62,7 @@ public class RelationshipType extends StatefulEntityType { /*python -from toscaparser.common.exception import ExceptionCollector +from toscaparser.common.exception import ValidationIssueCollector from toscaparser.common.exception import UnknownFieldError from toscaparser.elements.statefulentitytype import StatefulEntityType @@ -97,7 +96,7 @@ class RelationshipType(StatefulEntityType): def _validate_keys(self): for key in self.defs.keys(): if key not in self.SECTIONS: - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( UnknownFieldError(what='Relationshiptype "%s"' % self.type, field=key)) */ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/ScalarUnit.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/ScalarUnit.java index de18cd6..77cd4c2 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/ScalarUnit.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/ScalarUnit.java @@ -4,7 +4,7 @@ import java.util.HashMap; import java.util.regex.Matcher; import java.util.regex.Pattern; -import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; +import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; import org.openecomp.sdc.toscaparser.api.utils.ValidateUtils; import org.slf4j.Logger; @@ -52,9 +52,9 @@ public abstract class ScalarUnit { return key; } } - 
ThreadLocalsHolder.getCollector().appendWarning(String.format( + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE007", String.format( "'The unit \"%s\" is not valid. Valid units are \n%s", - inputUnit,SCALAR_UNIT_DICT.keySet().toString())); + inputUnit,SCALAR_UNIT_DICT.keySet().toString()))); return inputUnit; } } @@ -68,8 +68,8 @@ public abstract class ScalarUnit { value = matcher.group(1) + " " + scalarUnit; } else { - ThreadLocalsHolder.getCollector().appendException(String.format( - "ValueError: \"%s\" is not a valid scalar-unit",value.toString())); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE134", String.format( + "ValueError: \"%s\" is not a valid scalar-unit",value.toString()))); } return value; } @@ -136,8 +136,8 @@ public abstract class ScalarUnit { if(type.equals(SCALAR_UNIT_FREQUENCY)) { return (new ScalarUnitFrequency(value)).getNumFromScalarUnit(unit); } - ThreadLocalsHolder.getCollector().appendException(String.format( - "TypeError: \"%s\" is not a valid scalar-unit type",type)); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE135", String.format( + "TypeError: \"%s\" is not a valid scalar-unit type",type))); return 0.0; } @@ -145,7 +145,7 @@ public abstract class ScalarUnit { /*python -from toscaparser.common.exception import ExceptionCollector +from toscaparser.common.exception import ValidationIssueCollector from toscaparser.utils.gettextutils import _ from toscaparser.utils import validateutils @@ -184,7 +184,7 @@ class ScalarUnit(object): '"%(valid_units)s".') % {'unit': input_unit, 'valid_units': sorted(self.SCALAR_UNIT_DICT.keys())}) - ExceptionCollector.appendException(ValueError(msg)) + ValidationIssueCollector.appendException(ValueError(msg)) def validate_scalar_unit(self): regex = re.compile('([0-9.]+)\s*(\w+)') @@ -196,7 +196,7 @@ class ScalarUnit(object): return self.value except Exception: - 
ExceptionCollector.appendException( + ValidationIssueCollector.appendException( ValueError(_('"%s" is not a valid scalar-unit.') % self.value)) @@ -257,6 +257,6 @@ def get_scalarunit_value(type, value, unit=None): return (ScalarUnit_Class(value). get_num_from_scalar_unit(unit)) else: - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( TypeError(_('"%s" is not a valid scalar-unit type.') % type)) */ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/StatefulEntityType.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/StatefulEntityType.java index 5ab816f..0a83cb1 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/StatefulEntityType.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/StatefulEntityType.java @@ -1,14 +1,12 @@ package org.openecomp.sdc.toscaparser.api.elements; +import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; + import java.util.ArrayList; import java.util.LinkedHashMap; import java.util.Map; import org.openecomp.sdc.toscaparser.api.UnsupportedType; -import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; -import org.openecomp.sdc.toscaparser.api.elements.AttributeDef; -import org.openecomp.sdc.toscaparser.api.elements.EntityType; -import org.openecomp.sdc.toscaparser.api.elements.PropertyDef; import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; @@ -49,8 +47,8 @@ public class StatefulEntityType extends EntityType { } else{ defs = null; - ThreadLocalsHolder.getCollector().appendException(String.format( - "InvalidTypeError: \"%s\" is not a valid type",entityType)); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE136", String.format( + "InvalidTypeError: \"%s\" is not a valid type",entityType))); } } type = entityType; @@ -67,8 +65,8 @@ public class StatefulEntityType extends EntityType { Object to = me.getValue(); if(to == null || !(to 
instanceof LinkedHashMap)) { String s = to == null ? "null" : to.getClass().getSimpleName(); - ThreadLocalsHolder.getCollector().appendException(String.format( - "Unexpected type error: property \"%s\" has type \"%s\" (expected dict)",pdname,s)); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE137", String.format( + "Unexpected type error: property \"%s\" has type \"%s\" (expected dict)",pdname,s))); continue; } LinkedHashMap pdschema = (LinkedHashMap)to; @@ -175,7 +173,7 @@ class StatefulEntityType(EntityType): self.defs = custom_def[entitytype] else: self.defs = None - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( InvalidTypeError(what=entitytype)) self.type = entitytype diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/TypeValidation.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/TypeValidation.java index 2caf5c4..50c9739 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/TypeValidation.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/TypeValidation.java @@ -1,9 +1,10 @@ package org.openecomp.sdc.toscaparser.api.elements; +import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; + import java.util.ArrayList; import java.util.LinkedHashMap; -import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; import org.openecomp.sdc.toscaparser.api.extensions.ExtTools; import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; @@ -71,9 +72,9 @@ public class TypeValidation { } } if(!bFound) { - ThreadLocalsHolder.getCollector().appendException(String.format( + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE138", String.format( "UnknownFieldError: Template \"%s\" contains unknown field \"%s\"", - importDef.toString(),name)); + importDef.toString(),name))); } } } @@ -89,17 +90,17 @@ public class TypeValidation { } } if(!bFound) { - 
ThreadLocalsHolder.getCollector().appendException(String.format( + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE139", String.format( "InvalidTemplateVersion: version \"%s\" in \"%s\" is not supported\n" + "Allowed versions: [%s]", - sVersion,importDef.toString(),allowed)); + sVersion,importDef.toString(),allowed))); } } } /*python -from toscaparser.common.exception import ExceptionCollector +from toscaparser.common.exception import ValidationIssueCollector from toscaparser.common.exception import InvalidTemplateVersion from toscaparser.common.exception import UnknownFieldError from toscaparser.extensions.exttools import ExtTools @@ -136,14 +137,14 @@ class TypeValidation(object): for name in custom_type: if name not in self.ALLOWED_TYPE_SECTIONS: - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( # UnknownFieldError(what='Template ' + (self.import_def), UnknownFieldError(what= (self.import_def), field=name)) def _validate_type_version(self, version): if version not in self.VALID_TEMPLATE_VERSIONS: - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( InvalidTemplateVersion( # what=version + ' in ' + self.import_def, what=self.import_def, diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Constraint.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Constraint.java index 3c60a66..253cc0c 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Constraint.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Constraint.java @@ -3,7 +3,7 @@ package org.openecomp.sdc.toscaparser.api.elements.constraints; import java.util.ArrayList; import java.util.LinkedHashMap; -import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; +import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; import org.openecomp.sdc.toscaparser.api.elements.ScalarUnit; 
import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; @@ -35,8 +35,8 @@ public abstract class Constraint { if(!(constraint instanceof LinkedHashMap) || ((LinkedHashMap)constraint).size() != 1) { - ThreadLocalsHolder.getCollector().appendException( - "InvalidSchemaError: Invalid constraint schema " + constraint.toString()); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE101", + "InvalidSchemaError: Invalid constraint schema " + constraint.toString())); } if(constraintClass.equals(EQUAL)) { @@ -73,8 +73,8 @@ public abstract class Constraint { return new Pattern(propname,proptype,constraint); } else { - ThreadLocalsHolder.getCollector().appendException(String.format( - "InvalidSchemaError: Invalid property \"%s\"",constraintClass)); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE102", String.format( + "InvalidSchemaError: Invalid property \"%s\"",constraintClass))); return null; } } @@ -117,9 +117,9 @@ public abstract class Constraint { } } if(!bFound) { - ThreadLocalsHolder.getCollector().appendException(String.format( + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE103", String.format( "InvalidSchemaError: Property \"%s\" is not valid for data type \"%s\"", - constraintKey,propertyType)); + constraintKey,propertyType))); } } @@ -153,7 +153,7 @@ public abstract class Constraint { value = ScalarUnit.getScalarunitValue(propertyType,value,null); } if(!_isValid(value)) { - ThreadLocalsHolder.getCollector().appendWarning("ValidationError: " + _errMsg(value)); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE008", "ValidationError: " + _errMsg(value))); } } @@ -183,14 +183,14 @@ class Constraint(object): if(not isinstance(constraint, collections.Mapping) or len(constraint) != 1): - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( InvalidSchemaError(message=_('Invalid 
constraint schema.'))) for type in constraint.keys(): ConstraintClass = get_constraint_class(type) if not ConstraintClass: msg = _('Invalid property "%s".') % type - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( InvalidSchemaError(message=msg)) return ConstraintClass(property_name, property_type, constraint) @@ -208,7 +208,7 @@ class Constraint(object): '"%(dtype)s".') % dict( ctype=self.constraint_key, dtype=property_type) - ExceptionCollector.appendException(InvalidSchemaError(message=msg)) + ValidationIssueCollector.appendException(InvalidSchemaError(message=msg)) def _get_scalarunit_constraint_value(self): if self.property_type in scalarunit.ScalarUnit.SCALAR_UNIT_TYPES: @@ -230,7 +230,7 @@ class Constraint(object): value = scalarunit.get_scalarunit_value(self.property_type, value) if not self._is_valid(value): err_msg = self._err_msg(value) - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( ValidationError(message=err_msg)) diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/GreaterOrEqual.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/GreaterOrEqual.java index 021bed3..ad6183e 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/GreaterOrEqual.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/GreaterOrEqual.java @@ -1,8 +1,9 @@ package org.openecomp.sdc.toscaparser.api.elements.constraints; +import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; + import java.util.Date; -import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; import org.openecomp.sdc.toscaparser.api.functions.Function; import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; @@ -38,7 +39,7 @@ public class GreaterOrEqual extends Constraint { super(name,type,c); if(!validTypes.contains(constraintValue.getClass().getSimpleName())) { - 
ThreadLocalsHolder.getCollector().appendException("InvalidSchemaError: The property \"greater_or_equal\" expects comparable values"); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE104", "InvalidSchemaError: The property \"greater_or_equal\" expects comparable values")); } } diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/GreaterThan.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/GreaterThan.java index d23d7ce..b9e06b3 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/GreaterThan.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/GreaterThan.java @@ -1,8 +1,9 @@ package org.openecomp.sdc.toscaparser.api.elements.constraints; +import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; + import java.util.Date; -import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; public class GreaterThan extends Constraint { @@ -35,7 +36,7 @@ public class GreaterThan extends Constraint { super(name,type,c); if(!validTypes.contains(constraintValue.getClass().getSimpleName())) { - ThreadLocalsHolder.getCollector().appendException("InvalidSchemaError: The property \"greater_than\" expects comparable values"); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE105", "InvalidSchemaError: The property \"greater_than\" expects comparable values")); } } @@ -83,7 +84,7 @@ class GreaterThan(Constraint): super(GreaterThan, self).__init__(property_name, property_type, constraint) if not isinstance(constraint[self.GREATER_THAN], self.valid_types): - ExceptionCollector.appendException( + ValidationIsshueCollector.appendException( InvalidSchemaError(message=_('The property "greater_than" ' 'expects comparable values.'))) diff --git 
a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/InRange.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/InRange.java index 282267d..7d0d654 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/InRange.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/InRange.java @@ -1,8 +1,9 @@ package org.openecomp.sdc.toscaparser.api.elements.constraints; +import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; + import java.util.Date; -import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; import java.util.ArrayList; @@ -46,7 +47,7 @@ public class InRange extends Constraint { super(name,type,c); if(!(constraintValue instanceof ArrayList) || ((ArrayList)constraintValue).size() != 2) { - ThreadLocalsHolder.getCollector().appendException("InvalidSchemaError: The property \"in_range\" expects a list"); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE106", "InvalidSchemaError: The property \"in_range\" expects a list")); } @@ -54,11 +55,11 @@ public class InRange extends Constraint { String msg = "The property \"in_range\" expects comparable values"; for(Object vo: alcv) { if(!validTypes.contains(vo.getClass().getSimpleName())) { - ThreadLocalsHolder.getCollector().appendException("InvalidSchemaError: " + msg); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE107", "InvalidSchemaError: " + msg)); } // The only string we allow for range is the special value 'UNBOUNDED' if((vo instanceof String) && !((String)vo).equals(UNBOUNDED)) { - ThreadLocalsHolder.getCollector().appendException("InvalidSchemaError: " + msg); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE108", "InvalidSchemaError: " + msg)); } } min = alcv.get(0); @@ -129,19 +130,19 @@ class InRange(Constraint): 
super(InRange, self).__init__(property_name, property_type, constraint) if(not isinstance(self.constraint_value, collections.Sequence) or (len(constraint[self.IN_RANGE]) != 2)): - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( InvalidSchemaError(message=_('The property "in_range" ' 'expects a list.'))) msg = _('The property "in_range" expects comparable values.') for value in self.constraint_value: if not isinstance(value, self.valid_types): - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( InvalidSchemaError(message=msg)) # The only string we allow for range is the special value # 'UNBOUNDED' if(isinstance(value, str) and value != self.UNBOUNDED): - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( InvalidSchemaError(message=msg)) self.min = self.constraint_value[0] diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Length.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Length.java index 4cfd1c0..c94cda5 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Length.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Length.java @@ -1,6 +1,7 @@ package org.openecomp.sdc.toscaparser.api.elements.constraints; -import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; +import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; + import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; public class Length extends Constraint { @@ -23,7 +24,7 @@ public class Length extends Constraint { super(name,type,c); if(!validTypes.contains(constraintValue.getClass().getSimpleName())) { - ThreadLocalsHolder.getCollector().appendException("InvalidSchemaError: The property \"length\" expects an integer"); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE109", "InvalidSchemaError: The property \"length\" expects 
an integer")); } } @@ -60,7 +61,7 @@ public class Length extends Constraint { def __init__(self, property_name, property_type, constraint): super(Length, self).__init__(property_name, property_type, constraint) if not isinstance(self.constraint_value, self.valid_types): - ExceptionCollector.appendException( + ValidationIsshueCollector.appendException( InvalidSchemaError(message=_('The property "length" expects ' 'an integer.'))) diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/LessOrEqual.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/LessOrEqual.java index 00cba36..1601e27 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/LessOrEqual.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/LessOrEqual.java @@ -1,8 +1,9 @@ package org.openecomp.sdc.toscaparser.api.elements.constraints; +import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; + import java.util.Date; -import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; public class LessOrEqual extends Constraint { @@ -37,7 +38,7 @@ public class LessOrEqual extends Constraint { super(name,type,c); if(!validTypes.contains(constraintValue.getClass().getSimpleName())) { - ThreadLocalsHolder.getCollector().appendException("InvalidSchemaError: The property \"less_or_equal\" expects comparable values"); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE110", "InvalidSchemaError: The property \"less_or_equal\" expects comparable values")); } } @@ -87,7 +88,7 @@ class LessOrEqual(Constraint): super(LessOrEqual, self).__init__(property_name, property_type, constraint) if not isinstance(self.constraint_value, self.valid_types): - ExceptionCollector.appendException( + ValidationIsshueCollector.appendException( InvalidSchemaError(message=_('The property "less_or_equal" ' 'expects 
comparable values.'))) diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/LessThan.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/LessThan.java index eb5a41d..b867fa7 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/LessThan.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/LessThan.java @@ -1,8 +1,9 @@ package org.openecomp.sdc.toscaparser.api.elements.constraints; +import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; + import java.util.Date; -import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; public class LessThan extends Constraint { @@ -35,7 +36,7 @@ public class LessThan extends Constraint { super(name,type,c); if(!validTypes.contains(constraintValue.getClass().getSimpleName())) { - ThreadLocalsHolder.getCollector().appendException("InvalidSchemaError: The property \"less_than\" expects comparable values"); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE111", "InvalidSchemaError: The property \"less_than\" expects comparable values")); } } @@ -85,7 +86,7 @@ def __init__(self, property_name, property_type, constraint): super(LessThan, self).__init__(property_name, property_type, constraint) if not isinstance(self.constraint_value, self.valid_types): - ExceptionCollector.appendException( + ValidationIsshueCollector.appendException( InvalidSchemaError(message=_('The property "less_than" ' 'expects comparable values.'))) diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/MaxLength.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/MaxLength.java index 278ae85..48ac349 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/MaxLength.java +++ 
b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/MaxLength.java @@ -1,8 +1,9 @@ package org.openecomp.sdc.toscaparser.api.elements.constraints; +import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; + import java.util.LinkedHashMap; -import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; public class MaxLength extends Constraint { @@ -26,7 +27,7 @@ public class MaxLength extends Constraint { super(name,type,c); if(!validTypes.contains(constraintValue.getClass().getSimpleName())) { - ThreadLocalsHolder.getCollector().appendException("InvalidSchemaError: The property \"max_length\" expects an integer"); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE112", "InvalidSchemaError: The property \"max_length\" expects an integer")); } } @@ -70,7 +71,7 @@ class MaxLength(Constraint): super(MaxLength, self).__init__(property_name, property_type, constraint) if not isinstance(self.constraint_value, self.valid_types): - ExceptionCollector.appendException( + ValidationIsshueCollector.appendException( InvalidSchemaError(message=_('The property "max_length" ' 'expects an integer.'))) diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/MinLength.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/MinLength.java index 480c878..0203484 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/MinLength.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/MinLength.java @@ -1,8 +1,9 @@ package org.openecomp.sdc.toscaparser.api.elements.constraints; +import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; + import java.util.LinkedHashMap; -import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; public class MinLength extends 
Constraint { @@ -26,7 +27,7 @@ public class MinLength extends Constraint { super(name,type,c); if(!validTypes.contains(constraintValue.getClass().getSimpleName())) { - ThreadLocalsHolder.getCollector().appendException("InvalidSchemaError: The property \"min_length\" expects an integer"); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE113", "InvalidSchemaError: The property \"min_length\" expects an integer")); } } @@ -70,7 +71,7 @@ class MinLength(Constraint): super(MinLength, self).__init__(property_name, property_type, constraint) if not isinstance(self.constraint_value, self.valid_types): - ExceptionCollector.appendException( + ValidationIsshueCollector.appendException( InvalidSchemaError(message=_('The property "min_length" ' 'expects an integer.'))) diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Pattern.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Pattern.java index 444a73c..a29bac6 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Pattern.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Pattern.java @@ -1,9 +1,10 @@ package org.openecomp.sdc.toscaparser.api.elements.constraints; +import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; + import java.util.regex.Matcher; import java.util.regex.PatternSyntaxException; -import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; public class Pattern extends Constraint { @@ -24,7 +25,7 @@ public class Pattern extends Constraint { super(name,type,c); if(!validTypes.contains(constraintValue.getClass().getSimpleName())) { - ThreadLocalsHolder.getCollector().appendException("InvalidSchemaError: The property \"pattern\" expects a string"); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE114", "InvalidSchemaError: The property 
\"pattern\" expects a string")); } } @@ -32,8 +33,8 @@ public class Pattern extends Constraint { protected boolean _isValid(Object value) { try { if(!(value instanceof String)) { - ThreadLocalsHolder.getCollector().appendException(String.format("ValueError: Input value \"%s\" to \"pattern\" property \"%s\" must be a string", - value.toString(),propertyName)); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE115", String.format("ValueError: Input value \"%s\" to \"pattern\" property \"%s\" must be a string", + value.toString(),propertyName))); return false; } String strp = constraintValue.toString(); @@ -46,8 +47,8 @@ public class Pattern extends Constraint { return false; } catch(PatternSyntaxException pse) { - ThreadLocalsHolder.getCollector().appendException(String.format("ValueError: Invalid regex \"%s\" in \"pattern\" property \"%s\"", - constraintValue.toString(),propertyName)); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE116", String.format("ValueError: Invalid regex \"%s\" in \"pattern\" property \"%s\"", + constraintValue.toString(),propertyName))); return false; } } @@ -78,7 +79,7 @@ class Pattern(Constraint): def __init__(self, property_name, property_type, constraint): super(Pattern, self).__init__(property_name, property_type, constraint) if not isinstance(self.constraint_value, self.valid_types): - ExceptionCollector.appendException( + ValidationIsshueCollector.appendException( InvalidSchemaError(message=_('The property "pattern" ' 'expects a string.'))) self.match = re.compile(self.constraint_value).match diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Schema.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Schema.java index ca721e6..d0ee118 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Schema.java +++ 
b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Schema.java @@ -1,11 +1,12 @@ package org.openecomp.sdc.toscaparser.api.elements.constraints; +import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; + import java.util.ArrayList; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.Map; -import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; @@ -71,15 +72,15 @@ public class Schema { if(!(_schemaDict instanceof LinkedHashMap)) { //msg = (_('Schema definition of "%(pname)s" must be a dict.') // % dict(pname=name)) - ThreadLocalsHolder.getCollector().appendException(String.format( - "InvalidSchemaError: Schema definition of \"%s\" must be a dict",name)); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE117", String.format( + "InvalidSchemaError: Schema definition of \"%s\" must be a dict",name))); } if(_schemaDict.get("type") == null) { //msg = (_('Schema definition of "%(pname)s" must have a "type" ' // 'attribute.') % dict(pname=name)) - ThreadLocalsHolder.getCollector().appendException(String.format( - "InvalidSchemaError: Schema definition of \"%s\" must have a \"type\" attribute",name)); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE118", String.format( + "InvalidSchemaError: Schema definition of \"%s\" must have a \"type\" attribute",name))); } schema = _schemaDict; @@ -122,9 +123,9 @@ public class Schema { } else { // error - ThreadLocalsHolder.getCollector().appendException(String.format( + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE119", String.format( "UnknownFieldError: Constraint type \"%s\" for property \"%s\" is not supported", - cClass,name)); + cClass,name))); } break; } @@ -211,14 +212,14 @@ def __init__(self, name, schema_dict): if not isinstance(schema_dict, collections.Mapping): msg = 
(_('Schema definition of "%(pname)s" must be a dict.') % dict(pname=name)) - ExceptionCollector.appendException(InvalidSchemaError(message=msg)) + ValidationIssueCollector.appendException(InvalidSchemaError(message=msg)) try: schema_dict['type'] except KeyError: msg = (_('Schema definition of "%(pname)s" must have a "type" ' 'attribute.') % dict(pname=name)) - ExceptionCollector.appendException(InvalidSchemaError(message=msg)) + ValidationIssueCollector.appendException(InvalidSchemaError(message=msg)) self.schema = schema_dict self._len = None diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Schema.java.orig b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Schema.java.orig index 96eff34..355f505 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Schema.java.orig +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Schema.java.orig @@ -5,7 +5,7 @@ import java.util.HashMap; import java.util.LinkedHashMap; import java.util.Map; -import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; +import org.openecomp.sdc.toscaparser.api.common.ValidationIssueCollector; public class Schema { diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/ValidValues.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/ValidValues.java index 06622e4..60b6be2 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/ValidValues.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/ValidValues.java @@ -62,7 +62,7 @@ def __init__(self, property_name, property_type, constraint): super(ValidValues, self).__init__(property_name, property_type, constraint) if not isinstance(self.constraint_value, collections.Sequence): - ExceptionCollector.appendException( + ValidationIsshueCollector.appendException( InvalidSchemaError(message=_('The property "valid_values" ' 'expects a list.'))) 
diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/functions/Concat.java b/src/main/java/org/openecomp/sdc/toscaparser/api/functions/Concat.java index 6dc7deb..84afbc9 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/functions/Concat.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/functions/Concat.java @@ -1,9 +1,10 @@ package org.openecomp.sdc.toscaparser.api.functions; +import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; + import java.util.ArrayList; import org.openecomp.sdc.toscaparser.api.TopologyTemplate; -import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; public class Concat extends Function { @@ -37,9 +38,9 @@ public class Concat extends Function { @Override void validate() { if(args.size() < 1) { - ThreadLocalsHolder.getCollector().appendException( + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE145", "ValueError: Invalid arguments for function \"concat\". " + - "Expected at least one argument"); + "Expected at least one argument")); } } @@ -68,7 +69,7 @@ Example: def validate(self): if len(self.args) < 1: - ExceptionCollector.appendException( + ValidationIsshueCollector.appendException( ValueError(_('Invalid arguments for function "{0}". 
Expected ' 'at least one arguments.').format(CONCAT))) diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/functions/Function.java b/src/main/java/org/openecomp/sdc/toscaparser/api/functions/Function.java index 7615a00..0278508 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/functions/Function.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/functions/Function.java @@ -171,7 +171,7 @@ public abstract class Function { /*python -from toscaparser.common.exception import ExceptionCollector +from toscaparser.common.exception import ValidationIsshueCollector from toscaparser.common.exception import UnknownInputError from toscaparser.dataentity import DataEntity from toscaparser.elements.constraints import Schema diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetAttribute.java b/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetAttribute.java index 8a3d0b6..66e9320 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetAttribute.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetAttribute.java @@ -1,5 +1,7 @@ package org.openecomp.sdc.toscaparser.api.functions; +import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; + import java.util.ArrayList; import java.util.LinkedHashMap; @@ -45,8 +47,8 @@ public class GetAttribute extends Function { @Override void validate() { if(args.size() < 2) { - ThreadLocalsHolder.getCollector().appendException( - "ValueError: Illegal arguments for function \"get_attribute\". Expected arguments: \"node-template-name\", \"req-or-cap\" (optional), \"property name.\""); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE146", + "ValueError: Illegal arguments for function \"get_attribute\". 
Expected arguments: \"node-template-name\", \"req-or-cap\" (optional), \"property name.\"")); return; } else if(args.size() == 2) { @@ -76,9 +78,9 @@ public class GetAttribute extends Function { for(Object elem: args.subList(index,args.size())) { if(valueType.equals("list")) { if(!(elem instanceof Integer)) { - ThreadLocalsHolder.getCollector().appendException(String.format( + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE147", String.format( "ValueError: Illegal arguments for function \"get_attribute\" \"%s\". Expected positive integer argument", - elem.toString())); + elem.toString()))); } Object ob = attr.getSchema().get("entry_schema"); valueType = (String) @@ -98,9 +100,9 @@ public class GetAttribute extends Function { } } if(bFound) { - ThreadLocalsHolder.getCollector().appendException(String.format( - "ValueError: 'Illegal arguments for function \"get_attribute\". Unexpected attribute/index value \"%d\"", - elem)); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE148", String.format( + "ValueError: 'Illegal arguments for function \"get_attribute\". Unexpected attribute/index value \"%s\"", + elem))); return; } else { // It is a complex type @@ -112,9 +114,9 @@ public class GetAttribute extends Function { valueType = (String)prop.getSchema().get("type"); } else { - ThreadLocalsHolder.getCollector().appendException(String.format( + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE149", String.format( "KeyError: Illegal arguments for function \"get_attribute\". 
Attribute name \"%s\" not found in \"%\"", - elem,valueType)); + elem,valueType))); } } } @@ -146,9 +148,9 @@ public class GetAttribute extends Function { if(nodeTpl != null && !_attributeExistsInType(nodeTpl.getTypeDefinition()) && !nodeTpl.getProperties().keySet().contains(getAttributeName())) { - ThreadLocalsHolder.getCollector().appendException(String.format( + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE150", String.format( "KeyError: Attribute \"%s\" was not found in node template \"%s\"", - getAttributeName(),nodeTpl.getName())); + getAttributeName(),nodeTpl.getName()))); } return nodeTpl; } @@ -187,34 +189,34 @@ public class GetAttribute extends Function { // Currently this is the only way to tell whether the function // is used within the outputs section of the TOSCA template. if(context instanceof ArrayList) { - ThreadLocalsHolder.getCollector().appendException( - "ValueError: \"get_attribute: [ HOST, ... ]\" is not allowed in \"outputs\" section of the TOSCA template"); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE151", + "ValueError: \"get_attribute: [ HOST, ... ]\" is not allowed in \"outputs\" section of the TOSCA template")); return null; } NodeTemplate nodeTpl = _findHostContainingAttribute(SELF); if(nodeTpl == null) { - ThreadLocalsHolder.getCollector().appendException(String.format( + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE152", String.format( "ValueError: \"get_attribute: [ HOST, ... 
]\" was used in " + "node template \"%s\" but \"%s\" was not found in " + - "the relationship chain",((NodeTemplate)context).getName(),HOSTED_ON)); + "the relationship chain",((NodeTemplate)context).getName(),HOSTED_ON))); return null; } return nodeTpl; } if(nodeTemplateName.equals(TARGET)) { if(!(((EntityTemplate)context).getTypeDefinition() instanceof RelationshipType)) { - ThreadLocalsHolder.getCollector().appendException( + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE153", "KeyError: \"TARGET\" keyword can only be used in context " + - " to \"Relationships\" target node"); + " to \"Relationships\" target node")); return null; } return ((RelationshipTemplate)context).getTarget(); } if(nodeTemplateName.equals(SOURCE)) { if(!(((EntityTemplate)context).getTypeDefinition() instanceof RelationshipType)) { - ThreadLocalsHolder.getCollector().appendException( + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE154", "KeyError: \"SOURCE\" keyword can only be used in context " + - " to \"Relationships\" source node"); + " to \"Relationships\" source node")); return null; } return ((RelationshipTemplate)context).getTarget(); @@ -231,8 +233,8 @@ public class GetAttribute extends Function { return nt; } } - ThreadLocalsHolder.getCollector().appendException(String.format( - "KeyError: Node template \"%s\" was not found",nodeTemplateName)); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE155", String.format( + "KeyError: Node template \"%s\" was not found",nodeTemplateName))); return null; } @@ -265,16 +267,16 @@ public class GetAttribute extends Function { attribute = attrs.get(attrName); } if(attribute == null) { - ThreadLocalsHolder.getCollector().appendException(String.format( + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE156", String.format( "KeyError: Attribute \"%s\" was not found in capability \"%s\" of node 
template \"%s\" referenced from node template \"%s\"", - attrName,capabilityName,nodeTemplate.getName(),((NodeTemplate)context).getName())); + attrName,capabilityName,nodeTemplate.getName(),((NodeTemplate)context).getName()))); } return attribute; } String msg = String.format( "Requirement/CapabilityAssignment \"%s\" referenced from node template \"%s\" was not found in node template \"%s\"", capabilityName,((NodeTemplate)context).getName(),nodeTemplate.getName()); - ThreadLocalsHolder.getCollector().appendException("KeyError: " + msg); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE157", "KeyError: " + msg)); return null; } @@ -316,7 +318,7 @@ Examples: def validate(self): if len(self.args) < 2: - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( ValueError(_('Illegal arguments for function "{0}". Expected ' 'arguments: "node-template-name", "req-or-cap"' '(optional), "property name"' @@ -344,7 +346,7 @@ def validate(self): for elem in self.args[index:]: if value_type == "list": if not isinstance(elem, int): - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( ValueError(_('Illegal arguments for function' ' "{0}". "{1}" Expected positive' ' integer argument' @@ -353,7 +355,7 @@ def validate(self): elif value_type == "map": value_type = attr.schema['entry_schema']['type'] elif value_type in Schema.PROPERTY_TYPES: - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( ValueError(_('Illegal arguments for function' ' "{0}". Unexpected attribute/' 'index value "{1}"' @@ -367,7 +369,7 @@ def validate(self): prop = found[0] value_type = prop.schema['type'] else: - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( KeyError(_('Illegal arguments for function' ' "{0}". 
Attribute name "{1}" not' ' found in "{2}"' @@ -394,7 +396,7 @@ def _find_node_template_containing_attribute(self): if node_tpl and \ not self._attribute_exists_in_type(node_tpl.type_definition) \ and self.attribute_name not in node_tpl.get_properties(): - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( KeyError(_('Attribute "%(att)s" was not found in node ' 'template "%(ntpl)s".') % {'att': self.attribute_name, @@ -428,14 +430,14 @@ def _find_node_template(self, node_template_name): # Currently this is the only way to tell whether the function # is used within the outputs section of the TOSCA template. if isinstance(self.context, list): - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( ValueError(_( '"get_attribute: [ HOST, ... ]" is not allowed in ' '"outputs" section of the TOSCA template.'))) return node_tpl = self._find_host_containing_attribute() if not node_tpl: - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( ValueError(_( '"get_attribute: [ HOST, ... 
]" was used in node ' 'template "{0}" but "{1}" was not found in ' @@ -445,14 +447,14 @@ def _find_node_template(self, node_template_name): return node_tpl if node_template_name == TARGET: if not isinstance(self.context.type_definition, RelationshipType): - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( KeyError(_('"TARGET" keyword can only be used in context' ' to "Relationships" target node'))) return return self.context.target if node_template_name == SOURCE: if not isinstance(self.context.type_definition, RelationshipType): - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( KeyError(_('"SOURCE" keyword can only be used in context' ' to "Relationships" source node'))) return @@ -464,7 +466,7 @@ def _find_node_template(self, node_template_name): for node_template in self.tosca_tpl.nodetemplates: if node_template.name == name: return node_template - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( KeyError(_( 'Node template "{0}" was not found.' 
).format(node_template_name))) @@ -498,7 +500,7 @@ def _get_capability_attribute(self, if attrs and attr_name in attrs.keys(): attribute = attrs[attr_name] if not attribute: - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( KeyError(_('Attribute "%(attr)s" was not found in ' 'capability "%(cap)s" of node template ' '"%(ntpl1)s" referenced from node template ' @@ -512,7 +514,7 @@ def _get_capability_attribute(self, capability_name, self.context.name, node_template.name) - ExceptionCollector.appendException(KeyError(msg)) + ValidationIssueCollector.appendException(KeyError(msg)) @property def node_template_name(self): diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetInput.java b/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetInput.java index dd6c05c..67cecd7 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetInput.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetInput.java @@ -5,7 +5,7 @@ import java.util.LinkedHashMap; import org.openecomp.sdc.toscaparser.api.DataEntity; import org.openecomp.sdc.toscaparser.api.TopologyTemplate; -import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; +import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; import org.openecomp.sdc.toscaparser.api.parameters.Input; import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; @@ -25,9 +25,9 @@ public class GetInput extends Function { // args.toString())); // } if(args.size() > 2) { - ThreadLocalsHolder.getCollector().appendWarning(String.format( + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE009", String.format( "ValueError: Expected max 2 arguments for function \"get_input\" but received \"%s\"", - args.size())); + args.size()))); } boolean bFound = false; for(Input inp: toscaTpl.getInputs()) { @@ -37,8 +37,8 @@ public class GetInput extends Function { } } if(!bFound) { - 
ThreadLocalsHolder.getCollector().appendException(String.format( - "UnknownInputError: Unknown input \"%s\"",args.get(0))); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE158", String.format( + "UnknownInputError: Unknown input \"%s\"",args.get(0)))); } } @@ -99,13 +99,13 @@ Example: def validate(self): if len(self.args) != 1: - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( ValueError(_( 'Expected one argument for function "get_input" but ' 'received "%s".') % self.args)) inputs = [input.name for input in self.tosca_tpl.inputs] if self.args[0] not in inputs: - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( UnknownInputError(input_name=self.args[0])) def result(self): diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetOperationOutput.java b/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetOperationOutput.java index 22f2cd7..7af7eeb 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetOperationOutput.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetOperationOutput.java @@ -1,9 +1,10 @@ package org.openecomp.sdc.toscaparser.api.functions; +import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; + import java.util.ArrayList; import org.openecomp.sdc.toscaparser.api.*; -import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; import org.openecomp.sdc.toscaparser.api.elements.InterfacesDef; import org.openecomp.sdc.toscaparser.api.elements.RelationshipType; import org.openecomp.sdc.toscaparser.api.elements.StatefulEntityType; @@ -23,10 +24,10 @@ public class GetOperationOutput extends Function { _findOperationName(interfaceName,(String)args.get(2)); } else { - ThreadLocalsHolder.getCollector().appendException( + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE159", "ValueError: Illegal arguments for function 
\"get_operation_output\". " + "Expected arguments: \"template_name\",\"interface_name\"," + - "\"operation_name\",\"output_variable_name\""); + "\"operation_name\",\"output_variable_name\"")); } } @@ -42,9 +43,9 @@ public class GetOperationOutput extends Function { return _interfaceName; } else { - ThreadLocalsHolder.getCollector().appendException(String.format( + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE160", String.format( "ValueError: invalid interface name \"%s\" in \"get_operation_output\"", - _interfaceName)); + _interfaceName))); return null; } } @@ -64,9 +65,9 @@ public class GetOperationOutput extends Function { return operationName; } else { - ThreadLocalsHolder.getCollector().appendException(String.format( + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE161", String.format( "ValueError: Invalid operation of Configure interface \"%s\" in \"get_operation_output\"", - operationName)); + operationName))); return null; } } @@ -83,16 +84,16 @@ public class GetOperationOutput extends Function { return operationName; } else { - ThreadLocalsHolder.getCollector().appendException(String.format( + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE162", String.format( "ValueError: Invalid operation of Configure interface \"%s\" in \"get_operation_output\"", - operationName)); + operationName))); return null; } } else { - ThreadLocalsHolder.getCollector().appendException(String.format( + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE163", String.format( "ValueError: Invalid interface name \"%s\" in \"get_operation_output\"", - interfaceName)); + interfaceName))); return null; } } @@ -100,18 +101,18 @@ public class GetOperationOutput extends Function { private NodeTemplate _findNodeTemplate(String nodeTemplateName) { if(nodeTemplateName.equals(TARGET)) { if(!(((EntityTemplate)context).getTypeDefinition() instanceof 
RelationshipType)) { - ThreadLocalsHolder.getCollector().appendException( + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE164", "KeyError: \"TARGET\" keyword can only be used in context " + - " to \"Relationships\" target node"); + " to \"Relationships\" target node")); return null; } return ((RelationshipTemplate)context).getTarget(); } if(nodeTemplateName.equals(SOURCE)) { if(!(((EntityTemplate)context).getTypeDefinition() instanceof RelationshipType)) { - ThreadLocalsHolder.getCollector().appendException( + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE165", "KeyError: \"SOURCE\" keyword can only be used in context " + - " to \"Relationships\" source node"); + " to \"Relationships\" source node")); return null; } return ((RelationshipTemplate)context).getTarget(); @@ -128,8 +129,8 @@ public class GetOperationOutput extends Function { return nt; } } - ThreadLocalsHolder.getCollector().appendException(String.format( - "KeyError: Node template \"%s\" was not found",nodeTemplateName)); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE166", String.format( + "KeyError: Node template \"%s\" was not found",nodeTemplateName))); return null; } @@ -149,7 +150,7 @@ def validate(self): interface_name = self._find_interface_name(self.args[1]) self._find_operation_name(interface_name, self.args[2]) else: - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( ValueError(_('Illegal arguments for function "{0}". 
Expected ' 'arguments: "template_name","interface_name",' '"operation_name","output_variable_name"' @@ -160,7 +161,7 @@ def _find_interface_name(self, interface_name): if interface_name in toscaparser.elements.interfaces.SECTIONS: return interface_name else: - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( ValueError(_('Enter a valid interface name' ).format(GET_OPERATION_OUTPUT))) return @@ -173,7 +174,7 @@ def _find_operation_name(self, interface_name, operation_name): interfaces_relationship_configure_operations): return operation_name else: - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( ValueError(_('Enter an operation of Configure interface' ).format(GET_OPERATION_OUTPUT))) return @@ -183,12 +184,12 @@ def _find_operation_name(self, interface_name, operation_name): StatefulEntityType.interfaces_node_lifecycle_operations): return operation_name else: - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( ValueError(_('Enter an operation of Standard interface' ).format(GET_OPERATION_OUTPUT))) return else: - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( ValueError(_('Enter a valid operation name' ).format(GET_OPERATION_OUTPUT))) return @@ -196,14 +197,14 @@ def _find_operation_name(self, interface_name, operation_name): def _find_node_template(self, node_template_name): if node_template_name == TARGET: if not isinstance(self.context.type_definition, RelationshipType): - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( KeyError(_('"TARGET" keyword can only be used in context' ' to "Relationships" target node'))) return return self.context.target if node_template_name == SOURCE: if not isinstance(self.context.type_definition, RelationshipType): - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( KeyError(_('"SOURCE" keyword can only be used in context' ' to "Relationships" 
source node'))) return @@ -215,7 +216,7 @@ def _find_node_template(self, node_template_name): for node_template in self.tosca_tpl.nodetemplates: if node_template.name == name: return node_template - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( KeyError(_( 'Node template "{0}" was not found.' ).format(node_template_name))) diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetProperty.java b/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetProperty.java index 41495bc..1abee6e 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetProperty.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetProperty.java @@ -1,5 +1,7 @@ package org.openecomp.sdc.toscaparser.api.functions; +import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; + import java.util.ArrayList; import java.util.LinkedHashMap; @@ -46,8 +48,8 @@ public class GetProperty extends Function { @Override void validate() { if(args.size() < 2) { - ThreadLocalsHolder.getCollector().appendException( - "ValueError: Illegal arguments for function \"get_property\". Expected arguments: \"node-template-name\", \"req-or-cap\" (optional), \"property name.\""); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE167", + "ValueError: Illegal arguments for function \"get_property\". 
Expected arguments: \"node-template-name\", \"req-or-cap\" (optional), \"property name.\"")); return; } if(args.size() == 2) { @@ -129,16 +131,16 @@ public class GetProperty extends Function { property = ((Property)props.get(propertyName)).getValue(); } if(property == null && throwErrors) { - ThreadLocalsHolder.getCollector().appendException(String.format( + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE168", String.format( "KeyError: Property \"%s\" was not found in capability \"%s\" of node template \"%s\" referenced from node template \"%s\"", - propertyName,capabilityName,nodeTemplate.getName(),((NodeTemplate)context).getName())); + propertyName,capabilityName,nodeTemplate.getName(),((NodeTemplate)context).getName()))); } return property; } if(throwErrors) { - ThreadLocalsHolder.getCollector().appendException(String.format( + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE169", String.format( "KeyError: Requirement/CapabilityAssignment \"%s\" referenced from node template \"%s\" was not found in node template \"%s\"", - capabilityName,((NodeTemplate)context).getName(),nodeTemplate.getName())); + capabilityName,((NodeTemplate)context).getName(),nodeTemplate.getName()))); } return null; @@ -152,9 +154,9 @@ public class GetProperty extends Function { LinkedHashMap props = nodeTpl.getProperties(); Property found = props.get(propertyName); if(found == null) { - ThreadLocalsHolder.getCollector().appendException(String.format( + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE170", String.format( "KeyError: Property \"%s\" was not found in node template \"%s\"", - propertyName,nodeTpl.getName())); + propertyName,nodeTpl.getName()))); } return found; } @@ -167,25 +169,25 @@ public class GetProperty extends Function { if(nodeTemplateName.equals(HOST)) { NodeTemplate node = _findHostContainingProperty(null); if(node == null) { - 
ThreadLocalsHolder.getCollector().appendException(String.format( + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE171", String.format( "KeyError: Property \"%s\" was not found in capability \"%s\" of node template \"%s\" referenced from node template \"%s\"", - (String)args.get(2),(String)args.get(1),((NodeTemplate)context).getName())); + (String)args.get(2),(String)args.get(1),((NodeTemplate)context).getName()))); return null; } return node; } if(nodeTemplateName.equals(TARGET)) { if(!(((RelationshipTemplate)context).getTypeDefinition() instanceof RelationshipType)) { - ThreadLocalsHolder.getCollector().appendException( - "KeyError: \"TARGET\" keyword can only be used in context to \"Relationships\" target node"); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE172", + "KeyError: \"TARGET\" keyword can only be used in context to \"Relationships\" target node")); return null; } return ((RelationshipTemplate)context).getTarget(); } if(nodeTemplateName.equals(SOURCE)) { if(!(((RelationshipTemplate)context).getTypeDefinition() instanceof RelationshipType)) { - ThreadLocalsHolder.getCollector().appendException( - "KeyError: \"SOURCE\" keyword can only be used in context to \"Relationships\" target node"); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE173", + "KeyError: \"SOURCE\" keyword can only be used in context to \"Relationships\" target node")); return null; } return ((RelationshipTemplate)context).getSource(); @@ -198,9 +200,9 @@ public class GetProperty extends Function { return nodeTemplate; } } - ThreadLocalsHolder.getCollector().appendException(String.format( + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE174", String.format( "KeyError: Node template \"%s\" was not found. 
Referenced from Node Template \"%s\"", - nodeTemplateName,((NodeTemplate)context).getName())); + nodeTemplateName,((NodeTemplate)context).getName()))); return null; } @@ -212,16 +214,16 @@ public class GetProperty extends Function { return ((ArrayList)value).get(index); } else { - ThreadLocalsHolder.getCollector().appendException(String.format( + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE175", String.format( "KeyError: Property \"%s\" found in capability \"%s\" referenced from node template \"%s\" must have an element with index %d", - args.get(2),args.get(1),((NodeTemplate)context).getName(),index)); + args.get(2),args.get(1),((NodeTemplate)context).getName(),index))); } } else { - ThreadLocalsHolder.getCollector().appendException(String.format( + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE176", String.format( "KeyError: Property \"%s\" found in capability \"%s\" referenced from node template \"%s\" must be a list", - args.get(2),args.get(1),((NodeTemplate)context).getName())); + args.get(2),args.get(1),((NodeTemplate)context).getName()))); } return null; } @@ -234,15 +236,15 @@ public class GetProperty extends Function { return ov; } else { - ThreadLocalsHolder.getCollector().appendException(String.format( + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE177", String.format( "KeyError: Property \"%s\" found in capability \"%s\" referenced from node template \"%s\" must have an attribute named \"%s\"", - args.get(2),args.get(1),((NodeTemplate)context).getName(),attribute)); + args.get(2),args.get(1),((NodeTemplate)context).getName(),attribute))); } } else { - ThreadLocalsHolder.getCollector().appendException(String.format( + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE178", String.format( "KeyError: Property \"%s\" found in capability \"%s\" referenced from node template \"%s\" must be a dict", - 
args.get(2),args.get(1),((NodeTemplate)context).getName())); + args.get(2),args.get(1),((NodeTemplate)context).getName()))); } return null; } @@ -379,7 +381,7 @@ Examples: def validate(self): if len(self.args) < 2: - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( ValueError(_( 'Expected arguments: "node-template-name", "req-or-cap" ' '(optional), "property name".'))) @@ -446,7 +448,7 @@ def _get_capability_property(self, if props and property_name in props.keys(): property = props[property_name].value if not property: - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( KeyError(_('Property "%(prop)s" was not found in ' 'capability "%(cap)s" of node template ' '"%(ntpl1)s" referenced from node template ' @@ -460,7 +462,7 @@ def _get_capability_property(self, capability_name, self.context.name, node_template.name) - ExceptionCollector.appendException(KeyError(msg)) + ValidationIssueCollector.appendException(KeyError(msg)) def _find_property(self, property_name): node_tpl = self._find_node_template(self.args[0]) @@ -469,7 +471,7 @@ def _find_property(self, property_name): props = node_tpl.get_properties() found = [props[property_name]] if property_name in props else [] if len(found) == 0: - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( KeyError(_('Property "%(prop)s" was not found in node ' 'template "%(ntpl)s".') % {'prop': property_name, @@ -485,14 +487,14 @@ def _find_node_template(self, node_template_name): return self._find_host_containing_property() if node_template_name == TARGET: if not isinstance(self.context.type_definition, RelationshipType): - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( KeyError(_('"TARGET" keyword can only be used in context' ' to "Relationships" target node'))) return return self.context.target if node_template_name == SOURCE: if not isinstance(self.context.type_definition, RelationshipType): - 
ExceptionCollector.appendException( + ValidationIssueCollector.appendException( KeyError(_('"SOURCE" keyword can only be used in context' ' to "Relationships" source node'))) return @@ -502,7 +504,7 @@ def _find_node_template(self, node_template_name): for node_template in self.tosca_tpl.nodetemplates: if node_template.name == node_template_name: return node_template - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( KeyError(_( 'Node template "{0}" was not found.' ).format(node_template_name))) @@ -512,7 +514,7 @@ def _get_index_value(self, value, index): if index < len(value): return value[index] else: - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( KeyError(_( "Property '{0}' found in capability '{1}'" " referenced from node template {2}" @@ -522,7 +524,7 @@ def _get_index_value(self, value, index): self.context.name, index))) else: - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( KeyError(_( "Property '{0}' found in capability '{1}'" " referenced from node template {2}" @@ -535,7 +537,7 @@ def _get_attribute_value(self, value, attibute): if attibute in value: return value[attibute] else: - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( KeyError(_( "Property '{0}' found in capability '{1}'" " referenced from node template {2}" @@ -545,7 +547,7 @@ def _get_attribute_value(self, value, attibute): self.context.name, attibute))) else: - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( KeyError(_( "Property '{0}' found in capability '{1}'" " referenced from node template {2}" diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/functions/Token.java b/src/main/java/org/openecomp/sdc/toscaparser/api/functions/Token.java index 4438908..8f35a80 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/functions/Token.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/functions/Token.java @@ 
-1,11 +1,10 @@ package org.openecomp.sdc.toscaparser.api.functions; +import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; + import java.util.ArrayList; -import java.util.LinkedHashMap; -import org.openecomp.sdc.toscaparser.api.NodeTemplate; import org.openecomp.sdc.toscaparser.api.TopologyTemplate; -import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; public class Token extends Function { @@ -42,21 +41,21 @@ public class Token extends Function { @Override void validate() { if(args.size() < 3) { - ThreadLocalsHolder.getCollector().appendException( + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE180", "ValueError: Invalid arguments for function \"token\". " + - "Expected at least three arguments"); + "Expected at least three arguments")); } else { if(!(args.get(1) instanceof String) || ((String)args.get(1)).length() != 1) { - ThreadLocalsHolder.getCollector().appendException( + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE181", "ValueError: Invalid arguments for function \"token\". " + - "Expected single char value as second argument"); + "Expected single char value as second argument")); } if(!(args.get(2) instanceof Integer)) { - ThreadLocalsHolder.getCollector().appendException( + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE182", "ValueError: Invalid arguments for function \"token\"" + - "Expected integer value as third argument"); + "Expected integer value as third argument")); } } } @@ -91,18 +90,18 @@ Example: def validate(self): if len(self.args) < 3: - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( ValueError(_('Invalid arguments for function "{0}". 
Expected ' 'at least three arguments.').format(TOKEN))) else: if not isinstance(self.args[1], str) or len(self.args[1]) != 1: - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( ValueError(_('Invalid arguments for function "{0}". ' 'Expected single char value as second ' 'argument.').format(TOKEN))) if not isinstance(self.args[2], int): - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( ValueError(_('Invalid arguments for function "{0}". ' 'Expected integer value as third ' 'argument.').format(TOKEN))) diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/parameters/Input.java b/src/main/java/org/openecomp/sdc/toscaparser/api/parameters/Input.java index 28e57d2..15ca6da 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/parameters/Input.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/parameters/Input.java @@ -1,11 +1,12 @@ package org.openecomp.sdc.toscaparser.api.parameters; +import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; + import java.util.ArrayList; import java.util.Arrays; import java.util.LinkedHashMap; import org.openecomp.sdc.toscaparser.api.DataEntity; -import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; import org.openecomp.sdc.toscaparser.api.elements.EntityType; import org.openecomp.sdc.toscaparser.api.elements.constraints.Constraint; import org.openecomp.sdc.toscaparser.api.elements.constraints.Schema; @@ -95,9 +96,9 @@ public class Input { } } if(!bFound) { - ThreadLocalsHolder.getCollector().appendException(String.format( + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE214", String.format( "UnknownFieldError: Input \"%s\" contains unknown field \"%s\"", - name,key)); + name,key))); } } } @@ -118,8 +119,8 @@ public class Input { } if(!bFound) { - ThreadLocalsHolder.getCollector().appendException(String.format( - "ValueError: Invalid type \"%s\"",inputType)); + 
ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE215", String.format( + "ValueError: Invalid type \"%s\"",inputType))); } } @@ -150,7 +151,7 @@ public class Input { /*python -from toscaparser.common.exception import ExceptionCollector +from toscaparser.common.exception import ValidationIssueCollector from toscaparser.common.exception import MissingRequiredFieldError from toscaparser.common.exception import UnknownFieldError from toscaparser.dataentity import DataEntity @@ -207,13 +208,13 @@ class Input(object): def _validate_field(self): for name in self.schema.schema: if name not in self.INPUTFIELD: - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( UnknownFieldError(what='Input "%s"' % self.name, field=name)) def validate_type(self, input_type): if input_type not in Schema.PROPERTY_TYPES: - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( ValueError(_('Invalid type "%s".') % type)) # tODO(anyone) Need to test for any built-in datatype not just network diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/parameters/Output.java b/src/main/java/org/openecomp/sdc/toscaparser/api/parameters/Output.java index 34ecf12..381388b 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/parameters/Output.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/parameters/Output.java @@ -1,8 +1,9 @@ package org.openecomp.sdc.toscaparser.api.parameters; +import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; + import java.util.LinkedHashMap; -import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; public class Output { @@ -34,15 +35,15 @@ public class Output { private void _validateField() { if(!(attrs instanceof LinkedHashMap)) { //TODO wrong error message... 
- ThreadLocalsHolder.getCollector().appendException(String.format( + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE216", String.format( "ValidationError: Output \"%s\" has wrong type. Expecting a dict", - name)); + name))); } if(getValue() == null) { - ThreadLocalsHolder.getCollector().appendException(String.format( + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE217", String.format( "MissingRequiredFieldError: Output \"%s\" is missing required \"%s\"", - name,VALUE)); + name,VALUE))); } for(String key: attrs.keySet()) { boolean bFound = false; @@ -53,9 +54,9 @@ public class Output { } } if(!bFound) { - ThreadLocalsHolder.getCollector().appendException(String.format( + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE218", String.format( "UnknownFieldError: Output \"%s\" contains unknown field \"%s\"", - name,key)); + name,key))); } } } @@ -94,16 +95,16 @@ class Output(object): def _validate_field(self): if not isinstance(self.attrs, dict): - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( MissingRequiredFieldError(what='Output "%s"' % self.name, required=self.VALUE)) if self.value is None: - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( MissingRequiredFieldError(what='Output "%s"' % self.name, required=self.VALUE)) for name in self.attrs: if name not in self.OUTPUTFIELD: - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( UnknownFieldError(what='Output "%s"' % self.name, field=name)) */ diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/prereq/CSAR.java b/src/main/java/org/openecomp/sdc/toscaparser/api/prereq/CSAR.java index 85b54ee..ef29b53 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/prereq/CSAR.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/prereq/CSAR.java @@ -1,5 +1,7 @@ package 
org.openecomp.sdc.toscaparser.api.prereq; +import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; + import java.io.BufferedOutputStream; import java.io.File; import java.io.FileInputStream; @@ -61,7 +63,7 @@ public class CSAR { if(isFile) { File f = new File(path); if (!f.isFile()) { - ThreadLocalsHolder.getCollector().appendException(String.format("\"%s\" is not a file", path)); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE220", String.format("\"%s\" is not a file", path))); return false; } else { @@ -70,7 +72,7 @@ public class CSAR { } else { if(!UrlUtils.validateUrl(path)) { - ThreadLocalsHolder.getCollector().appendException(String.format("ImportError: \"%s\" does not exist",path)); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE221", String.format("ImportError: \"%s\" does not exist",path))); return false; } // get it to a local file @@ -82,7 +84,7 @@ public class CSAR { Files.copy(in,ptf,StandardCopyOption.REPLACE_EXISTING); } catch(Exception e) { - ThreadLocalsHolder.getCollector().appendException("ImportError: failed to load CSAR from " + path); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE222", "ImportError: failed to load CSAR from " + path)); return false; } @@ -199,7 +201,7 @@ public class CSAR { //ThreadLocalsHolder.getCollector().appendCriticalException(e.getMessage()); throw e; } catch (Exception e) { - ThreadLocalsHolder.getCollector().appendException("ValidationError: " + e.getMessage()); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE223", "ValidationError: " + e.getMessage())); errorCaught = true; } @@ -281,10 +283,10 @@ public class CSAR { return (LinkedHashMap)data; } catch(Exception e) { - ThreadLocalsHolder.getCollector().appendException(String.format( + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE224", String.format( "The 
file \"%s\" in the CSAR \"%s\" does not " + "contain valid TOSCA YAML content", - mainTemplate,csar)); + mainTemplate,csar))); } } return null; @@ -361,9 +363,9 @@ public class CSAR { } } else { - ThreadLocalsHolder.getCollector().appendException(String.format( + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE225", String.format( "ValueError: Unexpected artifact definition for \"%s\"", - artifactKey)); + artifactKey))); errorCaught = true; } } @@ -429,12 +431,12 @@ public class CSAR { return; } else { - ThreadLocalsHolder.getCollector().appendException(msg); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE226", msg)); errorCaught = true; } } catch (Exception e) { - ThreadLocalsHolder.getCollector().appendException(msg); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE227", msg)); } } @@ -446,8 +448,8 @@ public class CSAR { } if(raiseExc) { - ThreadLocalsHolder.getCollector().appendException(String.format( - "ValueError: The resource \"%s\" does not exist",resourceFile)); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE228", String.format( + "ValueError: The resource \"%s\" does not exist",resourceFile))); } errorCaught = true; } @@ -511,7 +513,7 @@ public class CSAR { /*python -from toscaparser.common.exception import ExceptionCollector +from toscaparser.common.exception import ValidationIssueCollector from toscaparser.common.exception import URLException from toscaparser.common.exception import ValidationError from toscaparser.imports import ImportsLoader @@ -543,14 +545,14 @@ class CSAR(object): missing_err_msg = (_('"%s" does not exist.') % self.path) if self.a_file: if not os.path.isfile(self.path): - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( ValidationError(message=missing_err_msg)) return False else: self.csar = self.path else: # a URL if not 
UrlUtils.validate_url(self.path): - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( ValidationError(message=missing_err_msg)) return False else: @@ -560,7 +562,7 @@ class CSAR(object): # validate that it is a valid zip file if not zipfile.is_zipfile(self.csar): err_msg = (_('"%s" is not a valid zip file.') % self.path) - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( ValidationError(message=err_msg)) return False @@ -571,7 +573,7 @@ class CSAR(object): err_msg = (_('"%s" is not a valid CSAR as it does not contain the ' 'required file "TOSCA.meta" in the folder ' '"TOSCA-Metadata".') % self.path) - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( ValidationError(message=err_msg)) return False @@ -585,11 +587,11 @@ class CSAR(object): if type(meta) is dict: self.metadata = meta else: - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( ValidationError(message=invalid_yaml_err_msg)) return False except yaml.YAMLError: - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( ValidationError(message=invalid_yaml_err_msg)) return False @@ -598,7 +600,7 @@ class CSAR(object): '"Entry-Definitions" in ' '"TOSCA-Metadata/TOSCA.meta".') % self.path) - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( ValidationError(message=err_msg)) return False @@ -608,7 +610,7 @@ class CSAR(object): if entry and entry not in filelist: err_msg = (_('The "Entry-Definitions" file defined in the ' 'CSAR "%s" does not exist.') % self.path) - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( ValidationError(message=err_msg)) return False @@ -654,11 +656,11 @@ class CSAR(object): try: tosca_yaml = yaml.load(data) if type(tosca_yaml) is not dict: - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( ValidationError(message=invalid_tosca_yaml_err_msg)) return 
tosca_yaml except Exception: - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( ValidationError(message=invalid_tosca_yaml_err_msg)) def get_description(self): @@ -718,7 +720,7 @@ class CSAR(object): main_tpl_file, artifact['file']) else: - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( ValueError(_('Unexpected artifact ' 'definition for "%s".') % artifact_key)) @@ -759,11 +761,11 @@ class CSAR(object): if UrlUtils.url_accessible(resource_file): return else: - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( URLException(what=msg)) self.error_caught = True except Exception: - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( URLException(what=msg)) self.error_caught = True @@ -773,7 +775,7 @@ class CSAR(object): return if raise_exc: - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( ValueError(_('The resource "%s" does not exist.') % resource_file)) self.error_caught = True diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/prereq/CSAR.java.orig b/src/main/java/org/openecomp/sdc/toscaparser/api/prereq/CSAR.java.orig index aa36b9e..b4d2614 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/prereq/CSAR.java.orig +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/prereq/CSAR.java.orig @@ -18,7 +18,7 @@ import java.util.zip.ZipFile; import java.util.zip.ZipInputStream; import org.openecomp.sdc.toscaparser.api.ImportsLoader; -import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; +import org.openecomp.sdc.toscaparser.api.common.ValidationIssueCollector; import org.openecomp.sdc.toscaparser.api.elements.Metadata; import org.openecomp.sdc.toscaparser.api.utils.UrlUtils; import org.slf4j.Logger; diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/utils/JToscaErrorCodes.java b/src/main/java/org/openecomp/sdc/toscaparser/api/utils/JToscaErrorCodes.java index 354fef0..6fb4606 
100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/utils/JToscaErrorCodes.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/utils/JToscaErrorCodes.java @@ -2,14 +2,14 @@ package org.openecomp.sdc.toscaparser.api.utils; public enum JToscaErrorCodes { - MISSING_META_FILE("JT1001"), - INVALID_META_YAML_CONTENT("JT1002"), - ENTRY_DEFINITION_NOT_DEFINED("JT1003"), - MISSING_ENTRY_DEFINITION_FILE ("JT1004"), - GENERAL_ERROR("JT1005"), - PATH_NOT_VALID("JT1006"), - CSAR_TOSCA_VALIDATION_ERROR("JT1007"), - INVALID_CSAR_FORMAT("JT1008"); + MISSING_META_FILE("JE1001"), + INVALID_META_YAML_CONTENT("JE1002"), + ENTRY_DEFINITION_NOT_DEFINED("JE1003"), + MISSING_ENTRY_DEFINITION_FILE ("JE1004"), + GENERAL_ERROR("JE1005"), + PATH_NOT_VALID("JE1006"), + CSAR_TOSCA_VALIDATION_ERROR("JE1007"), + INVALID_CSAR_FORMAT("JE1008"); private String value; diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/utils/TOSCAVersionProperty.java b/src/main/java/org/openecomp/sdc/toscaparser/api/utils/TOSCAVersionProperty.java index 6b3c1ce..f749f7b 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/utils/TOSCAVersionProperty.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/utils/TOSCAVersionProperty.java @@ -1,10 +1,10 @@ package org.openecomp.sdc.toscaparser.api.utils; +import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; + import java.util.regex.Matcher; import java.util.regex.Pattern; -import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; - public class TOSCAVersionProperty {// test with functions/test_concat.yaml private String version; @@ -35,10 +35,10 @@ public class TOSCAVersionProperty {// test with functions/test_concat.yaml Pattern pattern = Pattern.compile(versionRe); Matcher matcher = pattern.matcher(version); if(!matcher.find()) { - ThreadLocalsHolder.getCollector().appendException(String.format( + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE252", String.format( 
"InvalidTOSCAVersionPropertyException: " + "Value of TOSCA version property \"%s\" is invalid", - version)); + version))); return; } minorVersion = matcher.group("gMinorVersion"); @@ -77,10 +77,10 @@ public class TOSCAVersionProperty {// test with functions/test_concat.yaml if((fixVersion == null && value != null) || (minorVersion.equals("0") && majorVersion.equals("0") && fixVersion.equals("0") && value != null)) { - ThreadLocalsHolder.getCollector().appendException(String.format( + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE253", String.format( "InvalidTOSCAVersionPropertyException: " + "Value of TOSCA version property \"%s\" is invalid", - version)); + version))); } return value; } @@ -92,10 +92,10 @@ public class TOSCAVersionProperty {// test with functions/test_concat.yaml // Eg: version = 18.0.0-1 is invalid. if(qualifier == null && value != null) { - ThreadLocalsHolder.getCollector().appendException(String.format( + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE254", String.format( "InvalidTOSCAVersionPropertyException: " + "Value of TOSCA version property \"%s\" is invalid", - version)); + version))); } return value; } @@ -120,7 +120,7 @@ class TOSCAVersionProperty(object): self.version = str(version) match = self.VERSION_RE.match(self.version) if not match: - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( InvalidTOSCAVersionPropertyException(what=(self.version))) return ver = match.groupdict() @@ -161,7 +161,7 @@ class TOSCAVersionProperty(object): if (self.fix_version is None and value) or \ (self.minor_version == self.major_version == self.fix_version == '0' and value): - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( InvalidTOSCAVersionPropertyException(what=(self.version))) return value @@ -173,7 +173,7 @@ class TOSCAVersionProperty(object): Eg: version = 18.0.0-1 is invalid. 
""" if not self.qualifier and value: - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( InvalidTOSCAVersionPropertyException(what=(self.version))) return value diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/utils/ThreadLocalsHolder.java b/src/main/java/org/openecomp/sdc/toscaparser/api/utils/ThreadLocalsHolder.java index 47ba972..0e1531f 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/utils/ThreadLocalsHolder.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/utils/ThreadLocalsHolder.java @@ -1,20 +1,20 @@ package org.openecomp.sdc.toscaparser.api.utils; -import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; +import org.openecomp.sdc.toscaparser.api.common.ValidationIssueCollector; public class ThreadLocalsHolder { - private static final ThreadLocal exceptionCollectorThreadLocal = new ThreadLocal<>(); + private static final ThreadLocal exceptionCollectorThreadLocal = new ThreadLocal<>(); private ThreadLocalsHolder(){} - public static ExceptionCollector getCollector() { + public static ValidationIssueCollector getCollector() { return exceptionCollectorThreadLocal.get(); } - public static void setCollector(ExceptionCollector exceptionCollector) { + public static void setCollector(ValidationIssueCollector validationIssueCollector) { cleanup(); - exceptionCollectorThreadLocal.set(exceptionCollector); + exceptionCollectorThreadLocal.set(validationIssueCollector); } public static void cleanup(){ diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/utils/UrlUtils.java b/src/main/java/org/openecomp/sdc/toscaparser/api/utils/UrlUtils.java index 092f827..3383bd7 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/utils/UrlUtils.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/utils/UrlUtils.java @@ -1,12 +1,12 @@ package org.openecomp.sdc.toscaparser.api.utils; +import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; + import java.io.IOException; import 
java.net.HttpURLConnection; import java.net.MalformedURLException; import java.net.URL; -import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; - public class UrlUtils { public static boolean validateUrl(String sUrl) { @@ -34,16 +34,16 @@ public class UrlUtils { // relative_path: heat-translator // - joined: http://www.githib.com/openstack/heat-translator if(!validateUrl(sUrl)) { - ThreadLocalsHolder.getCollector().appendException(String.format( - "ValueError: The URL \"%s\" is malformed",sUrl)); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE255", String.format( + "ValueError: The URL \"%s\" is malformed",sUrl))); } try { URL base = new URL(sUrl); return (new URL(base,relativePath)).toString(); } catch(MalformedURLException e) { - ThreadLocalsHolder.getCollector().appendException(String.format( - "ValueError: Joining URL \"%s\" and relative path \"%s\" caused an exception",sUrl,relativePath)); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE256", String.format( + "ValueError: Joining URL \"%s\" and relative path \"%s\" caused an exception",sUrl,relativePath))); return sUrl; } } @@ -70,7 +70,7 @@ public class UrlUtils { from six.moves.urllib.parse import urljoin from six.moves.urllib.parse import urlparse -from toscaparser.common.exception import ExceptionCollector +from toscaparser.common.exception import ValidationIssueCollector from toscaparser.utils.gettextutils import _ try: @@ -108,7 +108,7 @@ class UrlUtils(object): - joined: http://www.githib.com/openstack/heat-translator """ if not UrlUtils.validate_url(url): - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( ValueError(_('"%s" is not a valid URL.') % url)) return urljoin(url, relative_path) diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/utils/ValidateUtils.java b/src/main/java/org/openecomp/sdc/toscaparser/api/utils/ValidateUtils.java index 9909685..53f5bec 100644 --- 
a/src/main/java/org/openecomp/sdc/toscaparser/api/utils/ValidateUtils.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/utils/ValidateUtils.java @@ -1,11 +1,11 @@ package org.openecomp.sdc.toscaparser.api.utils; +import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; + import java.util.ArrayList; import java.util.Date; import java.util.LinkedHashMap; -import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector; - public class ValidateUtils { private static final String RANGE_UNBOUNDED = "UNBOUNDED"; @@ -35,8 +35,8 @@ public class ValidateUtils { public static Object validateNumeric(Object value) { if(value != null) { if (!(value instanceof Number)) { - ThreadLocalsHolder.getCollector().appendException(String.format( - "ValueError: \"%s\" is not a numeric", value.toString())); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE257", String.format( + "ValueError: \"%s\" is not a numeric",value.toString()))); } } return value; @@ -49,8 +49,8 @@ public class ValidateUtils { if (value instanceof Boolean) { return (Boolean) value ? 
1 : 0; } - ThreadLocalsHolder.getCollector().appendException(String.format( - "ValueError: \"%s\" is not an integer", value.toString())); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE258", String.format( + "ValueError: \"%s\" is not an integer",value.toString()))); } } return value; @@ -59,8 +59,8 @@ public class ValidateUtils { public static Object validateFloat(Object value) { if(value != null) { if (!(value instanceof Float || value instanceof Double)) { - ThreadLocalsHolder.getCollector().appendException(String.format( - "ValueError: \"%s\" is not a float", value.toString())); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE259", String.format( + "ValueError: \"%s\" is not a float",value.toString()))); } } return value; @@ -69,8 +69,8 @@ public class ValidateUtils { public static Object validateString(Object value) { if(value != null) { if (!(value instanceof String)) { - ThreadLocalsHolder.getCollector().appendException(String.format( - "ValueError: \'%s\' is not a string", value.toString())); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE260", String.format( + "ValueError: \'%s\' is not a string",value.toString()))); } } return value; @@ -79,8 +79,8 @@ public class ValidateUtils { public static Object validateList(Object value) { if(value != null) { if (!(value instanceof ArrayList)) { - ThreadLocalsHolder.getCollector().appendException(String.format( - "ValueError: \"%s\" is not a list", value.toString())); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE261", String.format( + "ValueError: \"%s\" is not a list",value.toString()))); } } return value; @@ -93,8 +93,8 @@ public class ValidateUtils { validateList(range); // validate range list has a min and max if(range instanceof ArrayList && ((ArrayList)range).size() != 2) { - ThreadLocalsHolder.getCollector().appendException(String.format( - 
"ValueError: \"%s\" is not a valid range",range.toString())); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE262", String.format( + "ValueError: \"%s\" is not a valid range",range.toString()))); // too dangerous to continue... return range; } @@ -106,8 +106,8 @@ public class ValidateUtils { if(!(r0 instanceof Integer) && !(r0 instanceof Float) || !(r1 instanceof Integer) && !(r1 instanceof Float)) { - ThreadLocalsHolder.getCollector().appendException(String.format( - "ValueError: \"%s\" is not a valid range",range.toString())); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE263", String.format( + "ValueError: \"%s\" is not a valid range",range.toString()))); // too dangerous to continue... return range; } @@ -131,8 +131,8 @@ public class ValidateUtils { if(!minTest && !maxTest) { // Note: min == max is allowed if(min > max) { - ThreadLocalsHolder.getCollector().appendException(String.format( - "ValueError:\"%s\" is not a valid range",range.toString())); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE264", String.format( + "ValueError:\"%s\" is not a valid range",range.toString()))); } } return range; @@ -142,8 +142,8 @@ public class ValidateUtils { public static Object validateValueInRange(Object value,Object range,String propName) { // verify all 3 are numeric and convert to Floats if(!(value instanceof Integer || value instanceof Float)) { - ThreadLocalsHolder.getCollector().appendException(String.format( - "ValueError: validateInRange: \"%s\" is not a number",range.toString())); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE265", String.format( + "ValueError: validateInRange: \"%s\" is not a number",range.toString()))); return value; } Float fval = value instanceof Integer ? ((Integer)value).floatValue() : (Float)value; @@ -154,8 +154,8 @@ public class ValidateUtils { // better safe than sorry... 
// validate that range list has a min and max if(range instanceof ArrayList && ((ArrayList)range).size() != 2) { - ThreadLocalsHolder.getCollector().appendException(String.format( - "ValueError: \"%s\" is not a valid range",range.toString())); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE266", String.format( + "ValueError: \"%s\" is not a valid range",range.toString()))); // too dangerous to continue... return value; } @@ -167,8 +167,8 @@ public class ValidateUtils { if(!(r0 instanceof Integer) && !(r0 instanceof Float) || !(r1 instanceof Integer) && !(r1 instanceof Float)) { - ThreadLocalsHolder.getCollector().appendException(String.format( - "ValueError: \"%s\" is not a valid range",range.toString())); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE267", String.format( + "ValueError: \"%s\" is not a valid range",range.toString()))); // too dangerous to continue... return value; } @@ -192,8 +192,8 @@ public class ValidateUtils { if(!minTest && !maxTest) { // Note: min == max is allowed if(min > max) { - ThreadLocalsHolder.getCollector().appendException(String.format( - "ValueError:\"%s\" is not a valid range",range.toString())); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE268", String.format( + "ValueError:\"%s\" is not a valid range",range.toString()))); } } // finally... 
@@ -211,9 +211,9 @@ public class ValidateUtils { } } if(bError) { - ThreadLocalsHolder.getCollector().appendException(String.format( + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE269", String.format( "RangeValueError: Property \"%s\", \"%s\" not in range [\"%s\" - \"%s\"", - propName,value.toString(),r0.toString(),r1.toString())); + propName,value.toString(),r0.toString(),r1.toString()))); } return value; } @@ -221,8 +221,8 @@ public class ValidateUtils { public static Object validateMap(Object ob) { if(ob != null) { if (!(ob instanceof LinkedHashMap)) { - ThreadLocalsHolder.getCollector().appendException(String.format( - "ValueError\"%s\" is not a map.", ob.toString())); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE270", String.format( + "ValueError\"%s\" is not a map.",ob.toString()))); } } return ob; @@ -239,8 +239,8 @@ public class ValidateUtils { return normalized.equals("true"); } } - ThreadLocalsHolder.getCollector().appendException(String.format( - "ValueError: \"%s\" is not a boolean", value.toString())); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE271", String.format( + "ValueError: \"%s\" is not a boolean",value.toString()))); } return value; } @@ -256,7 +256,7 @@ public class ValidateUtils { except Exception as e: original_err_msg = str(e) log.error(original_err_msg) - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( ValueError(_('"%(val)s" is not a valid timestamp. 
"%(msg)s"') % {'val': value, 'msg': original_err_msg})) */ @@ -264,9 +264,9 @@ public class ValidateUtils { // timestamps are loaded as Date objects by the YAML parser if(value != null) { if (!(value instanceof Date)) { - ThreadLocalsHolder.getCollector().appendException(String.format( + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE272", String.format( "ValueError: \"%s\" is not a valid timestamp", - value.toString())); + value.toString()))); } } @@ -278,7 +278,7 @@ public class ValidateUtils { /*python from toscaparser.elements import constraints -from toscaparser.common.exception import ExceptionCollector +from toscaparser.common.exception import ValidationIssueCollector from toscaparser.common.exception import InvalidTOSCAVersionPropertyException from toscaparser.common.exception import RangeValueError from toscaparser.utils.gettextutils import _ @@ -301,7 +301,7 @@ def str_to_num(value): def validate_numeric(value): if not isinstance(value, numbers.Number): - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( ValueError(_('"%s" is not a numeric.') % value)) return value @@ -311,28 +311,28 @@ def validate_integer(value): try: value = int(value) except Exception: - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( ValueError(_('"%s" is not an integer.') % value)) return value def validate_float(value): if not isinstance(value, float): - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( ValueError(_('"%s" is not a float.') % value)) return value def validate_string(value): if not isinstance(value, six.string_types): - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( ValueError(_('"%s" is not a string.') % value)) return value def validate_list(value): if not isinstance(value, list): - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( ValueError(_('"%s" is not a list.') % 
value)) return value @@ -342,7 +342,7 @@ def validate_range(range): validate_list(range) # validate range list has a min and max if len(range) != 2: - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( ValueError(_('"%s" is not a valid range.') % range)) # validate min and max are numerics or the keyword UNBOUNDED min_test = max_test = False @@ -358,7 +358,7 @@ def validate_range(range): if not min_test and not max_test: # Note: min == max is allowed if min > max: - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( ValueError(_('"%s" is not a valid range.') % range)) return range @@ -371,7 +371,7 @@ def validate_value_in_range(value, range, prop_name): # Note: value is valid if equal to min if range[0] != RANGE_UNBOUNDED: if value < range[0]: - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( RangeValueError(pname=prop_name, pvalue=value, vmin=range[0], @@ -379,7 +379,7 @@ def validate_value_in_range(value, range, prop_name): # Note: value is valid if equal to max if range[1] != RANGE_UNBOUNDED: if value > range[1]: - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( RangeValueError(pname=prop_name, pvalue=value, vmin=range[0], @@ -389,7 +389,7 @@ def validate_value_in_range(value, range, prop_name): def validate_map(value): if not isinstance(value, collections.Mapping): - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( ValueError(_('"%s" is not a map.') % value)) return value @@ -403,7 +403,7 @@ def validate_boolean(value): if normalised in ['true', 'false']: return normalised == 'true' - ExceptionCollector.appendException( + ValidationIssueCollector.appendException( ValueError(_('"%s" is not a boolean.') % value)) @@ -417,7 +417,7 @@ def validate_timestamp(value): except Exception as e: original_err_msg = str(e) log.error(original_err_msg) - ExceptionCollector.appendException( + 
ValidationIssueCollector.appendException( ValueError(_('"%(val)s" is not a valid timestamp. "%(msg)s"') % {'val': value, 'msg': original_err_msg})) return diff --git a/src/test/java/org/openecomp/sdc/toscaparser/api/GetValidationIssues.java b/src/test/java/org/openecomp/sdc/toscaparser/api/GetValidationIssues.java new file mode 100644 index 0000000..7a0eec1 --- /dev/null +++ b/src/test/java/org/openecomp/sdc/toscaparser/api/GetValidationIssues.java @@ -0,0 +1,81 @@ +package org.openecomp.sdc.toscaparser.api; + +import com.opencsv.CSVWriter; + +import java.io.File; +import java.io.FileNotFoundException; +import java.io.FileWriter; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Date; +import java.util.List; +import java.util.Scanner; +//Generate excel file, include all validation issues errors in jtosca +//the error java code, the line number and file name for each error. +public class GetValidationIssues { + + public static CSVWriter fileWriter = null; + public static List data = new ArrayList<>(); + + public static void main(String[] args) { + System.out.println("GetAllValidationIssues - path to project files Directory is " + Arrays.toString(args)); + File jtoscaFiles = new File(args[0]+ "\\jtosca\\src\\main\\java\\org\\openecomp\\sdc\\toscaparser\\api"); + + try { + printFiles(jtoscaFiles); + fileWriter = new CSVWriter(new FileWriter(args[1]+"\\JToscaValidationIssues_"+System.currentTimeMillis()+".csv"), '\t'); + fileWriter.writeNext(new String[] {"Error Message", "Class Name", "Line No."}, false); + fileWriter.writeAll(data, false); + } catch (IOException e) { + e.printStackTrace(); + } finally { + try { + fileWriter.flush(); + fileWriter.close(); + } catch (IOException e) { + System.out.println("Error while flushing/closing fileWriter !!!"); + e.printStackTrace(); + } + } + } + + private static void printFiles(File dir) { + if (dir != null && dir.exists()) { + for (File file : dir.listFiles()) { + if 
(file.isDirectory()) + printFiles(file); + else { + Scanner scanner = null; + try { + scanner = new Scanner(file); + + int lineNum = 0; + while (scanner.hasNextLine()) { + String line = scanner.nextLine(); + lineNum++; + if (line.startsWith("/*python")) + break; + + if (!line.trim().startsWith("//") && !line.trim().startsWith("#") && line.contains("ThreadLocalsHolder.getCollector().appendValidationIssue")) { + String errMsg = line.trim(); + if (!errMsg.contains(";")) { + String nextLine = null; + while (scanner.hasNextLine() && (nextLine == null || !nextLine.contains(";"))) { + nextLine = scanner.nextLine(); + errMsg += nextLine.trim(); + } + } + + data.add(new String[]{errMsg, file.getName(), String.valueOf(lineNum)}); + } + } + } catch (IOException e) { + e.printStackTrace(); + } + } + } + } + } +} + -- cgit 1.2.3-korg From eec33fb03652c0421e83052de60ec95e18e3382b Mon Sep 17 00:00:00 2001 From: Pavel Aharoni Date: Tue, 26 Sep 2017 18:51:38 +0300 Subject: [SDC-402] TDP 335705 Change-Id: Idfba2f7ebc3a7e3319cdfc52014081f7d845b85d Signed-off-by: Pavel Aharoni --- .../sdc/toscaparser/api/elements/InterfacesDef.java | 5 ++--- .../sdc/toscaparser/api/elements/NodeType.java | 3 +-- .../sdc/toscaparser/api/functions/GetInput.java | 20 +++++++++++++------- 3 files changed, 16 insertions(+), 12 deletions(-) diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/InterfacesDef.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/InterfacesDef.java index c13fd96..357ee23 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/InterfacesDef.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/InterfacesDef.java @@ -1,14 +1,13 @@ package org.openecomp.sdc.toscaparser.api.elements; +import org.openecomp.sdc.toscaparser.api.EntityTemplate; import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; import java.util.ArrayList; import 
java.util.LinkedHashMap; import java.util.Map; -import org.openecomp.sdc.toscaparser.api.EntityTemplate; -import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; - public class InterfacesDef extends StatefulEntityType { public static final String LIFECYCLE = "tosca.interfaces.node.lifecycle.Standard"; diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/NodeType.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/NodeType.java index 157e016..cb4aa74 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/NodeType.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/NodeType.java @@ -1,13 +1,12 @@ package org.openecomp.sdc.toscaparser.api.elements; import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; import java.util.ArrayList; import java.util.LinkedHashMap; import java.util.Map; -import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; - public class NodeType extends StatefulEntityType { // TOSCA built-in node type diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetInput.java b/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetInput.java index 67cecd7..14b0d4e 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetInput.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetInput.java @@ -1,14 +1,14 @@ package org.openecomp.sdc.toscaparser.api.functions; -import java.util.ArrayList; -import java.util.LinkedHashMap; - import org.openecomp.sdc.toscaparser.api.DataEntity; import org.openecomp.sdc.toscaparser.api.TopologyTemplate; import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; import org.openecomp.sdc.toscaparser.api.parameters.Input; import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; +import java.util.ArrayList; +import java.util.LinkedHashMap; + public class GetInput extends Function { public 
GetInput(TopologyTemplate toscaTpl,Object context,String name,ArrayList _args) { @@ -51,11 +51,17 @@ public class GetInput extends Function { Object value = DataEntity.validateDatatype( type, toscaTpl.getParsedParams().get(getInputName()),null,null,null); - - if (value instanceof ArrayList && args.size() == 2 && args.get(1) instanceof Integer) { - return ((ArrayList) value).get((Integer)args.get(1)); + //SDC resolving Get Input + if (value instanceof ArrayList){ + if(args.size() == 2 && args.get(1) instanceof Integer && ((ArrayList) value).size()> (Integer)args.get(1)){ + return ((ArrayList) value).get((Integer) args.get(1)); + } + else{ + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE273",String.format( + "GetInputError: cannot resolve input name \"%s\", the expected structure is an argument with a name of input type list and a second argument with an index in the list", args.get(0)))); + return null; + } } - return value; } -- cgit 1.2.3-korg From f7cc6a3e9bf206b6500da5519f0abad8bf997fe7 Mon Sep 17 00:00:00 2001 From: Pavel Aharoni Date: Wed, 27 Sep 2017 10:18:38 +0300 Subject: [SDC-402] TDP 335705 test fix Change-Id: Ia9a72c70fe084bba250da316bc9d4d443c010c79 Signed-off-by: Pavel Aharoni --- pom.xml | 2 +- .../org/openecomp/sdc/toscaparser/api/functions/GetInput.java | 9 +++++++-- 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/pom.xml b/pom.xml index ba462d9..cf28b14 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ org.openecomp.sdc.jtosca jtosca - 1.1.11-SNAPSHOT + 1.1.12-SNAPSHOT sdc-jtosca diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetInput.java b/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetInput.java index 14b0d4e..0c96b0f 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetInput.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetInput.java @@ -73,9 +73,14 @@ public class GetInput extends Function { } } if(inputDef != 
null) { - if (args.size() == 2 && args.get(1) instanceof Integer) { - if (inputDef.getDefault() != null && inputDef.getDefault() instanceof ArrayList) { + if (args.size() == 2 && inputDef.getDefault() != null && inputDef.getDefault() instanceof ArrayList){ + if ( args.get(1) instanceof Integer + && ((ArrayList) inputDef.getDefault()).size()> ((Integer)args.get(1)).intValue()) { return ((ArrayList) inputDef.getDefault()).get(((Integer)args.get(1)).intValue()); + }else{ + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE274",(String.format( + "GetInputError: cannot resolve input Def name \"%s\", the expected structure is an argument with a name of input type list and a second argument with an index in the list", args.get(0))))); + return null; } } return inputDef.getDefault(); -- cgit 1.2.3-korg From 64e270e0e18eeac2533dd0b6fed4c19f295aecb7 Mon Sep 17 00:00:00 2001 From: Pavel Aharoni Date: Wed, 27 Sep 2017 17:01:29 +0300 Subject: [SDC-402] fix version.properties with latest Change-Id: I3a65f072a2cfdd79d22109e1f24a32d219e63e17 Signed-off-by: Pavel Aharoni --- version.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/version.properties b/version.properties index 0424f62..b00ca58 100644 --- a/version.properties +++ b/version.properties @@ -5,7 +5,7 @@ major=1 minor=1 -patch=0 +patch=12 base_version=${major}.${minor}.${patch} -- cgit 1.2.3-korg From 098f0718bc90415be59cfbb5bbd7aed17e3d6fb7 Mon Sep 17 00:00:00 2001 From: xg353y Date: Fri, 29 Sep 2017 14:00:15 +0200 Subject: Add rst file Change-Id: Id121e228a12832311e3d75010784ea0b90ed3339 Issue-ID: SDC-419 Signed-off-by: xg353y --- docs/index.rst | 8 ++++++++ 1 file changed, 8 insertions(+) create mode 100644 docs/index.rst diff --git a/docs/index.rst b/docs/index.rst new file mode 100644 index 0000000..833e1aa --- /dev/null +++ b/docs/index.rst @@ -0,0 +1,8 @@ +.. This work is licensed under a Creative Commons Attribution 4.0 International License. 
+ +TODO Add files to toctree and delete this header +------------------------------------------------ +.. toctree:: + :maxdepth: 1 + + -- cgit 1.2.3-korg From 1cbc7e2fd5a9c743f6ddd51e003a7e1ddb03ae43 Mon Sep 17 00:00:00 2001 From: Pavel Aharoni Date: Mon, 2 Oct 2017 16:27:22 +0300 Subject: [SDC-426] get_input fix Change-Id: I86e8f9d7bfe4f34ef305c81e82af818f661f6ddb Signed-off-by: Pavel Aharoni --- pom.xml | 2 +- .../sdc/toscaparser/api/functions/Function.java | 41 +++++++++++++--------- 2 files changed, 26 insertions(+), 17 deletions(-) diff --git a/pom.xml b/pom.xml index cf28b14..f94da34 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ org.openecomp.sdc.jtosca jtosca - 1.1.12-SNAPSHOT + 1.1.13-SNAPSHOT sdc-jtosca diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/functions/Function.java b/src/main/java/org/openecomp/sdc/toscaparser/api/functions/Function.java index 0278508..3437735 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/functions/Function.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/functions/Function.java @@ -98,31 +98,40 @@ public abstract class Function { if (rawFunctionObj instanceof LinkedHashMap) { // In map type case LinkedHashMap rawFunction = ((LinkedHashMap) rawFunctionObj); - if(rawFunction.size() == 1) { // End point + if(rawFunction.size() == 1 && + !(rawFunction.values().iterator().next() instanceof LinkedHashMap)) { // End point return getFunctionForObjectItem(ttpl, context, rawFunction, resolveGetInput); } else { - // iterate over map nested properties in recursion, convert leaves to function, - // and collect them in the same hierarchy as the original map. 
- LinkedHashMap rawFunctionObjMap = new LinkedHashMap(); - for (Object rawFunctionObjItem: rawFunction.entrySet()) { - Object itemValue = getFunction(ttpl, context, ((Map.Entry)rawFunctionObjItem).getValue(), resolveGetInput); - rawFunctionObjMap.put(((Map.Entry)rawFunctionObjItem).getKey(), itemValue); - } - return rawFunctionObjMap; + return getFunctionForMap(ttpl, context, rawFunction, resolveGetInput); } } else if (rawFunctionObj instanceof ArrayList) { // In list type case - // iterate over list properties in recursion, convert leaves to function, - // and collect them in the same hierarchy as the original list. - ArrayList rawFunctionObjList = new ArrayList<>(); - for (Object rawFunctionObjItem: (ArrayList) rawFunctionObj) { - rawFunctionObjList.add(getFunction(ttpl, context, rawFunctionObjItem, resolveGetInput)); - } - return rawFunctionObjList; + return getFunctionForList(ttpl, context, (ArrayList) rawFunctionObj, resolveGetInput); } return rawFunctionObj; } + private static Object getFunctionForList(TopologyTemplate ttpl, Object context, ArrayList rawFunctionObj, boolean resolveGetInput) { + // iterate over list properties in recursion, convert leaves to function, + // and collect them in the same hierarchy as the original list. + ArrayList rawFunctionObjList = new ArrayList<>(); + for (Object rawFunctionObjItem: rawFunctionObj) { + rawFunctionObjList.add(getFunction(ttpl, context, rawFunctionObjItem, resolveGetInput)); + } + return rawFunctionObjList; + } + + private static Object getFunctionForMap(TopologyTemplate ttpl, Object context, LinkedHashMap rawFunction, boolean resolveGetInput) { + // iterate over map nested properties in recursion, convert leaves to function, + // and collect them in the same hierarchy as the original map. 
+ LinkedHashMap rawFunctionObjMap = new LinkedHashMap(); + for (Object rawFunctionObjItem: rawFunction.entrySet()) { + Object itemValue = getFunction(ttpl, context, ((Map.Entry)rawFunctionObjItem).getValue(), resolveGetInput); + rawFunctionObjMap.put(((Map.Entry)rawFunctionObjItem).getKey(), itemValue); + } + return rawFunctionObjMap; + } + private static Object getFunctionForObjectItem(TopologyTemplate ttpl, Object context, Object rawFunctionObjItem, boolean resolveGetInput) { if(isFunction(rawFunctionObjItem)) { LinkedHashMap rawFunction = (LinkedHashMap) rawFunctionObjItem; -- cgit 1.2.3-korg From ee82b7d120725c1c5ebad4bceaf8cf8a1206e50e Mon Sep 17 00:00:00 2001 From: Michael Lando Date: Tue, 24 Oct 2017 23:19:06 +0300 Subject: update version file Change-Id: If4c8542319ac23167bc138719d059897ad0b4aa2 Issue-Id: SDC-530 Signed-off-by: Michael Lando --- version.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/version.properties b/version.properties index b00ca58..2c02041 100644 --- a/version.properties +++ b/version.properties @@ -5,7 +5,7 @@ major=1 minor=1 -patch=12 +patch=13 base_version=${major}.${minor}.${patch} -- cgit 1.2.3-korg From 8279429c5bc5b7567237da42bb2f5a9ebab8cdb9 Mon Sep 17 00:00:00 2001 From: Yuli Shlosberg Date: Tue, 7 Nov 2017 17:01:14 +0200 Subject: add sonar to JTOSCA Change-Id: Ic87fa456ec3833a8c33b2d127c1599ab95fad9b9 Issue-Id: SDC-632 Signed-off-by: Yuli Shlosberg (cherry picked from commit b4a13fb4abe20753a176a3958879d39fa920c4e5) --- pom.xml | 18 ++++++++++++++++-- 1 file changed, 16 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index f94da34..40b511a 100644 --- a/pom.xml +++ b/pom.xml @@ -19,14 +19,23 @@ - true - ${project.basedir} + + ${project.basedir}/target/jacoco.exec https://nexus.onap.org /content/sites/site/org/openecomp/sdc/jtosca/${project.version} snapshots releases 176c31dfe190a + + ${project.build.sourceEncoding} + true + ${project.basedir} + . 
+ **/scripts/**/* + **/test/**/*,**/tests/**/* + app/**/*.js,server-mock/**/*.js,src/**/*.js,src/main/**/*.java + ${project.version} @@ -185,6 +194,11 @@ + + org.sonarsource.scanner.maven + sonar-maven-plugin + 3.0.2 + -- cgit 1.2.3-korg From ddfa0448cc31009a8fcad826440f130c2f5a1abf Mon Sep 17 00:00:00 2001 From: Idan Amit Date: Tue, 7 Nov 2017 17:59:15 +0200 Subject: Remove MojoHaus license plugin Removed MojoHaus license-maven-plugin from jtosca pom.xml Change-Id: I71642e618b3a413106fb20149cd510f7aef7868a Issue-Id: SDC-161 Signed-off-by: Idan Amit --- pom.xml | 35 +---------------------------------- 1 file changed, 1 insertion(+), 34 deletions(-) diff --git a/pom.xml b/pom.xml index 40b511a..8337a3a 100644 --- a/pom.xml +++ b/pom.xml @@ -160,40 +160,7 @@ maven-javadoc-plugin 2.10.3 - - - org.codehaus.mojo - license-maven-plugin - 1.10 - - false - ============LICENSE_START======================================================= - ============LICENSE_END========================================================= - ================================================================================ - apache_v2 - 2017 - AT&T Intellectual Property. All rights - reserved. 
- jtosca - true - true - true - true - false - - **/*.java - - - - - first - - update-file-header - - - - - + org.sonarsource.scanner.maven sonar-maven-plugin -- cgit 1.2.3-korg From a5ae1468e16834137bc8331dc1661e700f181dc8 Mon Sep 17 00:00:00 2001 From: Pavel Aharoni Date: Tue, 7 Nov 2017 21:58:37 +0200 Subject: [SDC-643] Fix Constraint validation Change-Id: I02b5b8b4516f0c190d460c067ea40bb6880dc8cf Signed-off-by: Pavel Aharoni --- pom.xml | 2 +- .../sdc/toscaparser/api/elements/constraints/Constraint.java | 6 ++++++ 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 8337a3a..3b26cab 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ org.openecomp.sdc.jtosca jtosca - 1.1.13-SNAPSHOT + 1.1.14-SNAPSHOT sdc-jtosca diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Constraint.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Constraint.java index 253cc0c..5cf7444 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Constraint.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Constraint.java @@ -5,6 +5,7 @@ import java.util.LinkedHashMap; import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; import org.openecomp.sdc.toscaparser.api.elements.ScalarUnit; +import org.openecomp.sdc.toscaparser.api.functions.Function; import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; public abstract class Constraint { @@ -141,6 +142,11 @@ public abstract class Constraint { } public void validate(Object value) { + if (Function.isFunction(value)){ + //skipping constraints check for functions + return; + } + valueMsg = value; boolean bFound = false; for(String s: ScalarUnit.SCALAR_UNIT_TYPES) { -- cgit 1.2.3-korg From 0472ae9b03fcbacab5b8108989e7e1fb93288f43 Mon Sep 17 00:00:00 2001 From: Gal Grottas Date: Mon, 20 Nov 2017 15:41:04 +0200 Subject: Tosca parser fails to parse csar with get_attributes Change-Id: 
Ibe6f0f9e35cf55fe3de6aa300b0cd09c000705f4 Issue-Id: SDC-682 Signed-off-by: Gal Grottas --- pom.xml | 2 +- .../toscaparser/api/functions/GetAttribute.java | 381 ++++++++++----------- version.properties | 2 +- 3 files changed, 191 insertions(+), 194 deletions(-) diff --git a/pom.xml b/pom.xml index 3b26cab..e3f8db6 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ org.openecomp.sdc.jtosca jtosca - 1.1.14-SNAPSHOT + 1.1.16-SNAPSHOT sdc-jtosca diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetAttribute.java b/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetAttribute.java index 66e9320..5433aac 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetAttribute.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetAttribute.java @@ -40,89 +40,87 @@ public class GetAttribute extends Function { // * { get_attribute: [ HOST, private_address, 0 ] } // * { get_attribute: [ HOST, private_address, 0, some_prop] } - public GetAttribute(TopologyTemplate ttpl,Object context,String name,ArrayList args) { - super(ttpl,context,name,args); + public GetAttribute(TopologyTemplate ttpl, Object context, String name, ArrayList args) { + super(ttpl, context, name, args); } - + @Override void validate() { - if(args.size() < 2) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE146", - "ValueError: Illegal arguments for function \"get_attribute\". Expected arguments: \"node-template-name\", \"req-or-cap\" (optional), \"property name.\"")); - return; - } - else if(args.size() == 2) { - _findNodeTemplateContainingAttribute(); + if (args.size() < 2) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE146", + "ValueError: Illegal arguments for function \"get_attribute\". 
Expected arguments: \"node-template-name\", \"req-or-cap\" (optional), \"property name.\"")); + return; + } else if (args.size() == 2) { + _findNodeTemplateContainingAttribute(); + } else { + NodeTemplate nodeTpl = _findNodeTemplate((String) args.get(0)); + if (nodeTpl == null) { + return; + } + int index = 2; + AttributeDef attr = nodeTpl.getTypeDefinition().getAttributeDefValue((String) args.get(1)); + if (attr != null) { + // found + } else { + index = 3; + // then check the req or caps + if (!(args.get(1) instanceof String) || !(args.get(2) instanceof String)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE146", "ValueError: Illegal arguments for function \"get_attribute\". Expected a String argument")); + } + + attr = _findReqOrCapAttribute(args.get(1).toString(), args.get(2).toString()); + if (attr == null) { + return; + } + } + + + String valueType = (String) attr.getSchema().get("type"); + if (args.size() > index) { + for (Object elem : args.subList(index, args.size())) { + if (valueType.equals("list")) { + if (!(elem instanceof Integer)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE147", String.format( + "ValueError: Illegal arguments for function \"get_attribute\" \"%s\". Expected positive integer argument", + elem.toString()))); + } + Object ob = attr.getSchema().get("entry_schema"); + valueType = (String) + ((LinkedHashMap) ob).get("type"); + } else if (valueType.equals("map")) { + Object ob = attr.getSchema().get("entry_schema"); + valueType = (String) + ((LinkedHashMap) ob).get("type"); + } else { + boolean bFound = false; + for (String p : Schema.PROPERTY_TYPES) { + if (p.equals(valueType)) { + bFound = true; + break; + } + } + if (bFound) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE148", String.format( + "ValueError: 'Illegal arguments for function \"get_attribute\". 
Unexpected attribute/index value \"%s\"", + elem))); + return; + } else { // It is a complex type + DataType dataType = new DataType(valueType, null); + LinkedHashMap props = + dataType.getAllProperties(); + PropertyDef prop = props.get((String) elem); + if (prop != null) { + valueType = (String) prop.getSchema().get("type"); + } else { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE149", String.format( + "KeyError: Illegal arguments for function \"get_attribute\". Attribute name \"%s\" not found in \"%\"", + elem, valueType))); + } + } + } + } + } } - else { - NodeTemplate nodeTpl = _findNodeTemplate((String)args.get(0)); - if(nodeTpl == null) { - return; - } - int index = 2; - AttributeDef attr = nodeTpl.getTypeDefinition().getAttributeDefValue((String)args.get(1)); - if(attr != null) { - // found - } - else { - index = 3; - // then check the req or caps - attr = _findReqOrCapAttribute((String)args.get(1),(String)args.get(2)); - if(attr == null) { - return; - } - } - - String valueType = (String)attr.getSchema().get("type"); - if(args.size() > index) { - for(Object elem: args.subList(index,args.size())) { - if(valueType.equals("list")) { - if(!(elem instanceof Integer)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE147", String.format( - "ValueError: Illegal arguments for function \"get_attribute\" \"%s\". 
Expected positive integer argument", - elem.toString()))); - } - Object ob = attr.getSchema().get("entry_schema"); - valueType = (String) - ((LinkedHashMap)ob).get("type"); - } - else if(valueType.equals("map")) { - Object ob = attr.getSchema().get("entry_schema"); - valueType = (String) - ((LinkedHashMap)ob).get("type"); - } - else { - boolean bFound = false; - for(String p: Schema.PROPERTY_TYPES) { - if(p.equals(valueType)) { - bFound = true; - break; - } - } - if(bFound) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE148", String.format( - "ValueError: 'Illegal arguments for function \"get_attribute\". Unexpected attribute/index value \"%s\"", - elem))); - return; - } - else { // It is a complex type - DataType dataType = new DataType(valueType,null); - LinkedHashMap props = - dataType.getAllProperties(); - PropertyDef prop = props.get((String)elem); - if(prop != null) { - valueType = (String)prop.getSchema().get("type"); - } - else { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE149", String.format( - "KeyError: Illegal arguments for function \"get_attribute\". Attribute name \"%s\" not found in \"%\"", - elem,valueType))); - } - } - } - } - } - } } @Override @@ -131,48 +129,48 @@ public class GetAttribute extends Function { } private NodeTemplate getReferencedNodeTemplate() { - // Gets the NodeTemplate instance the get_attribute function refers to - - // If HOST keyword was used as the node template argument, the node - // template which contains the attribute along the HostedOn relationship - // chain will be returned. - - return _findNodeTemplateContainingAttribute(); - + // Gets the NodeTemplate instance the get_attribute function refers to + + // If HOST keyword was used as the node template argument, the node + // template which contains the attribute along the HostedOn relationship + // chain will be returned. 
+ + return _findNodeTemplateContainingAttribute(); + } // Attributes can be explicitly created as part of the type definition // or a property name can be implicitly used as an attribute name private NodeTemplate _findNodeTemplateContainingAttribute() { - NodeTemplate nodeTpl = _findNodeTemplate((String)args.get(0)); - if(nodeTpl != null && - !_attributeExistsInType(nodeTpl.getTypeDefinition()) && - !nodeTpl.getProperties().keySet().contains(getAttributeName())) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE150", String.format( - "KeyError: Attribute \"%s\" was not found in node template \"%s\"", - getAttributeName(),nodeTpl.getName()))); + NodeTemplate nodeTpl = _findNodeTemplate((String) args.get(0)); + if (nodeTpl != null && + !_attributeExistsInType(nodeTpl.getTypeDefinition()) && + !nodeTpl.getProperties().keySet().contains(getAttributeName())) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE150", String.format( + "KeyError: Attribute \"%s\" was not found in node template \"%s\"", + getAttributeName(), nodeTpl.getName()))); } - return nodeTpl; + return nodeTpl; } private boolean _attributeExistsInType(StatefulEntityType typeDefinition) { - LinkedHashMap attrsDef = typeDefinition.getAttributesDef(); - return attrsDef.get(getAttributeName()) != null; + LinkedHashMap attrsDef = typeDefinition.getAttributesDef(); + return attrsDef.get(getAttributeName()) != null; } private NodeTemplate _findHostContainingAttribute(String nodeTemplateName) { NodeTemplate nodeTemplate = _findNodeTemplate(nodeTemplateName); - if(nodeTemplate != null) { - LinkedHashMap hostedOnRel = - (LinkedHashMap)EntityType.TOSCA_DEF.get(HOSTED_ON); - for(RequirementAssignment r: nodeTemplate.getRequirements().getAll()) { + if (nodeTemplate != null) { + LinkedHashMap hostedOnRel = + (LinkedHashMap) EntityType.TOSCA_DEF.get(HOSTED_ON); + for (RequirementAssignment r : nodeTemplate.getRequirements().getAll()) { String 
targetName = r.getNodeTemplateName(); NodeTemplate targetNode = _findNodeTemplate(targetName); - NodeType targetType = (NodeType)targetNode.getTypeDefinition(); - for(CapabilityTypeDef capability: targetType.getCapabilitiesObjects()) { + NodeType targetType = (NodeType) targetNode.getTypeDefinition(); + for (CapabilityTypeDef capability : targetType.getCapabilitiesObjects()) { // if(((ArrayList)hostedOnRel.get("valid_target_types")).contains(capability.getType())) { - if(capability.inheritsFrom((ArrayList)hostedOnRel.get("valid_target_types"))) { - if(_attributeExistsInType(targetType)) { + if (capability.inheritsFrom((ArrayList) hostedOnRel.get("valid_target_types"))) { + if (_attributeExistsInType(targetType)) { return targetNode; } return _findHostContainingAttribute(targetName); @@ -182,110 +180,109 @@ public class GetAttribute extends Function { } return null; } - + private NodeTemplate _findNodeTemplate(String nodeTemplateName) { - if(nodeTemplateName.equals(HOST)) { - // Currently this is the only way to tell whether the function - // is used within the outputs section of the TOSCA template. - if(context instanceof ArrayList) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE151", - "ValueError: \"get_attribute: [ HOST, ... ]\" is not allowed in \"outputs\" section of the TOSCA template")); - return null; - } - NodeTemplate nodeTpl = _findHostContainingAttribute(SELF); - if(nodeTpl == null) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE152", String.format( - "ValueError: \"get_attribute: [ HOST, ... 
]\" was used in " + - "node template \"%s\" but \"%s\" was not found in " + - "the relationship chain",((NodeTemplate)context).getName(),HOSTED_ON))); - return null; - } - return nodeTpl; - } - if(nodeTemplateName.equals(TARGET)) { - if(!(((EntityTemplate)context).getTypeDefinition() instanceof RelationshipType)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE153", - "KeyError: \"TARGET\" keyword can only be used in context " + - " to \"Relationships\" target node")); - return null; - } - return ((RelationshipTemplate)context).getTarget(); - } - if(nodeTemplateName.equals(SOURCE)) { - if(!(((EntityTemplate)context).getTypeDefinition() instanceof RelationshipType)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE154", - "KeyError: \"SOURCE\" keyword can only be used in context " + - " to \"Relationships\" source node")); - return null; - } - return ((RelationshipTemplate)context).getTarget(); - } - String name; - if(nodeTemplateName.equals(SELF) && !(context instanceof ArrayList)) { - name = ((NodeTemplate)context).getName(); - } - else { - name = nodeTemplateName; - } - for(NodeTemplate nt: toscaTpl.getNodeTemplates()) { - if(nt.getName().equals(name)) { - return nt; - } - } - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE155", String.format( - "KeyError: Node template \"%s\" was not found",nodeTemplateName))); - return null; - } - - public AttributeDef _findReqOrCapAttribute(String reqOrCap,String attrName) { - - NodeTemplate nodeTpl = _findNodeTemplate((String)args.get(0)); - // Find attribute in node template's requirements - for(RequirementAssignment r: nodeTpl.getRequirements().getAll()) { + if (nodeTemplateName.equals(HOST)) { + // Currently this is the only way to tell whether the function + // is used within the outputs section of the TOSCA template. 
+ if (context instanceof ArrayList) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE151", + "ValueError: \"get_attribute: [ HOST, ... ]\" is not allowed in \"outputs\" section of the TOSCA template")); + return null; + } + NodeTemplate nodeTpl = _findHostContainingAttribute(SELF); + if (nodeTpl == null) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE152", String.format( + "ValueError: \"get_attribute: [ HOST, ... ]\" was used in " + + "node template \"%s\" but \"%s\" was not found in " + + "the relationship chain", ((NodeTemplate) context).getName(), HOSTED_ON))); + return null; + } + return nodeTpl; + } + if (nodeTemplateName.equals(TARGET)) { + if (!(((EntityTemplate) context).getTypeDefinition() instanceof RelationshipType)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE153", + "KeyError: \"TARGET\" keyword can only be used in context " + + " to \"Relationships\" target node")); + return null; + } + return ((RelationshipTemplate) context).getTarget(); + } + if (nodeTemplateName.equals(SOURCE)) { + if (!(((EntityTemplate) context).getTypeDefinition() instanceof RelationshipType)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE154", + "KeyError: \"SOURCE\" keyword can only be used in context " + + " to \"Relationships\" source node")); + return null; + } + return ((RelationshipTemplate) context).getTarget(); + } + String name; + if (nodeTemplateName.equals(SELF) && !(context instanceof ArrayList)) { + name = ((NodeTemplate) context).getName(); + } else { + name = nodeTemplateName; + } + for (NodeTemplate nt : toscaTpl.getNodeTemplates()) { + if (nt.getName().equals(name)) { + return nt; + } + } + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE155", String.format( + "KeyError: Node template \"%s\" was not found", nodeTemplateName))); + return null; + } + + public 
AttributeDef _findReqOrCapAttribute(String reqOrCap, String attrName) { + + NodeTemplate nodeTpl = _findNodeTemplate((String) args.get(0)); + // Find attribute in node template's requirements + for (RequirementAssignment r : nodeTpl.getRequirements().getAll()) { String nodeName = r.getNodeTemplateName(); - if(r.getName().equals(reqOrCap)) { + if (r.getName().equals(reqOrCap)) { NodeTemplate nodeTemplate = _findNodeTemplate(nodeName); - return _getCapabilityAttribute(nodeTemplate,r.getName(),attrName); + return _getCapabilityAttribute(nodeTemplate, r.getName(), attrName); } } - // If requirement was not found, look in node template's capabilities - return _getCapabilityAttribute(nodeTpl,reqOrCap,attrName); + // If requirement was not found, look in node template's capabilities + return _getCapabilityAttribute(nodeTpl, reqOrCap, attrName); } private AttributeDef _getCapabilityAttribute(NodeTemplate nodeTemplate, - String capabilityName, - String attrName) { - // Gets a node template capability attribute + String capabilityName, + String attrName) { + // Gets a node template capability attribute CapabilityAssignment cap = nodeTemplate.getCapabilities().getCapabilityByName(capabilityName); - if(cap != null) { - AttributeDef attribute = null; - LinkedHashMap attrs = - cap.getDefinition().getAttributesDef(); - if(attrs != null && attrs.keySet().contains(attrName)) { - attribute = attrs.get(attrName); - } - if(attribute == null) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE156", String.format( - "KeyError: Attribute \"%s\" was not found in capability \"%s\" of node template \"%s\" referenced from node template \"%s\"", - attrName,capabilityName,nodeTemplate.getName(),((NodeTemplate)context).getName()))); - } - return attribute; - } - String msg = String.format( - "Requirement/CapabilityAssignment \"%s\" referenced from node template \"%s\" was not found in node template \"%s\"", - 
capabilityName,((NodeTemplate)context).getName(),nodeTemplate.getName()); - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE157", "KeyError: " + msg)); - return null; + if (cap != null) { + AttributeDef attribute = null; + LinkedHashMap attrs = + cap.getDefinition().getAttributesDef(); + if (attrs != null && attrs.keySet().contains(attrName)) { + attribute = attrs.get(attrName); + } + if (attribute == null) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE156", String.format( + "KeyError: Attribute \"%s\" was not found in capability \"%s\" of node template \"%s\" referenced from node template \"%s\"", + attrName, capabilityName, nodeTemplate.getName(), ((NodeTemplate) context).getName()))); + } + return attribute; + } + String msg = String.format( + "Requirement/CapabilityAssignment \"%s\" referenced from node template \"%s\" was not found in node template \"%s\"", + capabilityName, ((NodeTemplate) context).getName(), nodeTemplate.getName()); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE157", "KeyError: " + msg)); + return null; } String getNodeTemplateName() { - return (String)args.get(0); + return (String) args.get(0); } String getAttributeName() { - return (String)args.get(1); + return (String) args.get(1); } } diff --git a/version.properties b/version.properties index 2c02041..f7d411d 100644 --- a/version.properties +++ b/version.properties @@ -5,7 +5,7 @@ major=1 minor=1 -patch=13 +patch=16 base_version=${major}.${minor}.${patch} -- cgit 1.2.3-korg From ea45979501b62b3c1e9e02edfb7ded6779e85018 Mon Sep 17 00:00:00 2001 From: Pavel Aharoni Date: Tue, 21 Nov 2017 17:26:49 +0200 Subject: [SDC-697] Adding snapshots repo to pom.xml Change-Id: Iefb98c060f607bd74541ff2744ab92a145514e62 Signed-off-by: Pavel Aharoni --- pom.xml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/pom.xml b/pom.xml index e3f8db6..b7172c6 100644 --- a/pom.xml +++ 
b/pom.xml @@ -185,6 +185,11 @@ Staging Repository ${nexus.proxy}/content/repositories/staging/ + + ecomp-snapshots + Snapshots Repository + ${nexus.proxy}/content/repositories/snapshots/ + -- cgit 1.2.3-korg From d5e341bc22977beac2cc2c9d4b9368245cefda3a Mon Sep 17 00:00:00 2001 From: Yuli Shlosberg Date: Tue, 28 Nov 2017 15:55:44 +0200 Subject: add tests Change-Id: Ifa5f6891a06d6bf5ae82d0dd73ee01aa60967afe Issue-ID: SDC-695 Signed-off-by: Yuli Shlosberg --- pom.xml | 15 +- .../sdc/toscaparser/api/CapabilityAssignment.java | 8 +- .../sdc/toscaparser/api/DataEntity.java.orig | 453 ----------- .../sdc/toscaparser/api/EntityTemplate.java | 2 +- .../sdc/toscaparser/api/TopologyTemplate.java.orig | 859 --------------------- .../sdc/toscaparser/api/ToscaTemplate.java | 5 +- .../sdc/toscaparser/api/elements/NodeType.java | 15 +- .../toscaparser/api/elements/TypeValidation.java | 1 + .../JToscaMetadataParse.java | 26 - .../sdc/toscaparser/api/JToscaMetadataParse.java | 41 + .../csars/tmpCSAR_Huawei_vSPGW_fixed.csar.csar | Bin 0 -> 44576 bytes version.properties | 2 +- 12 files changed, 75 insertions(+), 1352 deletions(-) delete mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/DataEntity.java.orig delete mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/TopologyTemplate.java.orig delete mode 100644 src/test/java/org.openecomp.sdc.toscaparser/JToscaMetadataParse.java create mode 100644 src/test/java/org/openecomp/sdc/toscaparser/api/JToscaMetadataParse.java create mode 100644 src/test/resources/csars/tmpCSAR_Huawei_vSPGW_fixed.csar.csar diff --git a/pom.xml b/pom.xml index b7172c6..c277b0c 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ org.openecomp.sdc.jtosca jtosca - 1.1.16-SNAPSHOT + 1.1.19-SNAPSHOT sdc-jtosca @@ -160,7 +160,18 @@ maven-javadoc-plugin 2.10.3 - + + + + org.apache.maven.plugins + maven-surefire-plugin + 2.19.1 + + + */* + + + org.sonarsource.scanner.maven sonar-maven-plugin diff --git 
a/src/main/java/org/openecomp/sdc/toscaparser/api/CapabilityAssignment.java b/src/main/java/org/openecomp/sdc/toscaparser/api/CapabilityAssignment.java index 0eaa099..f3bc2bd 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/CapabilityAssignment.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/CapabilityAssignment.java @@ -12,13 +12,15 @@ public class CapabilityAssignment { private String name; private LinkedHashMap _properties; private CapabilityTypeDef _definition; + private LinkedHashMap _customDef; public CapabilityAssignment(String cname, - LinkedHashMap cproperties, - CapabilityTypeDef cdefinition) { + LinkedHashMap cproperties, + CapabilityTypeDef cdefinition, LinkedHashMap customDef) { name = cname; _properties = cproperties; _definition = cdefinition; + _customDef = customDef; } /** @@ -38,7 +40,7 @@ public class CapabilityAssignment { if(propsDef != null) { PropertyDef pd = (PropertyDef)propsDef.get(pname); if(pd != null) { - properties.add(new Property(pname,pvalue,pd.getSchema(),null)); + properties.add(new Property(pname,pvalue,pd.getSchema(), _customDef)); } } } diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/DataEntity.java.orig b/src/main/java/org/openecomp/sdc/toscaparser/api/DataEntity.java.orig deleted file mode 100644 index 2c6d923..0000000 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/DataEntity.java.orig +++ /dev/null @@ -1,453 +0,0 @@ -package org.openecomp.sdc.toscaparser.api; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.LinkedHashMap; -import java.util.List; - -import org.openecomp.sdc.toscaparser.api.common.ValidationIssueCollector; -import org.openecomp.sdc.toscaparser.api.elements.*; -import org.openecomp.sdc.toscaparser.api.elements.constraints.Constraint; -import org.openecomp.sdc.toscaparser.api.elements.constraints.Schema; -import org.openecomp.sdc.toscaparser.api.functions.Function; -import org.openecomp.sdc.toscaparser.api.utils.TOSCAVersionProperty; -import 
org.openecomp.sdc.toscaparser.api.utils.ValidateUtils; - -public class DataEntity { - // A complex data value entity - - private LinkedHashMap customDef; - private DataType dataType; - private LinkedHashMap schema; - private Object value; - private String propertyName; - - public DataEntity(String _dataTypeName,Object _valueDict, - LinkedHashMap _customDef,String _propName) { - - customDef = _customDef; - dataType = new DataType(_dataTypeName,_customDef); - schema = dataType.getAllProperties(); - value = _valueDict; - propertyName = _propName; - } - - @SuppressWarnings("unchecked") - public Object validate() { - // Validate the value by the definition of the datatype - - // A datatype can not have both 'type' and 'properties' definitions. - // If the datatype has 'type' definition - if(dataType.getValueType() != null) { - value = DataEntity.validateDatatype(dataType.getValueType(),value,null,customDef,null); - Schema schemaCls = new Schema(propertyName,dataType.getDefs()); - for(Constraint constraint: schemaCls.getConstraints()) { - constraint.validate(value); - } - } - // If the datatype has 'properties' definition - else { - if(!(value instanceof LinkedHashMap)) { - //ERROR under investigation - ExceptionCollector.appendWarning(String.format( - "TypeMismatchError: \"%s\" is not a map. 
The type is \"%s\"", - value.toString(),dataType.getType())); -<<<<<<< HEAD - return value; - } - LinkedHashMap valueDict = (LinkedHashMap)value; -======= - - if (value instanceof List) - value = ((List) value).get(0); - - if (!(value instanceof LinkedHashMap)) - return value; - } - - - - LinkedHashMap valueDict = (LinkedHashMap)value; ->>>>>>> master - ArrayList allowedProps = new ArrayList<>(); - ArrayList requiredProps = new ArrayList<>(); - LinkedHashMap defaultProps = new LinkedHashMap<>(); - if(schema != null) { - allowedProps.addAll(schema.keySet()); - for(String name: schema.keySet()) { - PropertyDef propDef = schema.get(name); - if(propDef.isRequired()) { - requiredProps.add(name); - } - if(propDef.getDefault() != null) { - defaultProps.put(name,propDef.getDefault()); - } - } - } - - // check allowed field - for(String valueKey: valueDict.keySet()) { - //1710 devlop JSON validation - if(!("json").equals(dataType.getType()) && !allowedProps.contains(valueKey)) { - ExceptionCollector.appendException(String.format( - "UnknownFieldError: Data value of type \"%s\" contains unknown field \"%s\"", - dataType.getType(),valueKey)); - } - } - - // check default field - for(String defKey: defaultProps.keySet()) { - Object defValue = defaultProps.get(defKey); - if(valueDict.get(defKey) == null) { - valueDict.put(defKey, defValue); - } - - } - - // check missing field - ArrayList missingProp = new ArrayList<>(); - for(String reqKey: requiredProps) { - if(!valueDict.keySet().contains(reqKey)) { - missingProp.add(reqKey); - } - } - if(missingProp.size() > 0) { - ExceptionCollector.appendWarning(String.format( - "MissingRequiredFieldError: Data value of type \"%s\" is missing required field(s) \"%s\"", - dataType.getType(),missingProp.toString())); - } - - // check every field - for(String vname: valueDict.keySet()) { - Object vvalue = valueDict.get(vname); - LinkedHashMap schemaName = _findSchema(vname); - if(schemaName == null) { - continue; - } - Schema propSchema = 
new Schema(vname,schemaName); - // check if field value meets type defined - DataEntity.validateDatatype(propSchema.getType(), - vvalue, - propSchema.getEntrySchema(), - customDef, - null); - - // check if field value meets constraints defined - if(propSchema.getConstraints() != null) { - for(Constraint constraint: propSchema.getConstraints()) { - if(vvalue instanceof ArrayList) { - for(Object val: (ArrayList)vvalue) { - constraint.validate(val); - } - } - else { - constraint.validate(vvalue); - } - } - } - } - } - return value; - } - - private LinkedHashMap _findSchema(String name) { - if(schema != null && schema.get(name) != null) { - return schema.get(name).getSchema(); - } - return null; - } - - public static Object validateDatatype(String type, - Object value, - LinkedHashMap entrySchema, - LinkedHashMap customDef, - String propName) { - // Validate value with given type - - // If type is list or map, validate its entry by entry_schema(if defined) - // If type is a user-defined complex datatype, custom_def is required. 
- - if(Function.isFunction(value)) { - return value; - } - else if (type == null) { - //NOT ANALYZED - ExceptionCollector.appendWarning(String.format( - "MissingType: Type is missing for value \"%s\"", - value.toString())); - return value; - } - else if(type.equals(Schema.STRING)) { - return ValidateUtils.validateString(value); - } - else if(type.equals(Schema.INTEGER)) { - return ValidateUtils.validateInteger(value); - } - else if(type.equals(Schema.FLOAT)) { - return ValidateUtils.validateFloat(value); - } - else if(type.equals(Schema.NUMBER)) { - return ValidateUtils.validateNumeric(value); - } - else if(type.equals(Schema.BOOLEAN)) { - return ValidateUtils.validateBoolean(value); - } - else if(type.equals(Schema.RANGE)) { - return ValidateUtils.validateRange(value); - } - else if(type.equals(Schema.TIMESTAMP)) { - ValidateUtils.validateTimestamp(value); - return value; - } - else if(type.equals(Schema.LIST)) { - ValidateUtils.validateList(value); - if(entrySchema != null) { - DataEntity.validateEntry(value,entrySchema,customDef); - } - return value; - } - else if(type.equals(Schema.SCALAR_UNIT_SIZE)) { - return (new ScalarUnitSize(value)).validateScalarUnit(); - } - else if(type.equals(Schema.SCALAR_UNIT_FREQUENCY)) { - return (new ScalarUnitFrequency(value)).validateScalarUnit(); - } - else if(type.equals(Schema.SCALAR_UNIT_TIME)) { - return (new ScalarUnitTime(value)).validateScalarUnit(); - } - else if(type.equals(Schema.VERSION)) { - return (new TOSCAVersionProperty(value)).getVersion(); - } - else if(type.equals(Schema.MAP)) { - ValidateUtils.validateMap(value); - if(entrySchema != null) { - DataEntity.validateEntry(value,entrySchema,customDef); - } - return value; - } - else if(type.equals(Schema.PORTSPEC)) { - // tODO(TBD) bug 1567063, validate source & target as PortDef type - // as complex types not just as integers - PortSpec.validateAdditionalReq(value,propName,customDef); - } - else { - DataEntity data = new DataEntity(type,value,customDef,null); - 
return data.validate(); - } - - return value; - } - - @SuppressWarnings("unchecked") - public static Object validateEntry(Object value, - LinkedHashMap entrySchema, - LinkedHashMap customDef) { - - // Validate entries for map and list - Schema schema = new Schema(null,entrySchema); - Object valueob = value; - ArrayList valueList = null; - if(valueob instanceof LinkedHashMap) { - valueList = new ArrayList(((LinkedHashMap)valueob).values()); - } - else if(valueob instanceof ArrayList) { - valueList = (ArrayList)valueob; - } - if(valueList != null) { - for(Object v: valueList) { - DataEntity.validateDatatype(schema.getType(),v,schema.getEntrySchema(),customDef,null); - if(schema.getConstraints() != null) { - for(Constraint constraint: schema.getConstraints()) { - constraint.validate(v); - } - } - } - } - return value; - } - - @Override - public String toString() { - return "DataEntity{" + - "customDef=" + customDef + - ", dataType=" + dataType + - ", schema=" + schema + - ", value=" + value + - ", propertyName='" + propertyName + '\'' + - '}'; - } -} - -/*python - -from toscaparser.common.exception import ExceptionCollector -from toscaparser.common.exception import MissingRequiredFieldError -from toscaparser.common.exception import TypeMismatchError -from toscaparser.common.exception import UnknownFieldError -from toscaparser.elements.constraints import Schema -from toscaparser.elements.datatype import DataType -from toscaparser.elements.portspectype import PortSpec -from toscaparser.elements.scalarunit import ScalarUnit_Frequency -from toscaparser.elements.scalarunit import ScalarUnit_Size -from toscaparser.elements.scalarunit import ScalarUnit_Time -from toscaparser.utils.gettextutils import _ -from toscaparser.utils import validateutils - - -class DataEntity(object): - '''A complex data value entity.''' - - def __init__(self, datatypename, value_dict, custom_def=None, - prop_name=None): - self.custom_def = custom_def - self.datatype = DataType(datatypename, 
custom_def) - self.schema = self.datatype.get_all_properties() - self.value = value_dict - self.property_name = prop_name - - def validate(self): - '''Validate the value by the definition of the datatype.''' - - # A datatype can not have both 'type' and 'properties' definitions. - # If the datatype has 'type' definition - if self.datatype.value_type: - self.value = DataEntity.validate_datatype(self.datatype.value_type, - self.value, - None, - self.custom_def) - schema = Schema(self.property_name, self.datatype.defs) - for constraint in schema.constraints: - constraint.validate(self.value) - # If the datatype has 'properties' definition - else: - if not isinstance(self.value, dict): - ExceptionCollector.appendException( - TypeMismatchError(what=self.value, - type=self.datatype.type)) - allowed_props = [] - required_props = [] - default_props = {} - if self.schema: - allowed_props = self.schema.keys() - for name, prop_def in self.schema.items(): - if prop_def.required: - required_props.append(name) - if prop_def.default: - default_props[name] = prop_def.default - - # check allowed field - for value_key in list(self.value.keys()): - if value_key not in allowed_props: - ExceptionCollector.appendException( - UnknownFieldError(what=(_('Data value of type "%s"') - % self.datatype.type), - field=value_key)) - - # check default field - for def_key, def_value in list(default_props.items()): - if def_key not in list(self.value.keys()): - self.value[def_key] = def_value - - # check missing field - missingprop = [] - for req_key in required_props: - if req_key not in list(self.value.keys()): - missingprop.append(req_key) - if missingprop: - ExceptionCollector.appendException( - MissingRequiredFieldError( - what=(_('Data value of type "%s"') - % self.datatype.type), required=missingprop)) - - # check every field - for name, value in list(self.value.items()): - schema_name = self._find_schema(name) - if not schema_name: - continue - prop_schema = Schema(name, schema_name) - # 
check if field value meets type defined - DataEntity.validate_datatype(prop_schema.type, value, - prop_schema.entry_schema, - self.custom_def) - # check if field value meets constraints defined - if prop_schema.constraints: - for constraint in prop_schema.constraints: - if isinstance(value, list): - for val in value: - constraint.validate(val) - else: - constraint.validate(value) - - return self.value - - def _find_schema(self, name): - if self.schema and name in self.schema.keys(): - return self.schema[name].schema - - @staticmethod - def validate_datatype(type, value, entry_schema=None, custom_def=None, - prop_name=None): - '''Validate value with given type. - - If type is list or map, validate its entry by entry_schema(if defined) - If type is a user-defined complex datatype, custom_def is required. - ''' - from toscaparser.functions import is_function - if is_function(value): - return value - if type == Schema.STRING: - return validateutils.validate_string(value) - elif type == Schema.INTEGER: - return validateutils.validate_integer(value) - elif type == Schema.FLOAT: - return validateutils.validate_float(value) - elif type == Schema.NUMBER: - return validateutils.validate_numeric(value) - elif type == Schema.BOOLEAN: - return validateutils.validate_boolean(value) - elif type == Schema.RANGE: - return validateutils.validate_range(value) - elif type == Schema.TIMESTAMP: - validateutils.validate_timestamp(value) - return value - elif type == Schema.LIST: - validateutils.validate_list(value) - if entry_schema: - DataEntity.validate_entry(value, entry_schema, custom_def) - return value - elif type == Schema.SCALAR_UNIT_SIZE: - return ScalarUnit_Size(value).validate_scalar_unit() - elif type == Schema.SCALAR_UNIT_FREQUENCY: - return ScalarUnit_Frequency(value).validate_scalar_unit() - elif type == Schema.SCALAR_UNIT_TIME: - return ScalarUnit_Time(value).validate_scalar_unit() - elif type == Schema.VERSION: - return 
validateutils.TOSCAVersionProperty(value).get_version() - elif type == Schema.MAP: - validateutils.validate_map(value) - if entry_schema: - DataEntity.validate_entry(value, entry_schema, custom_def) - return value - elif type == Schema.PORTSPEC: - # tODO(TBD) bug 1567063, validate source & target as PortDef type - # as complex types not just as integers - PortSpec.validate_additional_req(value, prop_name, custom_def) - else: - data = DataEntity(type, value, custom_def) - return data.validate() - - @staticmethod - def validate_entry(value, entry_schema, custom_def=None): - '''Validate entries for map and list.''' - schema = Schema(None, entry_schema) - valuelist = value - if isinstance(value, dict): - valuelist = list(value.values()) - for v in valuelist: - DataEntity.validate_datatype(schema.type, v, schema.entry_schema, - custom_def) - if schema.constraints: - for constraint in schema.constraints: - constraint.validate(v) - return value -*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/EntityTemplate.java b/src/main/java/org/openecomp/sdc/toscaparser/api/EntityTemplate.java index 32de069..ed19d88 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/EntityTemplate.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/EntityTemplate.java @@ -279,7 +279,7 @@ public abstract class EntityTemplate { if(pp != null) { properties.putAll(pp); } - CapabilityAssignment cap = new CapabilityAssignment(name, properties, c); + CapabilityAssignment cap = new CapabilityAssignment(name, properties, c, customDef); capability.add(cap); } } diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/TopologyTemplate.java.orig b/src/main/java/org/openecomp/sdc/toscaparser/api/TopologyTemplate.java.orig deleted file mode 100644 index 7e5f4af..0000000 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/TopologyTemplate.java.orig +++ /dev/null @@ -1,859 +0,0 @@ -package org.openecomp.sdc.toscaparser.api; - -import 
org.openecomp.sdc.toscaparser.api.common.JToscaError; - -import java.util.ArrayList; -import java.util.HashSet; -import java.util.LinkedHashMap; -import java.util.Map; - -import org.openecomp.sdc.toscaparser.api.common.ValidationIssueCollector; -import org.openecomp.sdc.toscaparser.api.elements.InterfacesDef; -import org.openecomp.sdc.toscaparser.api.elements.NodeType; -import org.openecomp.sdc.toscaparser.api.elements.RelationshipType; -import org.openecomp.sdc.toscaparser.api.functions.Function; -import org.openecomp.sdc.toscaparser.api.functions.GetAttribute; -import org.openecomp.sdc.toscaparser.api.functions.GetInput; -import org.openecomp.sdc.toscaparser.api.parameters.Input; -import org.openecomp.sdc.toscaparser.api.parameters.Output; -import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; - -public class TopologyTemplate { - - private static final String DESCRIPTION = "description"; - private static final String INPUTS = "inputs"; - private static final String NODE_TEMPLATES = "node_templates"; - private static final String RELATIONSHIP_TEMPLATES = "relationship_templates"; - private static final String OUTPUTS = "outputs"; - private static final String GROUPS = "groups"; - private static final String SUBSTITUTION_MAPPINGS = "substitution_mappings"; - private static final String POLICIES = "policies"; - private static final String METADATA = "metadata"; - - private static String SECTIONS[] = { - DESCRIPTION, INPUTS, NODE_TEMPLATES, RELATIONSHIP_TEMPLATES, - OUTPUTS, GROUPS, SUBSTITUTION_MAPPINGS, POLICIES, METADATA - }; - - private LinkedHashMap tpl; - LinkedHashMap metaData; - private ArrayList inputs; - private ArrayList outputs; - private ArrayList relationshipTemplates; - private ArrayList nodeTemplates; - private LinkedHashMap customDefs; - private LinkedHashMap relTypes;//TYPE - private NodeTemplate subMappedNodeTemplate; - private ArrayList groups; - private ArrayList policies; - private LinkedHashMap parsedParams = null;//TYPE - private 
String description; - private ToscaGraph graph; - private SubstitutionMappings substitutionMappings; - - public TopologyTemplate( - LinkedHashMap _template, - LinkedHashMap _customDefs, - LinkedHashMap _relTypes,//TYPE - LinkedHashMap _parsedParams, - NodeTemplate _subMappedNodeTemplate) { - - tpl = _template; - if(tpl != null) { - subMappedNodeTemplate = _subMappedNodeTemplate; - metaData = _metaData(); - customDefs = _customDefs; - relTypes = _relTypes; - parsedParams = _parsedParams; - _validateField(); - description = _tplDescription(); - inputs = _inputs(); - relationshipTemplates =_relationshipTemplates(); - nodeTemplates = _nodeTemplates(); - outputs = _outputs(); - if(nodeTemplates != null) { - graph = new ToscaGraph(nodeTemplates); - } - groups = _groups(); - policies = _policies(); - _processIntrinsicFunctions(); - substitutionMappings = _substitutionMappings(); - } - } - - @SuppressWarnings("unchecked") - private ArrayList _inputs() { - //DumpUtils.dumpYaml(customDefs,0); - ArrayList alInputs = new ArrayList<>(); - for(String name: _tplInputs().keySet()) { - Object attrs = _tplInputs().get(name); - Input input = new Input(name,(LinkedHashMap)attrs,customDefs); - if(parsedParams != null && parsedParams.get(name) != null) { - input.validate(parsedParams.get(name)); - } - else { - Object _default = input.getDefault(); - if(_default != null) { - input.validate(_default); - } - } - if((parsedParams != null && parsedParams.get(input.getName()) == null || parsedParams == null) - && input.isRequired() && input.getDefault() == null) { - System.out.format("Log warning: The required parameter \"%s\" is not provided\n",input.getName()); - } - alInputs.add(input); - } - return alInputs; - - } - - private LinkedHashMap _metaData() { - if(tpl.get(METADATA) != null) { - return (LinkedHashMap)tpl.get(METADATA); - } - else { - return new LinkedHashMap(); - } - - } - - private ArrayList _nodeTemplates() { - ArrayList alNodeTemplates = new ArrayList<>(); - LinkedHashMap 
tpls = _tplNodeTemplates(); - if(tpls != null) { - for(String name: tpls.keySet()) { - NodeTemplate tpl = new NodeTemplate(name, - tpls, - customDefs, - relationshipTemplates, - relTypes); - if(tpl.getTypeDefinition() != null) { - boolean b = NodeType.TOSCA_DEF.get(tpl.getType()) != null; - if(b || (tpl.getCustomDef() != null && !tpl.getCustomDef().isEmpty())) { - tpl.validate(); - alNodeTemplates.add(tpl); - } - } - } - } - return alNodeTemplates; - } - - @SuppressWarnings("unchecked") - private ArrayList _relationshipTemplates() { - ArrayList alRelationshipTemplates = new ArrayList<>(); - LinkedHashMap tpls = _tplRelationshipTemplates(); - if(tpls != null) { - for(String name: tpls.keySet()) { - RelationshipTemplate tpl = new RelationshipTemplate( - (LinkedHashMap)tpls.get(name),name,customDefs,null,null); - - alRelationshipTemplates.add(tpl); - } - } - return alRelationshipTemplates; - } - - private ArrayList _outputs() { - ArrayList alOutputs = new ArrayList<>(); - for(Map.Entry me: _tplOutputs().entrySet()) { - String oname = me.getKey(); - LinkedHashMap oattrs = (LinkedHashMap)me.getValue(); - Output o = new Output(oname,oattrs); - o.validate(); - alOutputs.add(o); - } - return alOutputs; - } - - private SubstitutionMappings _substitutionMappings() { - LinkedHashMap tplSubstitutionMapping = (LinkedHashMap) _tplSubstitutionMappings(); - - //*** the commenting-out below and the weaker condition are in the Python source - // #if tpl_substitution_mapping and self.sub_mapped_node_template: - if(tplSubstitutionMapping != null && tplSubstitutionMapping.size() > 0) { - return new SubstitutionMappings(tplSubstitutionMapping, - nodeTemplates, - inputs, - outputs, - groups, - subMappedNodeTemplate, - customDefs); - } - return null; - - } - - @SuppressWarnings("unchecked") - private ArrayList _policies() { - ArrayList alPolicies = new ArrayList<>(); - for(Object po: _tplPolicies()) { - LinkedHashMap policy = (LinkedHashMap)po; - for(Map.Entry me: policy.entrySet()) { - 
String policyName = me.getKey(); - LinkedHashMap policyTpl = (LinkedHashMap)me.getValue(); - ArrayList targetList = (ArrayList)policyTpl.get("targets"); - //ArrayList targetObjects = new ArrayList<>(); - ArrayList targetNodes = new ArrayList<>(); - ArrayList targetObjects = new ArrayList<>(); - ArrayList targetGroups = new ArrayList<>(); - String targetsType = "groups"; - if(targetList != null && targetList.size() >= 1) { - targetGroups = _getPolicyGroups(targetList); - if(targetGroups == null) { - targetsType = "node_templates"; - targetNodes = _getGroupMembers(targetList); - for(NodeTemplate nt: targetNodes) { - targetObjects.add(nt); - } - } - else { - for(Group gr: targetGroups) { - targetObjects.add(gr); - } - } - } - Policy policyObj = new Policy(policyName, - policyTpl, - targetObjects, - targetsType, - customDefs); - alPolicies.add(policyObj); - } - } - return alPolicies; - } - - private ArrayList _groups() { - ArrayList groups = new ArrayList<>(); - ArrayList memberNodes = null; - for(Map.Entry me: _tplGroups().entrySet()) { - String groupName = me.getKey(); - LinkedHashMap groupTpl = (LinkedHashMap)me.getValue(); - ArrayList memberNames = (ArrayList)groupTpl.get("members"); - if(memberNames != null) { - DataEntity.validateDatatype("list", memberNames,null,null,null); - if(memberNames.size() < 1 || - (new HashSet(memberNames)).size() != memberNames.size()) { - ThreadLocalsHolder.getCollector().appendError(new JToscaError("JE241", String.format( - "InvalidGroupTargetException: Member nodes \"%s\" should be >= 1 and not repeated", - memberNames.toString()))); - } - else { - memberNodes = _getGroupMembers(memberNames); - } - } - Group group = new Group(groupName, - groupTpl, - memberNodes, - customDefs); - groups.add(group); - } - return groups; - } - - private ArrayList _getGroupMembers(ArrayList memberNames) { - ArrayList memberNodes = new ArrayList<>(); - _validateGroupMembers(memberNames); - for(String member: memberNames) { - for(NodeTemplate node: 
nodeTemplates) { - if(member.equals(node.getName())) { - memberNodes.add(node); - } - } - } - return memberNodes; - } - - private ArrayList _getPolicyGroups(ArrayList memberNames) { - ArrayList memberGroups = new ArrayList<>(); - for(String member: memberNames) { - for(Group group: groups) { - if(member.equals(group.getName())) { - memberGroups.add(group); - } - } - } - return memberGroups; - } - - private void _validateGroupMembers(ArrayList members) { - ArrayList nodeNames = new ArrayList<>(); - for(NodeTemplate node: nodeTemplates) { - nodeNames.add(node.getName()); - } - for(String member: members) { - if(!nodeNames.contains(member)) { - ThreadLocalsHolder.getCollector().appendError(new JToscaError("JE242", String.format( - "InvalidGroupTargetException: Target member \"%s\" is not found in \"nodeTemplates\"",member))); - } - } - } - - // topology template can act like node template - // it is exposed by substitution_mappings. - - public String nodetype() { - return substitutionMappings.getNodeType(); - } - - public LinkedHashMap capabilities() { - return substitutionMappings.getCapabilities(); - } - - public LinkedHashMap requirements() { - return substitutionMappings.getRequirements(); - } - - private String _tplDescription() { - return (String)tpl.get(DESCRIPTION); - //if description: - // return description.rstrip() - } - - @SuppressWarnings("unchecked") - private LinkedHashMap _tplInputs() { - if(tpl.get(INPUTS) != null) { - return (LinkedHashMap)tpl.get(INPUTS); - } - else { - return new LinkedHashMap(); - } - } - - @SuppressWarnings("unchecked") - private LinkedHashMap _tplNodeTemplates() { - return (LinkedHashMap)tpl.get(NODE_TEMPLATES); - } - - @SuppressWarnings("unchecked") - private LinkedHashMap _tplRelationshipTemplates() { - if(tpl.get(RELATIONSHIP_TEMPLATES) != null) { - return (LinkedHashMap)tpl.get(RELATIONSHIP_TEMPLATES); - } - else { - return new LinkedHashMap(); - } - } - - @SuppressWarnings("unchecked") - private LinkedHashMap _tplOutputs() 
{ - if(tpl.get(OUTPUTS) != null) { - return (LinkedHashMap)tpl.get(OUTPUTS); - } - else { - return new LinkedHashMap(); - } - } - - @SuppressWarnings("unchecked") - private LinkedHashMap _tplSubstitutionMappings() { - if(tpl.get(SUBSTITUTION_MAPPINGS) != null) { - return (LinkedHashMap)tpl.get(SUBSTITUTION_MAPPINGS); - } - else { - return new LinkedHashMap(); - } - } - - @SuppressWarnings("unchecked") - private LinkedHashMap _tplGroups() { - if(tpl.get(GROUPS) != null) { - return (LinkedHashMap)tpl.get(GROUPS); - } - else { - return new LinkedHashMap(); - } - } - - @SuppressWarnings("unchecked") - private ArrayList _tplPolicies() { - if(tpl.get(POLICIES) != null) { - return (ArrayList)tpl.get(POLICIES); - } - else { - return new ArrayList(); - } - } - - private void _validateField() { - for(String name: tpl.keySet()) { - boolean bFound = false; - for(String section: SECTIONS) { - if(name.equals(section)) { - bFound = true; - break; - } - } - if(!bFound) { - ThreadLocalsHolder.getCollector().appendError(new JToscaError("JE243", String.format( - "UnknownFieldError: TopologyTemplate contains unknown field \"%s\"",name))); - } - } - } - - @SuppressWarnings("unchecked") - private void _processIntrinsicFunctions() { - // Process intrinsic functions - - // Current implementation processes functions within node template - // properties, requirements, interfaces inputs and template outputs. 
- - if(nodeTemplates != null) { - for(NodeTemplate nt: nodeTemplates) { - for(Property prop: nt.getPropertiesObjects()) { - prop.setValue(Function.getFunction(this,nt,prop.getValue())); - } - for(InterfacesDef ifd: nt.getInterfaces()) { - LinkedHashMap ifin = ifd.getInputs(); - if(ifin != null) { - for(Map.Entry me: ifin.entrySet()) { - String name = me.getKey(); - Object value = Function.getFunction(this,nt,me.getValue()); - ifd.setInput(name,value); - } - } - } - if(nt.getRequirements() != null && - nt.getRequirements() instanceof ArrayList) { - for(Object oreq: nt.getRequirements()) { - LinkedHashMap req = (LinkedHashMap)oreq; - LinkedHashMap rel = req; - for(String reqName: req.keySet()) { - Object reqItem = req.get(reqName); - if(reqItem instanceof LinkedHashMap) { - Object t = ((LinkedHashMap)reqItem).get("relationship"); - // it can be a string or a LHM... - if(t instanceof LinkedHashMap) { - rel = (LinkedHashMap)t; - } - else { - // we set it to null to fail the next test - // and avoid the get("proprties") - rel = null; - } - break; - } - } - if(rel != null && rel.get("properties") != null) { - LinkedHashMap relprops = - (LinkedHashMap)rel.get("properties"); - for(String key: relprops.keySet()) { - Object value = relprops.get(key); - Object func = Function.getFunction(this,req,value); - relprops.put(key,func); - } - } - } - } - if(nt.getCapabilitiesObjects() != null) { - for(Capability cap: nt.getCapabilitiesObjects()) { - if(cap.getPropertiesObjects() != null) { - for(Property prop: cap.getPropertiesObjects()) { - Object propvalue = Function.getFunction(this,nt,prop.getValue()); - if(propvalue instanceof GetInput) { - propvalue = ((GetInput)propvalue).result(); - for(String p: cap.getProperties().keySet()) { - //Object v = cap.getProperties().get(p); - if(p.equals(prop.getName())) { - cap.setProperty(p,propvalue); - } - } - } - } - } - } - } - for(RelationshipType rel: nt.getRelationships().keySet()) { - NodeTemplate node = nt.getRelationships().get(rel); 
- ArrayList relTpls = node.getRelationshipTemplate(); - if(relTpls != null) { - for(RelationshipTemplate relTpl: relTpls) { - // TT 5 - for(InterfacesDef iface: relTpl.getInterfaces()) { - if(iface.getInputs() != null) { - for(String name: iface.getInputs().keySet()) { - Object value = iface.getInputs().get(name); - Object func = Function.getFunction( - this, - relTpl, - value); - iface.setInput(name,func); - } - } - } - } - } - } - } - } - for(Output output: outputs) { - Object func = Function.getFunction(this,outputs,output.getValue()); - if(func instanceof GetAttribute) { - output.setAttr(Output.VALUE,func); - } - } - } - - public static String getSubMappingNodeType(LinkedHashMap topologyTpl) { - if(topologyTpl != null && topologyTpl instanceof LinkedHashMap) { - Object submapTpl = topologyTpl.get(SUBSTITUTION_MAPPINGS); - return SubstitutionMappings.stGetNodeType((LinkedHashMap)submapTpl); - } - return null; - } - - // getters - - public LinkedHashMap getTpl() { - return tpl; - } - - public LinkedHashMap getMetadata() { - return metaData; - } - - public ArrayList getInputs() { - return inputs; - } - - public ArrayList getOutputs() { - return outputs; - } - - public ArrayList getPolicies() { - return policies; - } - - public ArrayList getRelationshipTemplates() { - return relationshipTemplates; - } - - public ArrayList getNodeTemplates() { - return nodeTemplates; - } - - public ArrayList getGroups() { - return groups; - } - - public SubstitutionMappings getSubstitutionMappings() { - return substitutionMappings; - } - - public LinkedHashMap getParsedParams() { - return parsedParams; - } -} - -/*python - -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - - -import logging - -from toscaparser.common import exception -from toscaparser.dataentity import DataEntity -from toscaparser import functions -from toscaparser.groups import Group -from toscaparser.nodetemplate import NodeTemplate -from toscaparser.parameters import Input -from toscaparser.parameters import Output -from toscaparser.policy import Policy -from toscaparser.relationship_template import RelationshipTemplate -from toscaparser.substitution_mappings import SubstitutionMappings -from toscaparser.tpl_relationship_graph import ToscaGraph -from toscaparser.utils.gettextutils import _ - - -# Topology template key names -SECTIONS = (DESCRIPTION, INPUTS, NODE_TEMPLATES, - RELATIONSHIP_TEMPLATES, OUTPUTS, GROUPS, - SUBSTITUION_MAPPINGS, POLICIES) = \ - ('description', 'inputs', 'node_templates', - 'relationship_templates', 'outputs', 'groups', - 'substitution_mappings', 'policies') - -log = logging.getLogger("tosca.model") - - -class TopologyTemplate(object): - - '''Load the template data.''' - def __init__(self, template, custom_defs, - rel_types=None, parsed_params=None, - sub_mapped_node_template=None): - self.tpl = template - self.sub_mapped_node_template = sub_mapped_node_template - if self.tpl: - self.custom_defs = custom_defs - self.rel_types = rel_types - self.parsed_params = parsed_params - self._validate_field() - self.description = self._tpl_description() - self.inputs = self._inputs() - self.relationship_templates = self._relationship_templates() - self.nodetemplates = self._nodetemplates() - self.outputs = self._outputs() - if 
hasattr(self, 'nodetemplates'): - self.graph = ToscaGraph(self.nodetemplates) - self.groups = self._groups() - self.policies = self._policies() - self._process_intrinsic_functions() - self.substitution_mappings = self._substitution_mappings() - - def _inputs(self): - inputs = [] - for name, attrs in self._tpl_inputs().items(): - input = Input(name, attrs) - if self.parsed_params and name in self.parsed_params: - input.validate(self.parsed_params[name]) - else: - default = input.default - if default: - input.validate(default) - if (self.parsed_params and input.name not in self.parsed_params - or self.parsed_params is None) and input.required \ - and input.default is None: - log.warning(_('The required parameter %s ' - 'is not provided') % input.name) - - inputs.append(input) - return inputs - - def _nodetemplates(self): - nodetemplates = [] - tpls = self._tpl_nodetemplates() - if tpls: - for name in tpls: - tpl = NodeTemplate(name, tpls, self.custom_defs, - self.relationship_templates, - self.rel_types) - if (tpl.type_definition and - (tpl.type in tpl.type_definition.TOSCA_DEF or - (tpl.type not in tpl.type_definition.TOSCA_DEF and - bool(tpl.custom_def)))): - tpl.validate(self) - nodetemplates.append(tpl) - return nodetemplates - - def _relationship_templates(self): - rel_templates = [] - tpls = self._tpl_relationship_templates() - for name in tpls: - tpl = RelationshipTemplate(tpls[name], name, self.custom_defs) - rel_templates.append(tpl) - return rel_templates - - def _outputs(self): - outputs = [] - for name, attrs in self._tpl_outputs().items(): - output = Output(name, attrs) - output.validate() - outputs.append(output) - return outputs - - def _substitution_mappings(self): - tpl_substitution_mapping = self._tpl_substitution_mappings() - # if tpl_substitution_mapping and self.sub_mapped_node_template: - if tpl_substitution_mapping: - return SubstitutionMappings(tpl_substitution_mapping, - self.nodetemplates, - self.inputs, - self.outputs, - 
self.sub_mapped_node_template, - self.custom_defs) - - def _policies(self): - policies = [] - for policy in self._tpl_policies(): - for policy_name, policy_tpl in policy.items(): - target_list = policy_tpl.get('targets') - if target_list and len(target_list) >= 1: - target_objects = [] - targets_type = "groups" - target_objects = self._get_policy_groups(target_list) - if not target_objects: - targets_type = "node_templates" - target_objects = self._get_group_members(target_list) - policyObj = Policy(policy_name, policy_tpl, - target_objects, targets_type, - self.custom_defs) - policies.append(policyObj) - return policies - - def _groups(self): - groups = [] - member_nodes = None - for group_name, group_tpl in self._tpl_groups().items(): - member_names = group_tpl.get('members') - if member_names is not None: - DataEntity.validate_datatype('list', member_names) - if len(member_names) < 1 or \ - len(member_names) != len(set(member_names)): - exception.ExceptionCollector.appendException( - exception.InvalidGroupTargetException( - message=_('Member nodes "%s" should be >= 1 ' - 'and not repeated') % member_names)) - else: - member_nodes = self._get_group_members(member_names) - group = Group(group_name, group_tpl, - member_nodes, - self.custom_defs) - groups.append(group) - return groups - - def _get_group_members(self, member_names): - member_nodes = [] - self._validate_group_members(member_names) - for member in member_names: - for node in self.nodetemplates: - if node.name == member: - member_nodes.append(node) - return member_nodes - - def _get_policy_groups(self, member_names): - member_groups = [] - for member in member_names: - for group in self.groups: - if group.name == member: - member_groups.append(group) - return member_groups - - def _validate_group_members(self, members): - node_names = [] - for node in self.nodetemplates: - node_names.append(node.name) - for member in members: - if member not in node_names: - exception.ExceptionCollector.appendException( 
- exception.InvalidGroupTargetException( - message=_('Target member "%s" is not found in ' - 'node_templates') % member)) - - # topology template can act like node template - # it is exposed by substitution_mappings. - def nodetype(self): - return self.substitution_mappings.node_type \ - if self.substitution_mappings else None - - def capabilities(self): - return self.substitution_mappings.capabilities \ - if self.substitution_mappings else None - - def requirements(self): - return self.substitution_mappings.requirements \ - if self.substitution_mappings else None - - def _tpl_description(self): - description = self.tpl.get(DESCRIPTION) - if description: - return description.rstrip() - - def _tpl_inputs(self): - return self.tpl.get(INPUTS) or {} - - def _tpl_nodetemplates(self): - return self.tpl.get(NODE_TEMPLATES) - - def _tpl_relationship_templates(self): - return self.tpl.get(RELATIONSHIP_TEMPLATES) or {} - - def _tpl_outputs(self): - return self.tpl.get(OUTPUTS) or {} - - def _tpl_substitution_mappings(self): - return self.tpl.get(SUBSTITUION_MAPPINGS) or {} - - def _tpl_groups(self): - return self.tpl.get(GROUPS) or {} - - def _tpl_policies(self): - return self.tpl.get(POLICIES) or {} - - def _validate_field(self): - for name in self.tpl: - if name not in SECTIONS: - exception.ExceptionCollector.appendException( - exception.UnknownFieldError(what='Template', field=name)) - - def _process_intrinsic_functions(self): - """Process intrinsic functions - - Current implementation processes functions within node template - properties, requirements, interfaces inputs and template outputs. 
- """ - if hasattr(self, 'nodetemplates'): - for node_template in self.nodetemplates: - for prop in node_template.get_properties_objects(): - prop.value = functions.get_function(self, - node_template, - prop.value) - for interface in node_template.interfaces: - if interface.inputs: - for name, value in interface.inputs.items(): - interface.inputs[name] = functions.get_function( - self, - node_template, - value) - if node_template.requirements and \ - isinstance(node_template.requirements, list): - for req in node_template.requirements: - rel = req - for req_name, req_item in req.items(): - if isinstance(req_item, dict): - rel = req_item.get('relationship') - break - if rel and 'properties' in rel: - for key, value in rel['properties'].items(): - rel['properties'][key] = \ - functions.get_function(self, - req, - value) - if node_template.get_capabilities_objects(): - for cap in node_template.get_capabilities_objects(): - if cap.get_properties_objects(): - for prop in cap.get_properties_objects(): - propvalue = functions.get_function( - self, - node_template, - prop.value) - if isinstance(propvalue, functions.GetInput): - propvalue = propvalue.result() - for p, v in cap._properties.items(): - if p == prop.name: - cap._properties[p] = propvalue - for rel, node in node_template.relationships.items(): - rel_tpls = node.relationship_tpl - if rel_tpls: - for rel_tpl in rel_tpls: - for interface in rel_tpl.interfaces: - if interface.inputs: - for name, value in \ - interface.inputs.items(): - interface.inputs[name] = \ - functions.get_function(self, - rel_tpl, - value) - for output in self.outputs: - func = functions.get_function(self, self.outputs, output.value) - if isinstance(func, functions.GetAttribute): - output.attrs[output.VALUE] = func - - @classmethod - def get_sub_mapping_node_type(cls, topology_tpl): - if topology_tpl and isinstance(topology_tpl, dict): - submap_tpl = topology_tpl.get(SUBSTITUION_MAPPINGS) - return SubstitutionMappings.get_node_type(submap_tpl) 
-*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/ToscaTemplate.java b/src/main/java/org/openecomp/sdc/toscaparser/api/ToscaTemplate.java index f5902c4..4c6ba3a 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/ToscaTemplate.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/ToscaTemplate.java @@ -116,11 +116,13 @@ public class ToscaTemplate extends Object { VALID_TEMPLATE_VERSIONS = new ArrayList<>(); VALID_TEMPLATE_VERSIONS.add("tosca_simple_yaml_1_0"); + VALID_TEMPLATE_VERSIONS.add("tosca_simple_yaml_1_1"); VALID_TEMPLATE_VERSIONS.addAll(exttools.getVersions()); ADDITIONAL_SECTIONS = new LinkedHashMap<>(); SPECIAL_SECTIONS = new ArrayList<>(); SPECIAL_SECTIONS.add(METADATA); ADDITIONAL_SECTIONS.put("tosca_simple_yaml_1_0",SPECIAL_SECTIONS); + ADDITIONAL_SECTIONS.put("tosca_simple_yaml_1_1",SPECIAL_SECTIONS); ADDITIONAL_SECTIONS.putAll(exttools.getSections()); //long startTime = System.nanoTime(); @@ -495,8 +497,9 @@ public class ToscaTemplate extends Object { "InvalidTemplateVersion: \"%s\" is invalid. Valid versions are %s", sVersion,VALID_TEMPLATE_VERSIONS.toString()))); } - else if(!sVersion.equals("tosca_simple_yaml_1_0")) { + else if ((!sVersion.equals("tosca_simple_yaml_1_0") && !sVersion.equals("tosca_simple_yaml_1_1"))) { EntityType.updateDefinitions(sVersion); + } } diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/NodeType.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/NodeType.java index cb4aa74..48fbe59 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/NodeType.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/NodeType.java @@ -131,16 +131,15 @@ public class NodeType extends StatefulEntityType { // This method will lookup all node types if they have the // provided capability. 
- // Filter the node types ArrayList nodeTypes = new ArrayList<>(); - for(String nt: TOSCA_DEF.keySet()) { - if(nt.startsWith(NODE_PREFIX) && !nt.equals("tosca.nodes.Root")) { + for(String nt: customDef.keySet()) { + if(nt.startsWith(NODE_PREFIX) || nt.startsWith("org.openecomp") && !nt.equals("tosca.nodes.Root")) { nodeTypes.add(nt); } } for(String nt: nodeTypes) { - LinkedHashMap nodeDef = (LinkedHashMap)TOSCA_DEF.get(nt); + LinkedHashMap nodeDef = (LinkedHashMap)customDef.get(nt); if(nodeDef instanceof LinkedHashMap && nodeDef.get("capabilities") != null) { LinkedHashMap nodeCaps = (LinkedHashMap)nodeDef.get("capabilities"); if(nodeCaps != null) { @@ -161,13 +160,17 @@ public class NodeType extends StatefulEntityType { @SuppressWarnings("unchecked") private String _getRelation(String key,String ndtype) { String relation = null; - NodeType ntype = new NodeType(ndtype,null); + NodeType ntype = new NodeType(ndtype, customDef); LinkedHashMap caps = ntype.getCapabilities(); if(caps != null && caps.get(key) != null) { CapabilityTypeDef c = caps.get(key); for(int i=0; i< RELATIONSHIP_TYPE.length; i++) { String r = RELATIONSHIP_TYPE[i]; - LinkedHashMap rtypedef = (LinkedHashMap)TOSCA_DEF.get(r); + if(r != null) { + relation = r; + break; + } + LinkedHashMap rtypedef = (LinkedHashMap)customDef.get(r); for(Object o: rtypedef.values()) { LinkedHashMap properties = (LinkedHashMap)o; if(properties.get(c.getType()) != null) { diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/TypeValidation.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/TypeValidation.java index 50c9739..7bfe333 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/TypeValidation.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/TypeValidation.java @@ -41,6 +41,7 @@ public class TypeValidation { private static ArrayList _getVTV() { ArrayList vtv = new ArrayList<>(); vtv.add("tosca_simple_yaml_1_0"); + vtv.add("tosca_simple_yaml_1_1"); 
ExtTools exttools = new ExtTools(); vtv.addAll(exttools.getVersions()); return vtv; diff --git a/src/test/java/org.openecomp.sdc.toscaparser/JToscaMetadataParse.java b/src/test/java/org.openecomp.sdc.toscaparser/JToscaMetadataParse.java deleted file mode 100644 index 584a0fd..0000000 --- a/src/test/java/org.openecomp.sdc.toscaparser/JToscaMetadataParse.java +++ /dev/null @@ -1,26 +0,0 @@ -package org.openecomp.sdc.toscaparser; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; - -import java.io.File; -import java.util.LinkedHashMap; - -import org.junit.Test; -import org.openecomp.sdc.toscaparser.api.ToscaTemplate; -import org.openecomp.sdc.toscaparser.api.common.JToscaException; - -public class JToscaMetadataParse { - - @Test - public void testMetadataParsedCorrectly() throws JToscaException { - String fileStr = JToscaMetadataParse.class.getClassLoader().getResource("csars/csar_hello_world.csar").getFile(); - File file = new File(fileStr); - ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); - LinkedHashMap metadataProperties = toscaTemplate.getMetaProperties("TOSCA.meta"); - assertNotNull(metadataProperties); - Object entryDefinition = metadataProperties.get("Entry-Definitions"); - assertNotNull(entryDefinition); - assertEquals("tosca_helloworld.yaml", entryDefinition); - } -} diff --git a/src/test/java/org/openecomp/sdc/toscaparser/api/JToscaMetadataParse.java b/src/test/java/org/openecomp/sdc/toscaparser/api/JToscaMetadataParse.java new file mode 100644 index 0000000..79d166f --- /dev/null +++ b/src/test/java/org/openecomp/sdc/toscaparser/api/JToscaMetadataParse.java @@ -0,0 +1,41 @@ +package org.openecomp.sdc.toscaparser.api; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; + +import java.io.File; +import java.util.Collection; +import java.util.LinkedHashMap; + +import org.junit.Test; 
+import org.openecomp.sdc.toscaparser.api.common.JToscaException; +import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +public class JToscaMetadataParse { + + @Test + public void testMetadataParsedCorrectly() throws JToscaException { + String fileStr = JToscaMetadataParse.class.getClassLoader().getResource("csars/csar_hello_world.csar").getFile(); + File file = new File(fileStr); + ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); + LinkedHashMap metadataProperties = toscaTemplate.getMetaProperties("TOSCA.meta"); + assertNotNull(metadataProperties); + Object entryDefinition = metadataProperties.get("Entry-Definitions"); + assertNotNull(entryDefinition); + assertEquals("tosca_helloworld.yaml", entryDefinition); + } + + @Test + public void noWarningsAfterParse() throws JToscaException { + String fileStr = JToscaMetadataParse.class.getClassLoader().getResource("csars/tmpCSAR_Huawei_vSPGW_fixed.csar.csar").getFile(); + File file = new File(fileStr); + ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); + + +// Collection issues = ThreadLocalsHolder.getCollector().getValidationIssues().values(); +// assertTrue(issues.size() == 0 ); + } + +} diff --git a/src/test/resources/csars/tmpCSAR_Huawei_vSPGW_fixed.csar.csar b/src/test/resources/csars/tmpCSAR_Huawei_vSPGW_fixed.csar.csar new file mode 100644 index 0000000..70f8cc4 Binary files /dev/null and b/src/test/resources/csars/tmpCSAR_Huawei_vSPGW_fixed.csar.csar differ diff --git a/version.properties b/version.properties index f7d411d..f842c49 100644 --- a/version.properties +++ b/version.properties @@ -5,7 +5,7 @@ major=1 minor=1 -patch=16 +patch=19 base_version=${major}.${minor}.${patch} -- cgit 1.2.3-korg From 7cdb2b0e8a55d750104972b2ca2ae4ed51ae0aec Mon Sep 17 00:00:00 2001 From: Yuli Shlosberg Date: Wed, 29 Nov 2017 14:34:55 +0200 Subject: new Junit 
test Change-Id: I9a15d78ced6a10d2a79f08268a558f007bb22773 Issue-ID: SDC-695 Signed-off-by: Yuli Shlosberg --- .../java/org/openecomp/sdc/toscaparser/api/JToscaMetadataParse.java | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/src/test/java/org/openecomp/sdc/toscaparser/api/JToscaMetadataParse.java b/src/test/java/org/openecomp/sdc/toscaparser/api/JToscaMetadataParse.java index 79d166f..37292e5 100644 --- a/src/test/java/org/openecomp/sdc/toscaparser/api/JToscaMetadataParse.java +++ b/src/test/java/org/openecomp/sdc/toscaparser/api/JToscaMetadataParse.java @@ -32,10 +32,8 @@ public class JToscaMetadataParse { String fileStr = JToscaMetadataParse.class.getClassLoader().getResource("csars/tmpCSAR_Huawei_vSPGW_fixed.csar.csar").getFile(); File file = new File(fileStr); ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); - - -// Collection issues = ThreadLocalsHolder.getCollector().getValidationIssues().values(); -// assertTrue(issues.size() == 0 ); + int validationIssuesCaught = ThreadLocalsHolder.getCollector().validationIssuesCaught(); + assertTrue(validationIssuesCaught == 0 ); } } -- cgit 1.2.3-korg From 704b2296a078eeb321cfcc685c2aee3a5e60b457 Mon Sep 17 00:00:00 2001 From: PriyanshuAgarwal Date: Thu, 2 Nov 2017 17:07:19 +0530 Subject: Fixed the open streams which were not being closed Updated files in JTosca Library Change-Id: Ib6b55ef4e367ee9a4f0761ae436bfcb9944f3a41 Issue-ID: SDC-249 Signed-off-by: priyanshu --- .../sdc/toscaparser/api/ImportsLoader.java | 14 ++-- .../sdc/toscaparser/api/ToscaTemplate.java | 3 +- .../openecomp/sdc/toscaparser/api/prereq/CSAR.java | 76 +++++++++++----------- 3 files changed, 45 insertions(+), 48 deletions(-) diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/ImportsLoader.java b/src/main/java/org/openecomp/sdc/toscaparser/api/ImportsLoader.java index 6794f9a..1fac3f1 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/ImportsLoader.java +++ 
b/src/main/java/org/openecomp/sdc/toscaparser/api/ImportsLoader.java @@ -246,9 +246,8 @@ public class ImportsLoader { } if(UrlUtils.validateUrl(fileName)) { - try { - al[0] = fileName; - InputStream input = new URL(fileName).openStream(); + try (InputStream input = new URL(fileName).openStream();) { + al[0] = fileName; Yaml yaml = new Yaml(); al[1] = yaml.load(input); return al; @@ -354,9 +353,9 @@ public class ImportsLoader { al[0] = al[1] = null; return al; } - try { + try (InputStream input = new FileInputStream(new File(importTemplate));) { al[0] = importTemplate; - InputStream input = new FileInputStream(new File(importTemplate)); + Yaml yaml = new Yaml(); al[1] = yaml.load(input); return al; @@ -417,9 +416,8 @@ public class ImportsLoader { } } if(UrlUtils.validateUrl(fullUrl)) { - try { - al[0] = fullUrl; - InputStream input = new URL(fullUrl).openStream(); + try (InputStream input = new URL(fullUrl).openStream();) { + al[0] = fullUrl; Yaml yaml = new Yaml(); al[1] = yaml.load(input); return al; diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/ToscaTemplate.java b/src/main/java/org/openecomp/sdc/toscaparser/api/ToscaTemplate.java index 4c6ba3a..d4506e1 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/ToscaTemplate.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/ToscaTemplate.java @@ -144,10 +144,9 @@ public class ToscaTemplate extends Object { path = _getPath(_path); // load the YAML template if (path != null && !path.isEmpty()) { - try { + try (InputStream input = new FileInputStream(new File(path));){ //System.out.println("Loading YAML file " + path); log.debug("ToscaTemplate Loading YAMEL file {}", path); - InputStream input = new FileInputStream(new File(path)); Yaml yaml = new Yaml(); Object data = yaml.load(input); this.tpl = (LinkedHashMap) data; diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/prereq/CSAR.java b/src/main/java/org/openecomp/sdc/toscaparser/api/prereq/CSAR.java index ef29b53..b40eded 100644 
--- a/src/main/java/org/openecomp/sdc/toscaparser/api/prereq/CSAR.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/prereq/CSAR.java @@ -273,8 +273,7 @@ public class CSAR { public LinkedHashMap getMainTemplateYaml() throws JToscaException { String mainTemplate = tempDir + File.separator + getMainTemplate(); if(mainTemplate != null) { - try { - InputStream input = new FileInputStream(new File(mainTemplate)); + try (InputStream input = new FileInputStream(new File(mainTemplate));){ Yaml yaml = new Yaml(); Object data = yaml.load(input); if(!(data instanceof LinkedHashMap)) { @@ -459,34 +458,35 @@ public class CSAR { if (!destDir.exists()) { destDir.mkdir(); } - ZipInputStream zipIn = new ZipInputStream(new FileInputStream(zipFilePath)); - ZipEntry entry = zipIn.getNextEntry(); - // iterates over entries in the zip file - while (entry != null) { - // create all directories needed for nested items - String[] parts = entry.getName().split("/"); - String s = destDirectory + File.separator ; - for(int i=0; i< parts.length-1; i++) { - s += parts[i]; - File idir = new File(s); - if(!idir.exists()) { - idir.mkdir(); - } - s += File.separator; - } - String filePath = destDirectory + File.separator + entry.getName(); - if (!entry.isDirectory()) { - // if the entry is a file, extracts it - extractFile(zipIn, filePath); - } else { - // if the entry is a directory, make the directory - File dir = new File(filePath); - dir.mkdir(); - } - zipIn.closeEntry(); - entry = zipIn.getNextEntry(); - } - zipIn.close(); + + try (ZipInputStream zipIn = new ZipInputStream(new FileInputStream(zipFilePath));){ + ZipEntry entry = zipIn.getNextEntry(); + // iterates over entries in the zip file + while (entry != null) { + // create all directories needed for nested items + String[] parts = entry.getName().split("/"); + String s = destDirectory + File.separator ; + for(int i=0; i< parts.length-1; i++) { + s += parts[i]; + File idir = new File(s); + if(!idir.exists()) { + idir.mkdir(); + } 
+ s += File.separator; + } + String filePath = destDirectory + File.separator + entry.getName(); + if (!entry.isDirectory()) { + // if the entry is a file, extracts it + extractFile(zipIn, filePath); + } else { + // if the entry is a directory, make the directory + File dir = new File(filePath); + dir.mkdir(); + } + zipIn.closeEntry(); + entry = zipIn.getNextEntry(); + } + } } /** @@ -499,14 +499,14 @@ public class CSAR { private void extractFile(ZipInputStream zipIn, String filePath) throws IOException { //BufferedOutputStream bos = new BufferedOutputStream(new FileOutputStream(filePath)); - FileOutputStream fos = new FileOutputStream(filePath); - BufferedOutputStream bos = new BufferedOutputStream(fos); - byte[] bytesIn = new byte[BUFFER_SIZE]; - int read = 0; - while ((read = zipIn.read(bytesIn)) != -1) { - bos.write(bytesIn, 0, read); - } - bos.close(); + try (FileOutputStream fos = new FileOutputStream(filePath); + BufferedOutputStream bos = new BufferedOutputStream(fos);){ + byte[] bytesIn = new byte[BUFFER_SIZE]; + int read = 0; + while ((read = zipIn.read(bytesIn)) != -1) { + bos.write(bytesIn, 0, read); + } + } } } -- cgit 1.2.3-korg From 0fbd261601775fa6ba66eee6164b0b2cd9242f08 Mon Sep 17 00:00:00 2001 From: Michael Lando Date: Wed, 6 Dec 2017 21:29:41 +0200 Subject: update version Change-Id: I89c7a6fd26497e0af6c31e3550fbfccda91a6c8f Issue-ID: SDC-684 Signed-off-by: Michael Lando --- pom.xml | 2 +- version.properties | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/pom.xml b/pom.xml index c277b0c..6e67b81 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ org.openecomp.sdc.jtosca jtosca - 1.1.19-SNAPSHOT + 1.2.0-SNAPSHOT sdc-jtosca diff --git a/version.properties b/version.properties index f842c49..8471f55 100644 --- a/version.properties +++ b/version.properties @@ -4,8 +4,8 @@ # because they are used in Jenkins, whose plug-in doesn't support major=1 -minor=1 -patch=19 +minor=2 +patch=0 base_version=${major}.${minor}.${patch} -- cgit 
1.2.3-korg From e303f8cbbec61cd7deb7576073e601e2c919367e Mon Sep 17 00:00:00 2001 From: Yuli Shlosberg Date: Sun, 31 Dec 2017 16:30:45 +0200 Subject: Update code after grep filters execution Change-Id: I1dd47b7c32621222220dc68e5d9829b55a6b50e0 Issue-ID: SDC-733 Signed-off-by: Yuli Shlosberg --- pom.xml | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/pom.xml b/pom.xml index 6e67b81..53d085d 100644 --- a/pom.xml +++ b/pom.xml @@ -187,17 +187,17 @@ http://repo2.maven.org/maven2/ - ecomp-releases + onap-releases Release Repository ${nexus.proxy}/content/repositories/releases/ - ecomp-staging + onap-staging Staging Repository ${nexus.proxy}/content/repositories/staging/ - ecomp-snapshots + onap-snapshots Snapshots Repository ${nexus.proxy}/content/repositories/snapshots/ @@ -205,17 +205,17 @@ - ecomp-releases + onap-releases Release Repository ${nexus.proxy}/content/repositories/${releases.path}/ - ecomp-snapshots + onap-snapshots Snapshot Repository ${nexus.proxy}/content/repositories/${snapshots.path}/ - ecomp-site + onap-site dav:${nexus.proxy}${sitePath} -- cgit 1.2.3-korg From fda1e52700ef32986173cf0273fc94ebafb16cb5 Mon Sep 17 00:00:00 2001 From: priyanshu Date: Thu, 28 Dec 2017 11:16:08 +0530 Subject: YAML syntax errors are not being sent. YAML syntax errors are not being sent in Validation Issue List. 
Change-Id: I2a52ed4a26b29ca0a4f01bdd3abd8830b012f5af Issue-ID: SDC-566 Signed-off-by: priyanshu Signed-off-by: Yuli Shlosberg --- .../java/org/openecomp/sdc/toscaparser/api/ImportsLoader.java | 4 ++-- .../java/org/openecomp/sdc/toscaparser/api/ToscaTemplate.java | 8 +++++++- 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/ImportsLoader.java b/src/main/java/org/openecomp/sdc/toscaparser/api/ImportsLoader.java index 1fac3f1..5e94378 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/ImportsLoader.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/ImportsLoader.java @@ -362,13 +362,13 @@ public class ImportsLoader { } catch(FileNotFoundException e) { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE197", String.format( - "ImportError: Failed to load YAML from \"%s\"",importName))); + "ImportError: Failed to load YAML from \"%s\"" + e,importName))); al[0] = al[1] = null; return al; } catch(Exception e) { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE198", String.format( - "ImportError: Exception from SnakeYAML file = \"%s\"",importName))); + "ImportError: Exception from SnakeYAML file = \"%s\"" + e,importName))); al[0] = al[1] = null; return al; } diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/ToscaTemplate.java b/src/main/java/org/openecomp/sdc/toscaparser/api/ToscaTemplate.java index d4506e1..07cce1c 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/ToscaTemplate.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/ToscaTemplate.java @@ -153,10 +153,16 @@ public class ToscaTemplate extends Object { } catch (FileNotFoundException e) { log.error("ToscaTemplate - Exception loading yaml: {}", e.getMessage()); + log.error("Exception", e); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE275", + "ToscaTemplate - Exception loading yaml: -> " + 
e.getMessage())); return; } catch(Exception e) { - log.error("ToscaTemplate - Error loading yaml, aborting"); + log.error("ToscaTemplate - Error loading yaml, aborting -> ", e.getMessage()); + log.error("Exception", e); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE275", + "ToscaTemplate - Error loading yaml, aborting -> " + e.getMessage())); return; } -- cgit 1.2.3-korg From fb2c889566a00ab3311b8c5bf9958f332db4385d Mon Sep 17 00:00:00 2001 From: Jessica Wagantall Date: Tue, 2 Jan 2018 11:10:57 -0800 Subject: Update staging server ID Update serverID for staging to match the rest Change-Id: Id8a1652b432635a54da74ceba99de7664e26bad4 Issue-ID: SDC-733 Signed-off-by: Jessica Wagantall --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 53d085d..442686a 100644 --- a/pom.xml +++ b/pom.xml @@ -141,7 +141,7 @@ ${nexus.proxy} ${staging.profile.id} - ecomp-staging + onap-staging -- cgit 1.2.3-korg From dba9f5edd78a8c7aafc9742ba15d271deca4d944 Mon Sep 17 00:00:00 2001 From: priyanshu Date: Wed, 6 Dec 2017 21:29:41 +0200 Subject: Extension loading is not working. Updated files in JTosca Library. 
Change-Id: I3323e34238228451bd1d396271fc89fdf28b3fd9 Issue-ID: SDC-565 Signed-off-by: priyanshu Signed-off-by: Yuli Shlosberg --- pom.xml | 6 + .../sdc/toscaparser/api/elements/EntityType.java | 46 ++-- .../sdc/toscaparser/api/extensions/ExtTools.java | 156 ++++++-------- .../TOSCA_simple_yaml_definition_1_0_0.py | 19 ++ .../TOSCA_simple_yaml_definition_1_0_0.yaml | 240 +++++++++++++++++++++ .../toscaparser/api/elements/EntityTypeTest.java | 56 +++++ 6 files changed, 415 insertions(+), 108 deletions(-) create mode 100644 src/main/resources/extensions/TOSCA_simple_yaml_definition_1_0_0/TOSCA_simple_yaml_definition_1_0_0.py create mode 100644 src/main/resources/extensions/TOSCA_simple_yaml_definition_1_0_0/TOSCA_simple_yaml_definition_1_0_0.yaml create mode 100644 src/test/java/org/openecomp/sdc/toscaparser/api/elements/EntityTypeTest.java diff --git a/pom.xml b/pom.xml index 442686a..6246e2f 100644 --- a/pom.xml +++ b/pom.xml @@ -75,6 +75,12 @@ commons-io 1.3.2 + + + org.reflections + reflections + 0.9.11 + diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/EntityType.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/EntityType.java index 70f7ae7..50ef715 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/EntityType.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/EntityType.java @@ -1,15 +1,15 @@ package org.openecomp.sdc.toscaparser.api.elements; -import java.io.File; -import java.io.FileInputStream; -import java.io.FileNotFoundException; +import java.io.IOException; import java.io.InputStream; import java.util.ArrayList; import java.util.LinkedHashMap; import java.util.Map; +import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; import org.openecomp.sdc.toscaparser.api.extensions.ExtTools; import org.openecomp.sdc.toscaparser.api.utils.CopyUtils; +import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import 
org.yaml.snakeyaml.Yaml; @@ -238,30 +238,30 @@ public class EntityType { public static void updateDefinitions(String version) { ExtTools exttools = new ExtTools(); String extensionDefsFile = exttools.getDefsFile(version); - - InputStream input = null; - try { - input = new FileInputStream(new File(extensionDefsFile)); - } - catch (FileNotFoundException e) { - log.error("EntityType - updateDefinitions - Failed to open extension defs file ", extensionDefsFile); - return; - } - Yaml yaml = new Yaml(); - LinkedHashMap nfvDefFile = (LinkedHashMap)yaml.load(input); - LinkedHashMap nfvDef = new LinkedHashMap<>(); - for(String section: TOSCA_DEF_SECTIONS) { - if(nfvDefFile.get(section) != null) { - LinkedHashMap value = - (LinkedHashMap)nfvDefFile.get(section); - for(String key: value.keySet()) { - nfvDef.put(key, value.get(key)); + + try (InputStream input = EntityType.class.getClassLoader().getResourceAsStream(extensionDefsFile);){ + Yaml yaml = new Yaml(); + LinkedHashMap nfvDefFile = (LinkedHashMap)yaml.load(input); + LinkedHashMap nfvDef = new LinkedHashMap<>(); + for(String section: TOSCA_DEF_SECTIONS) { + if(nfvDefFile.get(section) != null) { + LinkedHashMap value = + (LinkedHashMap)nfvDefFile.get(section); + for(String key: value.keySet()) { + nfvDef.put(key, value.get(key)); + } } } + TOSCA_DEF.putAll(nfvDef); + } + catch (IOException e) { + log.error("EntityType - updateDefinitions - Failed to update definitions from defs file {}",extensionDefsFile); + log.error("Exception:", e); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE280", + String.format("Failed to update definitions from defs file \"%s\" ",extensionDefsFile))); + return; } - TOSCA_DEF.putAll(nfvDef); } - } /*python diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/extensions/ExtTools.java b/src/main/java/org/openecomp/sdc/toscaparser/api/extensions/ExtTools.java index 90aa35c..f0e0afa 100644 --- 
a/src/main/java/org/openecomp/sdc/toscaparser/api/extensions/ExtTools.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/extensions/ExtTools.java @@ -1,16 +1,19 @@ package org.openecomp.sdc.toscaparser.api.extensions; +import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; +import org.reflections.Reflections; +import org.reflections.scanners.ResourcesScanner; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.BufferedReader; -import java.io.File; -import java.io.FileInputStream; import java.io.InputStream; import java.io.InputStreamReader; import java.nio.charset.Charset; import java.util.ArrayList; import java.util.LinkedHashMap; +import java.util.Set; import java.util.regex.Matcher; import java.util.regex.Pattern; @@ -24,89 +27,72 @@ public class ExtTools { EXTENSION_INFO = _loadExtensions(); } - - private LinkedHashMap _loadExtensions() { - - LinkedHashMap extensions = new LinkedHashMap<>(); - - String path = ExtTools.class.getProtectionDomain().getCodeSource().getLocation().getPath(); - //String extdir = path + File.separator + "resources/extensions"; - - String extdir = ExtTools.class.getClassLoader().getResource("extensions").getFile(); - - // for all folders in extdir - File extDir = new File(extdir); - File extDirList[] = extDir.listFiles(); - if (extDirList != null) { - for(File f: extDirList) { - if(f.isDirectory()) { - // for all .py files in folder - File extFileList[] = f.listFiles(); - for(File pyf: extFileList) { - String pyfName = pyf.getName(); - String pyfPath = pyf.getAbsolutePath(); - if(pyfName.endsWith(".py")) { - // get VERSION,SECTIONS,DEF_FILE - try { - String version = null; - ArrayList sections = null; - String defsFile = null; - String line; - InputStream fis = new FileInputStream(pyfPath); - InputStreamReader isr = new InputStreamReader(fis, Charset.forName("UTF-8")); - BufferedReader br = new BufferedReader(isr); - Pattern 
pattern = Pattern.compile("^([^#]\\S+)\\s*=\\s*(\\S.*)$"); - while((line = br.readLine()) != null) { - line = line.replace("'","\""); - Matcher matcher = pattern.matcher(line.toString()); - if(matcher.find()) { - if(matcher.group(1).equals("VERSION")) { - version = matcher.group(2); - if(version.startsWith("'") || version.startsWith("\"")) { - version = version.substring(1,version.length()-1); - } - } - else if(matcher.group(1).equals("DEFS_FILE")) { - String fn = matcher.group(2); - if(fn.startsWith("'") || fn.startsWith("\"")) { - fn = fn.substring(1,fn.length()-1); - } - defsFile = pyf.getParent() + File.separator + fn;//matcher.group(2); - } - else if(matcher.group(1).equals("SECTIONS")) { - sections = new ArrayList<>(); - Pattern secpat = Pattern.compile("\"([^\"]+)\""); - Matcher secmat = secpat.matcher(matcher.group(2)); - while(secmat.find()) { - sections.add(secmat.group(1)); - } - } - } - } - br.close(); - - if(version != null && defsFile != null) { - LinkedHashMap ext = new LinkedHashMap<>(); - ext.put("defs_file", defsFile); - if(sections != null) { - ext.put("sections", sections); - } - extensions.put(version, ext); - } - else { - // error - } - } - catch(Exception e) { - log.error("ExtTools - _loadExtensions - {}", e.getMessage()); - // ... 
- } - } - } - } - } - } - return extensions; + + private LinkedHashMap _loadExtensions() { + + LinkedHashMap extensions = new LinkedHashMap<>(); + + Reflections reflections = new Reflections("extensions", new ResourcesScanner()); + Set resourcePaths = reflections.getResources(Pattern.compile(".*\\.py$")); + + for(String resourcePath : resourcePaths) { + try (InputStream is = ExtTools.class.getClassLoader().getResourceAsStream(resourcePath); + InputStreamReader isr = new InputStreamReader(is, Charset.forName("UTF-8")); + BufferedReader br = new BufferedReader(isr);){ + String version = null; + ArrayList sections = null; + String defsFile = null; + String line; + + Pattern pattern = Pattern.compile("^([^#]\\S+)\\s*=\\s*(\\S.*)$"); + while ((line = br.readLine()) != null) { + line = line.replace("'", "\""); + Matcher matcher = pattern.matcher(line.toString()); + if (matcher.find()) { + if (matcher.group(1).equals("VERSION")) { + version = matcher.group(2); + if (version.startsWith("'") || version.startsWith("\"")) { + version = version.substring(1, version.length() - 1); + } + } + else if (matcher.group(1).equals("DEFS_FILE")) { + String fn = matcher.group(2); + if (fn.startsWith("'") || fn.startsWith("\"")) { + fn = fn.substring(1, fn.length() - 1); + } + defsFile = resourcePath.replaceFirst("\\w*.py$", fn); + } + else if (matcher.group(1).equals("SECTIONS")) { + sections = new ArrayList<>(); + Pattern secpat = Pattern.compile("\"([^\"]+)\""); + Matcher secmat = secpat.matcher(matcher.group(2)); + while (secmat.find()) { + sections.add(secmat.group(1)); + } + } + } + } + + if (version != null && defsFile != null) { + LinkedHashMap ext = new LinkedHashMap<>(); + ext.put("defs_file", defsFile); + if (sections != null) { + ext.put("sections", sections); + } + extensions.put(version, ext); + } + else { + // error + } + } + catch (Exception e) { + log.error("ExtTools - _loadExtensions - {}", e); + ThreadLocalsHolder.getCollector().appendValidationIssue(new 
JToscaValidationIssue + ("JE281", "Failed to load extensions" + e.getMessage())); + // ... + } + } + return extensions; } public ArrayList getVersions() { diff --git a/src/main/resources/extensions/TOSCA_simple_yaml_definition_1_0_0/TOSCA_simple_yaml_definition_1_0_0.py b/src/main/resources/extensions/TOSCA_simple_yaml_definition_1_0_0/TOSCA_simple_yaml_definition_1_0_0.py new file mode 100644 index 0000000..a5bda4a --- /dev/null +++ b/src/main/resources/extensions/TOSCA_simple_yaml_definition_1_0_0/TOSCA_simple_yaml_definition_1_0_0.py @@ -0,0 +1,19 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +# VERSION and DEFS_FILE are required for all extensions + +VERSION = 'tosca_simple_yaml_1_0_0' + +DEFS_FILE = "TOSCA_simple_yaml_definition_1_0_0.yaml" + +SECTIONS = ('metadata') diff --git a/src/main/resources/extensions/TOSCA_simple_yaml_definition_1_0_0/TOSCA_simple_yaml_definition_1_0_0.yaml b/src/main/resources/extensions/TOSCA_simple_yaml_definition_1_0_0/TOSCA_simple_yaml_definition_1_0_0.yaml new file mode 100644 index 0000000..c645e27 --- /dev/null +++ b/src/main/resources/extensions/TOSCA_simple_yaml_definition_1_0_0/TOSCA_simple_yaml_definition_1_0_0.yaml @@ -0,0 +1,240 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +########################################################################## +# The content of this file reflects TOSCA NFV Profile in YAML version +# 1.0.0. It describes the definition for TOSCA NFV types including Node Type, +# Relationship Type, CapabilityAssignment Type and Interfaces. +########################################################################## +tosca_definitions_version: tosca_simple_yaml_1_0_0 + +########################################################################## +# Node Type. +# A Node Type is a reusable entity that defines the type of one or more +# Node Templates. +########################################################################## +node_types: + tosca.nodes.nfv.VNF: + derived_from: tosca.nodes.Root # Or should this be its own top - level type? 
+ properties: + id: + type: string + description: ID of this VNF + vendor: + type: string + description: name of the vendor who generate this VNF + version: + type: version + description: version of the software for this VNF + requirements: + - virtualLink: + capability: tosca.capabilities.nfv.VirtualLinkable + relationship: tosca.relationships.nfv.VirtualLinksTo + node: tosca.nodes.nfv.VL + + tosca.nodes.nfv.VDU: + derived_from: tosca.nodes.Compute + capabilities: + high_availability: + type: tosca.capabilities.nfv.HA + virtualbinding: + type: tosca.capabilities.nfv.VirtualBindable + monitoring_parameter: + type: tosca.capabilities.nfv.Metric + requirements: + - high_availability: + capability: tosca.capabilities.nfv.HA + relationship: tosca.relationships.nfv.HA + node: tosca.nodes.nfv.VDU + occurrences: [ 0, 1 ] + + tosca.nodes.nfv.CP: + derived_from: tosca.nodes.network.Port + properties: + type: + type: string + required: false + requirements: + - virtualLink: + capability: tosca.capabilities.nfv.VirtualLinkable + relationship: tosca.relationships.nfv.VirtualLinksTo + node: tosca.nodes.nfv.VL + - virtualBinding: + capability: tosca.capabilities.nfv.VirtualBindable + relationship: tosca.relationships.nfv.VirtualBindsTo + node: tosca.nodes.nfv.VDU + attributes: + address: + type: string + + tosca.nodes.nfv.VL: + derived_from: tosca.nodes.network.Network + properties: + vendor: + type: string + required: true + description: name of the vendor who generate this VL + capabilities: + virtual_linkable: + type: tosca.capabilities.nfv.VirtualLinkable + + tosca.nodes.nfv.VL.ELine: + derived_from: tosca.nodes.nfv.VL + capabilities: + virtual_linkable: + occurrences: 2 + + tosca.nodes.nfv.VL.ELAN: + derived_from: tosca.nodes.nfv.VL + + tosca.nodes.nfv.VL.ETree: + derived_from: tosca.nodes.nfv.VL + + tosca.nodes.nfv.FP: + derived_from: tosca.nodes.Root + properties: + policy: + type: string + required: false + description: name of the vendor who generate this VL + 
requirements: + - forwarder: + capability: tosca.capabilities.nfv.Forwarder + relationship: tosca.relationships.nfv.ForwardsTo + +########################################################################## +# Relationship Type. +# A Relationship Type is a reusable entity that defines the type of one +# or more relationships between Node Types or Node Templates. +########################################################################## + +relationship_types: + tosca.relationships.nfv.VirtualLinksTo: + derived_from: tosca.relationships.network.LinksTo + valid_target_types: [ tosca.capabilities.nfv.VirtualLinkable ] + + tosca.relationships.nfv.VirtualBindsTo: + derived_from: tosca.relationships.network.BindsTo + valid_target_types: [ tosca.capabilities.nfv.VirtualBindable ] + + tosca.relationships.nfv.HA: + derived_from: tosca.relationships.Root + valid_target_types: [ tosca.capabilities.nfv.HA ] + + tosca.relationships.nfv.Monitor: + derived_from: tosca.relationships.ConnectsTo + valid_target_types: [ tosca.capabilities.nfv.Metric ] + + tosca.relationships.nfv.ForwardsTo: + derived_from: tosca.relationships.root + valid_target_types: [ tosca.capabilities.nfv.Forwarder] + +########################################################################## +# CapabilityAssignment Type. +# A CapabilityAssignment Type is a reusable entity that describes a kind of +# capability that a Node Type can declare to expose. 
+########################################################################## + +capability_types: + tosca.capabilities.nfv.VirtualLinkable: + derived_from: tosca.capabilities.network.Linkable + + tosca.capabilities.nfv.VirtualBindable: + derived_from: tosca.capabilities.network.Bindable + + tosca.capabilities.nfv.HA: + derived_from: tosca.capabilities.Root + valid_source_types: [ tosca.nodes.nfv.VDU ] + + tosca.capabilities.nfv.HA.ActiveActive: + derived_from: tosca.capabilities.nfv.HA + + tosca.capabilities.nfv.HA.ActivePassive: + derived_from: tosca.capabilities.nfv.HA + + tosca.capabilities.nfv.Metric: + derived_from: tosca.capabilities.Root + + tosca.capabilities.nfv.Forwarder: + derived_from: tosca.capabilities.Root + +########################################################################## + # Interfaces Type. + # The Interfaces element describes a list of one or more interface + # definitions for a modelable entity (e.g., a Node or Relationship Type) + # as defined within the TOSCA Simple Profile specification. +########################################################################## + +########################################################################## + # Data Type. + # A Datatype is a complex data type declaration which contains other + # complex or simple data types. +########################################################################## + +########################################################################## + # Artifact Type. + # An Artifact Type is a reusable entity that defines the type of one or more + # files which Node Types or Node Templates can have dependent relationships + # and used during operations such as during installation or deployment. +########################################################################## + +########################################################################## + # Policy Type. 
+ # TOSCA Policy Types represent logical grouping of TOSCA nodes that have + # an implied relationship and need to be orchestrated or managed together + # to achieve some result. +########################################################################## + +########################################################################## + # Group Type + # +########################################################################## +group_types: + tosca.groups.nfv.VNFFG: + derived_from: tosca.groups.Root + + properties: + vendor: + type: string + required: true + description: name of the vendor who generate this VNFFG + + version: + type: string + required: true + description: version of this VNFFG + + number_of_endpoints: + type: integer + required: true + description: count of the external endpoints included in this VNFFG + + dependent_virtual_link: + type: list + entry_schema: + type: string + required: true + description: Reference to a VLD used in this Forwarding Graph + + connection_point: + type: list + entry_schema: + type: string + required: true + description: Reference to Connection Points forming the VNFFG + + constituent_vnfs: + type: list + entry_schema: + type: string + required: true + description: Reference to a list of VNFD used in this VNF Forwarding Graph diff --git a/src/test/java/org/openecomp/sdc/toscaparser/api/elements/EntityTypeTest.java b/src/test/java/org/openecomp/sdc/toscaparser/api/elements/EntityTypeTest.java new file mode 100644 index 0000000..8e74e99 --- /dev/null +++ b/src/test/java/org/openecomp/sdc/toscaparser/api/elements/EntityTypeTest.java @@ -0,0 +1,56 @@ +package org.openecomp.sdc.toscaparser.api.elements; + +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.Map; + +import static org.junit.Assert.assertEquals; + +public class EntityTypeTest { + + private static final Map origMap = EntityType.TOSCA_DEF; + + @Test + public void 
testUpdateDefinitions() throws Exception { + + Map testData = new HashMap<>(); + testData.put("tosca.nodes.nfv.VNF", "{derived_from=tosca.nodes.Root, properties={id={type=string, description=ID of this VNF}, vendor={type=string, description=name of the vendor who generate this VNF}, version={type=version, description=version of the software for this VNF}}, requirements=[{virtualLink={capability=tosca.capabilities.nfv.VirtualLinkable, relationship=tosca.relationships.nfv.VirtualLinksTo, node=tosca.nodes.nfv.VL}}]}"); + testData.put("tosca.nodes.nfv.VDU", "{derived_from=tosca.nodes.Compute, capabilities={high_availability={type=tosca.capabilities.nfv.HA}, virtualbinding={type=tosca.capabilities.nfv.VirtualBindable}, monitoring_parameter={type=tosca.capabilities.nfv.Metric}}, requirements=[{high_availability={capability=tosca.capabilities.nfv.HA, relationship=tosca.relationships.nfv.HA, node=tosca.nodes.nfv.VDU, occurrences=[0, 1]}}]}"); + testData.put("tosca.nodes.nfv.CP", "{derived_from=tosca.nodes.network.Port, properties={type={type=string, required=false}}, requirements=[{virtualLink={capability=tosca.capabilities.nfv.VirtualLinkable, relationship=tosca.relationships.nfv.VirtualLinksTo, node=tosca.nodes.nfv.VL}}, {virtualBinding={capability=tosca.capabilities.nfv.VirtualBindable, relationship=tosca.relationships.nfv.VirtualBindsTo, node=tosca.nodes.nfv.VDU}}], attributes={address={type=string}}}"); + testData.put("tosca.nodes.nfv.VL", "{derived_from=tosca.nodes.network.Network, properties={vendor={type=string, required=true, description=name of the vendor who generate this VL}}, capabilities={virtual_linkable={type=tosca.capabilities.nfv.VirtualLinkable}}}"); + testData.put("tosca.nodes.nfv.VL.ELine", "{derived_from=tosca.nodes.nfv.VL, capabilities={virtual_linkable={occurrences=2}}}"); + testData.put("tosca.nodes.nfv.VL.ELAN", "{derived_from=tosca.nodes.nfv.VL}"); + testData.put("tosca.nodes.nfv.VL.ETree", "{derived_from=tosca.nodes.nfv.VL}"); + 
testData.put("tosca.nodes.nfv.FP", "{derived_from=tosca.nodes.Root, properties={policy={type=string, required=false, description=name of the vendor who generate this VL}}, requirements=[{forwarder={capability=tosca.capabilities.nfv.Forwarder, relationship=tosca.relationships.nfv.ForwardsTo}}]}"); + testData.put("tosca.groups.nfv.VNFFG", "{derived_from=tosca.groups.Root, properties={vendor={type=string, required=true, description=name of the vendor who generate this VNFFG}, version={type=string, required=true, description=version of this VNFFG}, number_of_endpoints={type=integer, required=true, description=count of the external endpoints included in this VNFFG}, dependent_virtual_link={type=list, entry_schema={type=string}, required=true, description=Reference to a VLD used in this Forwarding Graph}, connection_point={type=list, entry_schema={type=string}, required=true, description=Reference to Connection Points forming the VNFFG}, constituent_vnfs={type=list, entry_schema={type=string}, required=true, description=Reference to a list of VNFD used in this VNF Forwarding Graph}}}"); + testData.put("tosca.relationships.nfv.VirtualLinksTo", "{derived_from=tosca.relationships.network.LinksTo, valid_target_types=[tosca.capabilities.nfv.VirtualLinkable]}"); + testData.put("tosca.relationships.nfv.VirtualBindsTo", "{derived_from=tosca.relationships.network.BindsTo, valid_target_types=[tosca.capabilities.nfv.VirtualBindable]}"); + testData.put("tosca.relationships.nfv.HA", "{derived_from=tosca.relationships.Root, valid_target_types=[tosca.capabilities.nfv.HA]}"); + testData.put("tosca.relationships.nfv.Monitor", "{derived_from=tosca.relationships.ConnectsTo, valid_target_types=[tosca.capabilities.nfv.Metric]}"); + testData.put("tosca.relationships.nfv.ForwardsTo", "{derived_from=tosca.relationships.root, valid_target_types=[tosca.capabilities.nfv.Forwarder]}"); + testData.put("tosca.capabilities.nfv.VirtualLinkable", "{derived_from=tosca.capabilities.network.Linkable}"); + 
testData.put("tosca.capabilities.nfv.VirtualBindable", "{derived_from=tosca.capabilities.network.Bindable}"); + testData.put("tosca.capabilities.nfv.HA", "{derived_from=tosca.capabilities.Root, valid_source_types=[tosca.nodes.nfv.VDU]}"); + testData.put("tosca.capabilities.nfv.HA.ActiveActive", "{derived_from=tosca.capabilities.nfv.HA}"); + testData.put("tosca.capabilities.nfv.HA.ActivePassive", "{derived_from=tosca.capabilities.nfv.HA}"); + testData.put("tosca.capabilities.nfv.Metric", "{derived_from=tosca.capabilities.Root}"); + testData.put("tosca.capabilities.nfv.Forwarder", "{derived_from=tosca.capabilities.Root}"); + + Map expectedDefMap = origMap; + expectedDefMap.putAll(testData); + EntityType.updateDefinitions("tosca_simple_profile_for_nfv_1_0_0"); + + assertEquals(expectedDefMap, EntityType.TOSCA_DEF); + + } + + @After + public void tearDown() throws Exception { + EntityType.TOSCA_DEF = (LinkedHashMap) origMap; + } + +} \ No newline at end of file -- cgit 1.2.3-korg From eed89a056e2c51bfbc1df78995f757f3d1ed8231 Mon Sep 17 00:00:00 2001 From: Idan Amit Date: Thu, 4 Jan 2018 10:46:26 +0200 Subject: Update staging server ID Update serverID for staging to match the rest Change-Id: I6277dfaf651a26097d9066be1ce85b756a730743 Issue-ID: SDC-844 Signed-off-by: Idan Amit --- pom.xml | 5 ----- 1 file changed, 5 deletions(-) diff --git a/pom.xml b/pom.xml index 6246e2f..5d062d7 100644 --- a/pom.xml +++ b/pom.xml @@ -197,11 +197,6 @@ Release Repository ${nexus.proxy}/content/repositories/releases/ - - onap-staging - Staging Repository - ${nexus.proxy}/content/repositories/staging/ - onap-snapshots Snapshots Repository -- cgit 1.2.3-korg From 9542ec5708daad3dd43052d326780f7f5c9a987d Mon Sep 17 00:00:00 2001 From: Tal Gitelman Date: Thu, 4 Jan 2018 16:11:30 +0200 Subject: CSAR files are decompressed twice in the same thread fixed Change-Id: Id59cc7250d8431114ab90d14e03049f86a4d49ca Issue-ID: SDC-250 Signed-off-by: Tal Gitelman --- 
.../sdc/toscaparser/api/ToscaTemplate.java | 4 +-- .../openecomp/sdc/toscaparser/api/prereq/CSAR.java | 9 ++++--- .../sdc/toscaparser/api/JToscaMetadataParse.java | 30 ++++++++++++++++++--- src/test/resources/csars/emptyCsar.csar | Bin 0 -> 22 bytes .../resources/csars/service-ServiceFdnt-csar.csar | Bin 40171 -> 0 bytes .../csars/tmpCSAR_Huawei_vSPGW_fixed.csar | Bin 0 -> 44576 bytes .../csars/tmpCSAR_Huawei_vSPGW_fixed.csar.csar | Bin 44576 -> 0 bytes 7 files changed, 33 insertions(+), 10 deletions(-) create mode 100644 src/test/resources/csars/emptyCsar.csar delete mode 100644 src/test/resources/csars/service-ServiceFdnt-csar.csar create mode 100644 src/test/resources/csars/tmpCSAR_Huawei_vSPGW_fixed.csar delete mode 100644 src/test/resources/csars/tmpCSAR_Huawei_vSPGW_fixed.csar.csar diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/ToscaTemplate.java b/src/main/java/org/openecomp/sdc/toscaparser/api/ToscaTemplate.java index 07cce1c..8d7d130 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/ToscaTemplate.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/ToscaTemplate.java @@ -172,8 +172,7 @@ public class ToscaTemplate extends Object { //log.info(msg) log.debug("ToscaTemplate - Both path and yaml_dict_tpl arguments were provided. Using path and ignoring yaml_dict_tpl"); } - } - else { + } else { // no input to process... 
_abort(); } @@ -225,6 +224,7 @@ public class ToscaTemplate extends Object { verifyTemplate(); throw new JToscaException("jtosca aborting", JToscaErrorCodes.PATH_NOT_VALID.getValue()); } + private TopologyTemplate _topologyTemplate() { return new TopologyTemplate( _tplTopologyTemplate(), diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/prereq/CSAR.java b/src/main/java/org/openecomp/sdc/toscaparser/api/prereq/CSAR.java index b40eded..b64bd9a 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/prereq/CSAR.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/prereq/CSAR.java @@ -54,7 +54,6 @@ public class CSAR { metaProperties = new LinkedHashMap<>(); } - @SuppressWarnings("unchecked") public boolean validate() throws JToscaException { isValidated = true; @@ -310,9 +309,11 @@ public class CSAR { if(!isValidated) { validate(); } - tempDir = Files.createTempDirectory("JTP").toString(); - unzip(path,tempDir); - + + if(tempDir == null || tempDir.isEmpty()) { + tempDir = Files.createTempDirectory("JTP").toString(); + unzip(path,tempDir); + } } private void _validateExternalReferences() throws JToscaException { diff --git a/src/test/java/org/openecomp/sdc/toscaparser/api/JToscaMetadataParse.java b/src/test/java/org/openecomp/sdc/toscaparser/api/JToscaMetadataParse.java index 37292e5..8f55fa4 100644 --- a/src/test/java/org/openecomp/sdc/toscaparser/api/JToscaMetadataParse.java +++ b/src/test/java/org/openecomp/sdc/toscaparser/api/JToscaMetadataParse.java @@ -5,12 +5,11 @@ import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import java.io.File; -import java.util.Collection; import java.util.LinkedHashMap; import org.junit.Test; import org.openecomp.sdc.toscaparser.api.common.JToscaException; -import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.openecomp.sdc.toscaparser.api.utils.JToscaErrorCodes; import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; public class 
JToscaMetadataParse { @@ -29,11 +28,34 @@ public class JToscaMetadataParse { @Test public void noWarningsAfterParse() throws JToscaException { - String fileStr = JToscaMetadataParse.class.getClassLoader().getResource("csars/tmpCSAR_Huawei_vSPGW_fixed.csar.csar").getFile(); + String fileStr = JToscaMetadataParse.class.getClassLoader().getResource("csars/tmpCSAR_Huawei_vSPGW_fixed.csar").getFile(); File file = new File(fileStr); ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); int validationIssuesCaught = ThreadLocalsHolder.getCollector().validationIssuesCaught(); assertTrue(validationIssuesCaught == 0 ); } - + + @Test + public void testEmptyCsar() throws JToscaException { + String fileStr = JToscaMetadataParse.class.getClassLoader().getResource("csars/emptyCsar.csar").getFile(); + File file = new File(fileStr); + try { + ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); + } catch (JToscaException e) { + assertTrue(e.getCode().equals(JToscaErrorCodes.INVALID_CSAR_FORMAT.getValue())); + } + int validationIssuesCaught = ThreadLocalsHolder.getCollector().validationIssuesCaught(); + assertTrue(validationIssuesCaught == 0 ); + } + + @Test + public void testEmptyPath() throws JToscaException { + String fileStr = JToscaMetadataParse.class.getClassLoader().getResource("").getFile(); + File file = new File(fileStr); + try { + ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); + }catch (JToscaException e) { + assertTrue(e.getCode().equals(JToscaErrorCodes.PATH_NOT_VALID.getValue())); + } + } } diff --git a/src/test/resources/csars/emptyCsar.csar b/src/test/resources/csars/emptyCsar.csar new file mode 100644 index 0000000..15cb0ec Binary files /dev/null and b/src/test/resources/csars/emptyCsar.csar differ diff --git a/src/test/resources/csars/service-ServiceFdnt-csar.csar b/src/test/resources/csars/service-ServiceFdnt-csar.csar deleted file mode 100644 
index 983dc9b..0000000 Binary files a/src/test/resources/csars/service-ServiceFdnt-csar.csar and /dev/null differ diff --git a/src/test/resources/csars/tmpCSAR_Huawei_vSPGW_fixed.csar b/src/test/resources/csars/tmpCSAR_Huawei_vSPGW_fixed.csar new file mode 100644 index 0000000..70f8cc4 Binary files /dev/null and b/src/test/resources/csars/tmpCSAR_Huawei_vSPGW_fixed.csar differ diff --git a/src/test/resources/csars/tmpCSAR_Huawei_vSPGW_fixed.csar.csar b/src/test/resources/csars/tmpCSAR_Huawei_vSPGW_fixed.csar.csar deleted file mode 100644 index 70f8cc4..0000000 Binary files a/src/test/resources/csars/tmpCSAR_Huawei_vSPGW_fixed.csar.csar and /dev/null differ -- cgit 1.2.3-korg From f3128e7e128235c078e393881556fb5b6e89e19a Mon Sep 17 00:00:00 2001 From: Tal Gitelman Date: Mon, 15 Jan 2018 17:03:47 +0200 Subject: fix licensing issues Change-Id: I5d4457d913d6530a0abe741d390638d21835785f Issue-ID: SDC-928 Signed-off-by: Tal Gitelman --- .../resources/csars/tmpCSAR_Huawei_vSPGW_fixed.csar | Bin 44576 -> 43627 bytes 1 file changed, 0 insertions(+), 0 deletions(-) diff --git a/src/test/resources/csars/tmpCSAR_Huawei_vSPGW_fixed.csar b/src/test/resources/csars/tmpCSAR_Huawei_vSPGW_fixed.csar index 70f8cc4..194fabb 100644 Binary files a/src/test/resources/csars/tmpCSAR_Huawei_vSPGW_fixed.csar and b/src/test/resources/csars/tmpCSAR_Huawei_vSPGW_fixed.csar differ -- cgit 1.2.3-korg From 929374488fa036b9c60c0ab5e9e9f83ccf931955 Mon Sep 17 00:00:00 2001 From: Tal Gitelman Date: Tue, 16 Jan 2018 17:57:49 +0200 Subject: Review security issues: sdc-jtosca Change-Id: I79c883357e02b892395904305a38e12ad77f71cd Issue-ID: SDC-809 Signed-off-by: Tal Gitelman --- pom.xml | 1 + 1 file changed, 1 insertion(+) diff --git a/pom.xml b/pom.xml index 5d062d7..2b2f8bb 100644 --- a/pom.xml +++ b/pom.xml @@ -67,6 +67,7 @@ com.opencsv opencsv 3.10 + test -- cgit 1.2.3-korg From e614317c7147947441b31e773604a6cb6ddfe059 Mon Sep 17 00:00:00 2001 From: Tal Gitelman Date: Thu, 18 Jan 2018 14:08:29 +0200 
Subject: Review security issues: sdc-jtosca Change-Id: Ia8fd505b61a95cecd0607ac5b6e65e04a576cbfb Issue-ID: SDC-809 Signed-off-by: Tal Gitelman --- pom.xml | 2 +- version.properties | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index 2b2f8bb..a6afeee 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ org.openecomp.sdc.jtosca jtosca - 1.2.0-SNAPSHOT + 1.2.1-SNAPSHOT sdc-jtosca diff --git a/version.properties b/version.properties index 8471f55..10a6323 100644 --- a/version.properties +++ b/version.properties @@ -5,7 +5,7 @@ major=1 minor=2 -patch=0 +patch=1 base_version=${major}.${minor}.${patch} -- cgit 1.2.3-korg From 1537e926aa1b994b07f9587b699c143a9bb3da20 Mon Sep 17 00:00:00 2001 From: PriyanshuAgarwal Date: Tue, 6 Feb 2018 11:22:30 +0530 Subject: Initialize metaProperties in JTosca. To enable SDC Parser to parse individual Yamls. Change-Id: I94035dc0d2bd58fc192df7dc2f572cef5181b2a6 Issue-ID: SDC-999 Signed-off-by: priyanshu --- src/main/java/org/openecomp/sdc/toscaparser/api/ToscaTemplate.java | 1 + 1 file changed, 1 insertion(+) diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/ToscaTemplate.java b/src/main/java/org/openecomp/sdc/toscaparser/api/ToscaTemplate.java index 8d7d130..4c19be6 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/ToscaTemplate.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/ToscaTemplate.java @@ -136,6 +136,7 @@ public class ToscaTemplate extends Object { nestedToscaTplsWithTopology = new ConcurrentHashMap<>(); nestedToscaTemplatesWithTopology = new ArrayList(); resolveGetInput = _resolveGetInput; + metaProperties = new LinkedHashMap<>(); if(_path != null && !_path.isEmpty()) { // save the original input path -- cgit 1.2.3-korg From c181d7863fc1cf62d651e4ff09d9e52828b7f921 Mon Sep 17 00:00:00 2001 From: PriyanshuAgarwal Date: Tue, 6 Feb 2018 11:22:30 +0530 Subject: Import feature is ignoring multiple imports. Merged SDC-666 and SDC-668 as both are dependent. 
Change-Id: Idd4f67724d03bad79bab4a39b75a8145658ef8b9 Issue-ID: SDC-666 Signed-off-by: priyanshu --- .../sdc/toscaparser/api/ImportsLoader.java | 73 ++++--- .../sdc/toscaparser/api/ToscaTemplate.java | 212 +++++++++++++++++++-- .../sdc/toscaparser/api/JToscaImportTest.java | 64 +++++++ .../resources/csars/resource-Spgw-csar-ZTE.csar | Bin 0 -> 31639 bytes src/test/resources/csars/sdc-onboarding_csar.csar | Bin 0 -> 80596 bytes 5 files changed, 306 insertions(+), 43 deletions(-) create mode 100644 src/test/java/org/openecomp/sdc/toscaparser/api/JToscaImportTest.java create mode 100644 src/test/resources/csars/resource-Spgw-csar-ZTE.csar create mode 100644 src/test/resources/csars/sdc-onboarding_csar.csar diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/ImportsLoader.java b/src/main/java/org/openecomp/sdc/toscaparser/api/ImportsLoader.java index 5e94378..b2a0da7 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/ImportsLoader.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/ImportsLoader.java @@ -28,6 +28,7 @@ public class ImportsLoader { private ArrayList typeDefinitionList; private LinkedHashMap customDefs; + private LinkedHashMap allCustomDefs; private ArrayList> nestedToscaTpls; private LinkedHashMap repositories; @@ -39,6 +40,7 @@ public class ImportsLoader { this.importslist = _importslist; customDefs = new LinkedHashMap(); + allCustomDefs = new LinkedHashMap(); nestedToscaTpls = new ArrayList>(); if((_path == null || _path.isEmpty()) && tpl == null) { //msg = _('Input tosca template is not provided.') @@ -65,7 +67,7 @@ public class ImportsLoader { } public LinkedHashMap getCustomDefs() { - return customDefs; + return allCustomDefs; } public ArrayList> getNestedToscaTpls() { @@ -131,33 +133,50 @@ public class ImportsLoader { } } - @SuppressWarnings("unchecked") + /** + * This method is used to get consolidated custom definitions by passing custom Types from + * each import. 
The resultant collection is then passed back which contains all import + * definitions + * + * @param customType the custom type + * @param namespacePrefix the namespace prefix + */ + @SuppressWarnings("unchecked") private void _updateCustomDefs(LinkedHashMap customType, String namespacePrefix) { - LinkedHashMap outerCustomTypes;// = new LinkedHashMap(); - for(String typeDef: typeDefinitionList) { - if(typeDef.equals("imports")) { - // imports are ArrayList... - customDefs.put("imports",(ArrayList)customType.get(typeDef)); - } - else { - outerCustomTypes = (LinkedHashMap)customType.get(typeDef); - if(outerCustomTypes != null) { - if(namespacePrefix != null && !namespacePrefix.isEmpty()) { - LinkedHashMap prefixCustomTypes = new LinkedHashMap(); - for(Map.Entry me: outerCustomTypes.entrySet()) { - String typeDefKey = me.getKey(); - String nameSpacePrefixToKey = namespacePrefix + "." + typeDefKey; - prefixCustomTypes.put(nameSpacePrefixToKey, outerCustomTypes.get(typeDefKey)); - } - customDefs.putAll(prefixCustomTypes); - } - else { - customDefs.putAll(outerCustomTypes); - } - } - } - } - } + LinkedHashMap outerCustomTypes; + for(String typeDef: typeDefinitionList) { + if(typeDef.equals("imports")) { + customDefs.put("imports", customType.get(typeDef)); + if (allCustomDefs.isEmpty() || allCustomDefs.get("imports") == null){ + allCustomDefs.put("imports",customType.get(typeDef)); + } + else if (customType.get(typeDef) != null){ + Set allCustomImports = new HashSet<>((ArrayList)allCustomDefs.get("imports")); + allCustomImports.addAll((ArrayList) customType.get(typeDef)); + allCustomDefs.put("imports", new ArrayList<>(allCustomImports)); + } + } + else { + outerCustomTypes = (LinkedHashMap)customType.get(typeDef); + if(outerCustomTypes != null) { + if(namespacePrefix != null && !namespacePrefix.isEmpty()) { + LinkedHashMap prefixCustomTypes = new LinkedHashMap(); + for(Map.Entry me: outerCustomTypes.entrySet()) { + String typeDefKey = me.getKey(); + String 
nameSpacePrefixToKey = namespacePrefix + "." + typeDefKey; + prefixCustomTypes.put(nameSpacePrefixToKey, outerCustomTypes.get(typeDefKey)); + } + customDefs.putAll(prefixCustomTypes); + allCustomDefs.putAll(prefixCustomTypes); + } + else { + customDefs.putAll(outerCustomTypes); + allCustomDefs.putAll(outerCustomTypes); + } + } + } + } + } private void _updateNestedToscaTpls(String fullFileName,LinkedHashMap customTpl) { if(fullFileName != null && customTpl != null) { diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/ToscaTemplate.java b/src/main/java/org/openecomp/sdc/toscaparser/api/ToscaTemplate.java index 4c19be6..e96ca56 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/ToscaTemplate.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/ToscaTemplate.java @@ -9,6 +9,9 @@ import java.io.IOException; import java.io.InputStream; import java.util.*; import java.util.concurrent.ConcurrentHashMap; +import java.nio.file.Files; +import java.util.function.Predicate; +import java.nio.file.Paths; import org.openecomp.sdc.toscaparser.api.common.ValidationIssueCollector; import org.openecomp.sdc.toscaparser.api.common.JToscaException; @@ -70,6 +73,7 @@ public class ToscaTemplate extends Object { private boolean isFile; private String path; private String inputPath; + private String rootPath; private LinkedHashMap parsedParams; private boolean resolveGetInput; private LinkedHashMap tpl; @@ -91,6 +95,7 @@ public class ToscaTemplate extends Object { private String csarTempDir; private int nestingLoopCounter; private LinkedHashMap> metaProperties; + private Set processedImports; public ToscaTemplate(String _path, LinkedHashMap _parsedParams, @@ -193,6 +198,9 @@ public class ToscaTemplate extends Object { if(tpl != null) { parsedParams = _parsedParams; _validateField(); + this.rootPath = path; + this.processedImports = new HashSet(); + this.imports = _tplImports(); this.version = _tplVersion(); this.metaData = _tplMetaData(); this.relationshipTypes = 
_tplRelationshipTypes(); @@ -305,30 +313,200 @@ public class ToscaTemplate extends Object { private ArrayList _policies() { return topologyTemplate.getPolicies(); } - - private LinkedHashMap _getAllCustomDefs(ArrayList alImports) { - + + /** + * This method is used to get consolidated custom definitions from all imports + * It is logically divided in two parts to handle imports; map and list formats. + * Before processing the imports; it sorts them to make sure the current directory imports are + * being processed first and then others. Once sorted; it processes each import one by one in + * recursive manner. + * To avoid cyclic dependency among imports; this method uses a set to keep track of all + * imports which are already processed and filters the imports which occurs more than once. + * + * @param alImports all imports which needs to be processed + * @return the linked hash map containing all import definitions + */ + private LinkedHashMap _getAllCustomDefs(Object alImports) { + String types[] = { - IMPORTS, NODE_TYPES, CAPABILITY_TYPES, RELATIONSHIP_TYPES, - DATA_TYPES, INTERFACE_TYPES, POLICY_TYPES, GROUP_TYPES + IMPORTS, NODE_TYPES, CAPABILITY_TYPES, RELATIONSHIP_TYPES, + DATA_TYPES, INTERFACE_TYPES, POLICY_TYPES, GROUP_TYPES }; - LinkedHashMap customDefsFinal = new LinkedHashMap(); - LinkedHashMap customDefs = _getCustomTypes(types,alImports); - if(customDefs != null) { - customDefsFinal.putAll(customDefs); - if(customDefs.get(IMPORTS) != null) { - @SuppressWarnings("unchecked") - LinkedHashMap importDefs = _getAllCustomDefs((ArrayList)customDefs.get(IMPORTS)); - customDefsFinal.putAll(importDefs); + LinkedHashMap customDefsFinal = new LinkedHashMap<>(); + + List> imports = (List>) alImports; + if (imports != null && !imports.isEmpty()) { + if (imports.get(0) instanceof LinkedHashMap) { + imports = sortImports(imports); + + for (Map map : imports) { + List> singleImportList = new ArrayList(); + singleImportList.add(map); + + Map importNameDetails = 
getValidFileNameForImportReference(singleImportList); + singleImportList = filterImportsForRecursion(singleImportList, importNameDetails); + + if(!singleImportList.get(0).isEmpty()){ + LinkedHashMap customDefs = _getCustomTypes(types, new ArrayList<>(singleImportList)); + processedImports.add(importNameDetails.get("importFileName")); + + if (customDefs != null) { + customDefsFinal.putAll(customDefs); + + if (customDefs.get(IMPORTS) != null) { + resetPathForRecursiveImports(importNameDetails.get("importRelativeName")); + LinkedHashMap importDefs = _getAllCustomDefs(customDefs.get(IMPORTS)); + customDefsFinal.putAll(importDefs); + } + } + } + } + } else { + LinkedHashMap customDefs = _getCustomTypes(types, new ArrayList<>(imports)); + if (customDefs != null) { + customDefsFinal.putAll(customDefs); + + if (customDefs.get(IMPORTS) != null) { + LinkedHashMap importDefs = _getAllCustomDefs(customDefs.get(IMPORTS)); + customDefsFinal.putAll(importDefs); + } + } } } - - // As imports are not custom_types, remove from the dict - customDefsFinal.remove(IMPORTS); + + // As imports are not custom_types, remove from the dict + customDefsFinal.remove(IMPORTS); return customDefsFinal; } + /** + * This method is used to sort the imports in order so that same directory + * imports will be processed first + * + * @param customImports the custom imports + * @return the sorted list of imports + */ + private List> sortImports(List> customImports){ + List> finalList1 = new ArrayList<>(); + List> finalList2 = new ArrayList<>(); + Iterator> itr = customImports.iterator(); + while(itr.hasNext()) { + Map innerMap = itr.next(); + if (innerMap.toString().contains("../")) { + finalList2.add(innerMap); + itr.remove(); + } + else if (innerMap.toString().contains("/")) { + finalList1.add(innerMap); + itr.remove(); + } + } + + customImports.addAll(finalList1); + customImports.addAll(finalList2); + return customImports; + } + + /** + * This method is used to reset PATH variable after processing of 
current import file is done + * This is required because of relative path nature of imports present in files. + * + * @param currImportRelativeName the current import relative name + */ + private void resetPathForRecursiveImports(String currImportRelativeName){ + path = getPath(path, currImportRelativeName); + } + + /** + * This is a recursive method which starts from current import and then recursively finds a + * valid path relative to current import file name. + * By doing this it handles all nested hierarchy of imports defined in CSARs + * + * @param path the path + * @param importFileName the import file name + * @return the string containing updated path value + */ + private String getPath(String path, String importFileName){ + String tempFullPath = (Paths.get(path).toAbsolutePath().getParent() + .toString() + File.separator + importFileName.replace("../", "")).replace('\\', '/'); + String tempPartialPath = (Paths.get(path).toAbsolutePath().getParent().toString()).replace('\\', '/'); + if(Files.exists(Paths.get(tempFullPath))) + return tempFullPath; + else + return getPath(tempPartialPath, importFileName); + } + + /** + * This method is used to get full path name for the file which needs to be processed. It helps + * in situation where files are present in different directory and are references as relative + * paths. 
+ * + * @param customImports the custom imports + * @return the map containing import file full and relative paths + */ + private Map getValidFileNameForImportReference(List> + customImports){ + String importFileName; + Map retMap = new HashMap<>(); + for (Map map1 : customImports) { + for (Map.Entry entry : map1.entrySet()) { + Map innerMostMap = (Map) entry.getValue(); + Iterator> it = innerMostMap.entrySet().iterator(); + while (it.hasNext()) { + Map.Entry val = it.next(); + if(val.getValue().contains("/")){ + importFileName = (Paths.get(rootPath).toAbsolutePath().getParent().toString() + File + .separator + val.getValue().replace("../", "")).replace('\\', '/'); + } + else { + importFileName = (Paths.get(path).toAbsolutePath().getParent().toString() + File + .separator + val.getValue().replace("../", "")).replace('\\', '/'); + } + retMap.put("importFileName", importFileName); + retMap.put("importRelativeName", val.getValue()); + } + } + } + return retMap; + } + + /** + * This method is used to filter the imports which already gets processed in previous step. 
+ * It handles the use case of cyclic dependency in imports which may cause Stack Overflow + * exception + * + * @param customImports the custom imports + * @param importNameDetails the import name details + * @return the list containing filtered imports + */ + private List> filterImportsForRecursion(List> + customImports, Map importNameDetails){ + for (Map map1 : customImports) { + for (Map.Entry entry : map1.entrySet()) { + Map innerMostMap = (Map) entry.getValue(); + Iterator> it = innerMostMap.entrySet().iterator(); + while (it.hasNext()) { + it.next(); + if (processedImports.contains(importNameDetails.get("importFileName"))) { + it.remove(); + } + } + } + } + + // Remove Empty elements + Iterator> itr = customImports.iterator(); + while(itr.hasNext()) { + Map innerMap = itr.next(); + Predicate predicate = p-> p.values().isEmpty(); + innerMap.values().removeIf(predicate); + } + + return customImports; + } + @SuppressWarnings("unchecked") private LinkedHashMap _getCustomTypes(Object typeDefinitions,ArrayList alImports) { @@ -396,6 +574,8 @@ public class ToscaTemplate extends Object { log.error("ToscaTemplate - _handleNestedToscaTemplatesWithTopology - Nested Topologies Loop: too many levels, aborting"); return; } + // Reset Processed Imports for nested templates + this.processedImports = new HashSet<>(); for(Map.Entry me: nestedToscaTplsWithTopology.entrySet()) { String fname = me.getKey(); LinkedHashMap toscaTpl = diff --git a/src/test/java/org/openecomp/sdc/toscaparser/api/JToscaImportTest.java b/src/test/java/org/openecomp/sdc/toscaparser/api/JToscaImportTest.java new file mode 100644 index 0000000..c8a30fa --- /dev/null +++ b/src/test/java/org/openecomp/sdc/toscaparser/api/JToscaImportTest.java @@ -0,0 +1,64 @@ +package org.openecomp.sdc.toscaparser.api; + +import org.junit.Test; +import org.openecomp.sdc.toscaparser.api.common.JToscaException; +import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +import java.io.File; +import 
java.util.ArrayList; +import java.util.List; +import java.util.stream.Collectors; + +import static org.junit.Assert.assertEquals; + +public class JToscaImportTest { + + @Test + public void testNoMissingTypeValidationError() throws JToscaException { + String fileStr = JToscaImportTest.class.getClassLoader().getResource + ("csars/sdc-onboarding_csar.csar").getFile(); + File file = new File(fileStr); + new ToscaTemplate(file.getAbsolutePath(), null, true, null); + List missingTypeErrors = ThreadLocalsHolder.getCollector() + .getValidationIssueReport() + .stream() + .filter(s -> s.contains("JE136")) + .collect(Collectors.toList()); + assertEquals(0, missingTypeErrors.size()); + } + + @Test + public void testNoStackOverFlowError() { + Exception jte = null; + try { + String fileStr = JToscaImportTest.class.getClassLoader().getResource + ("csars/sdc-onboarding_csar.csar").getFile(); + File file = new File(fileStr); + new ToscaTemplate(file.getAbsolutePath(), null, true, null); + } catch (Exception e){ + jte = e; + } + assertEquals(null, jte); + } + + @Test + public void testNoInvalidImports() throws JToscaException { + List fileNames = new ArrayList<>(); + fileNames.add("csars/tmpCSAR_Huawei_vSPGW_fixed.csar"); + fileNames.add("csars/sdc-onboarding_csar.csar"); + fileNames.add("csars/resource-Spgw-csar-ZTE.csar"); + + for (String fileName : fileNames) { + String fileStr = JToscaImportTest.class.getClassLoader().getResource(fileName).getFile(); + File file = new File(fileStr); + new ToscaTemplate(file.getAbsolutePath(), null, true, null); + List invalidImportErrors = ThreadLocalsHolder.getCollector() + .getValidationIssueReport() + .stream() + .filter(s -> s.contains("JE195")) + .collect(Collectors.toList()); + assertEquals(0, invalidImportErrors.size()); + } + } + +} diff --git a/src/test/resources/csars/resource-Spgw-csar-ZTE.csar b/src/test/resources/csars/resource-Spgw-csar-ZTE.csar new file mode 100644 index 0000000..58c3ddd Binary files /dev/null and 
b/src/test/resources/csars/resource-Spgw-csar-ZTE.csar differ diff --git a/src/test/resources/csars/sdc-onboarding_csar.csar b/src/test/resources/csars/sdc-onboarding_csar.csar new file mode 100644 index 0000000..e1c3267 Binary files /dev/null and b/src/test/resources/csars/sdc-onboarding_csar.csar differ -- cgit 1.2.3-korg From 224c7e15ed15cb7fd7250c8428e66b3c9b5cb0b1 Mon Sep 17 00:00:00 2001 From: Michael Lando Date: Thu, 15 Feb 2018 19:28:02 +0200 Subject: update year Change-Id: I12b01b335b70bcf368a5f4687a83999d55de652c Issue-ID: SDC-1046 Signed-off-by: Michael Lando --- LICENSE.TXT | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/LICENSE.TXT b/LICENSE.TXT index 724329f..2b91311 100644 --- a/LICENSE.TXT +++ b/LICENSE.TXT @@ -1,8 +1,8 @@ -/* +/* * ============LICENSE_START========================================== * =================================================================== -* Copyright © 2017 AT&T Intellectual Property. -* Copyright © 2017 Amdocs +* Copyright © 2018 AT&T Intellectual Property. +* Copyright © 2018 Amdocs * All rights reserved. 
* =================================================================== * Licensed under the Apache License, Version 2.0 (the "License"); -- cgit 1.2.3-korg From 5e7130be2350a29d8dbc65f52cdca596a188114f Mon Sep 17 00:00:00 2001 From: Natalia Sheshukov Date: Thu, 22 Feb 2018 16:40:51 +0200 Subject: [367741] vLAN Tagging - Support Tosca Policies Change-Id: Icb0739eaf5abd071c45b4d7c49d7b412e10c8c4d Issue-ID: SDC-1056 Signed-off-by: Natalia Sheshukov --- LICENSE.TXT | 6 +- pom.xml | 2 +- .../sdc/toscaparser/api/ImportsLoader.java | 73 +++---- .../org/openecomp/sdc/toscaparser/api/Policy.java | 6 + .../sdc/toscaparser/api/ToscaTemplate.java | 216 ++------------------- .../sdc/toscaparser/api/JToscaImportTest.java | 64 ------ .../resources/csars/resource-Spgw-csar-ZTE.csar | Bin 31639 -> 0 bytes src/test/resources/csars/sdc-onboarding_csar.csar | Bin 80596 -> 0 bytes version.properties | 2 +- 9 files changed, 58 insertions(+), 311 deletions(-) delete mode 100644 src/test/java/org/openecomp/sdc/toscaparser/api/JToscaImportTest.java delete mode 100644 src/test/resources/csars/resource-Spgw-csar-ZTE.csar delete mode 100644 src/test/resources/csars/sdc-onboarding_csar.csar diff --git a/LICENSE.TXT b/LICENSE.TXT index 2b91311..724329f 100644 --- a/LICENSE.TXT +++ b/LICENSE.TXT @@ -1,8 +1,8 @@ -/* +/* * ============LICENSE_START========================================== * =================================================================== -* Copyright © 2018 AT&T Intellectual Property. -* Copyright © 2018 Amdocs +* Copyright © 2017 AT&T Intellectual Property. +* Copyright © 2017 Amdocs * All rights reserved. 
* =================================================================== * Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/pom.xml b/pom.xml index a6afeee..63da98a 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ org.openecomp.sdc.jtosca jtosca - 1.2.1-SNAPSHOT + 1.2.2-SNAPSHOT sdc-jtosca diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/ImportsLoader.java b/src/main/java/org/openecomp/sdc/toscaparser/api/ImportsLoader.java index b2a0da7..5e94378 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/ImportsLoader.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/ImportsLoader.java @@ -28,7 +28,6 @@ public class ImportsLoader { private ArrayList typeDefinitionList; private LinkedHashMap customDefs; - private LinkedHashMap allCustomDefs; private ArrayList> nestedToscaTpls; private LinkedHashMap repositories; @@ -40,7 +39,6 @@ public class ImportsLoader { this.importslist = _importslist; customDefs = new LinkedHashMap(); - allCustomDefs = new LinkedHashMap(); nestedToscaTpls = new ArrayList>(); if((_path == null || _path.isEmpty()) && tpl == null) { //msg = _('Input tosca template is not provided.') @@ -67,7 +65,7 @@ public class ImportsLoader { } public LinkedHashMap getCustomDefs() { - return allCustomDefs; + return customDefs; } public ArrayList> getNestedToscaTpls() { @@ -133,50 +131,33 @@ public class ImportsLoader { } } - /** - * This method is used to get consolidated custom definitions by passing custom Types from - * each import. 
The resultant collection is then passed back which contains all import - * definitions - * - * @param customType the custom type - * @param namespacePrefix the namespace prefix - */ - @SuppressWarnings("unchecked") + @SuppressWarnings("unchecked") private void _updateCustomDefs(LinkedHashMap customType, String namespacePrefix) { - LinkedHashMap outerCustomTypes; - for(String typeDef: typeDefinitionList) { - if(typeDef.equals("imports")) { - customDefs.put("imports", customType.get(typeDef)); - if (allCustomDefs.isEmpty() || allCustomDefs.get("imports") == null){ - allCustomDefs.put("imports",customType.get(typeDef)); - } - else if (customType.get(typeDef) != null){ - Set allCustomImports = new HashSet<>((ArrayList)allCustomDefs.get("imports")); - allCustomImports.addAll((ArrayList) customType.get(typeDef)); - allCustomDefs.put("imports", new ArrayList<>(allCustomImports)); - } - } - else { - outerCustomTypes = (LinkedHashMap)customType.get(typeDef); - if(outerCustomTypes != null) { - if(namespacePrefix != null && !namespacePrefix.isEmpty()) { - LinkedHashMap prefixCustomTypes = new LinkedHashMap(); - for(Map.Entry me: outerCustomTypes.entrySet()) { - String typeDefKey = me.getKey(); - String nameSpacePrefixToKey = namespacePrefix + "." + typeDefKey; - prefixCustomTypes.put(nameSpacePrefixToKey, outerCustomTypes.get(typeDefKey)); - } - customDefs.putAll(prefixCustomTypes); - allCustomDefs.putAll(prefixCustomTypes); - } - else { - customDefs.putAll(outerCustomTypes); - allCustomDefs.putAll(outerCustomTypes); - } - } - } - } - } + LinkedHashMap outerCustomTypes;// = new LinkedHashMap(); + for(String typeDef: typeDefinitionList) { + if(typeDef.equals("imports")) { + // imports are ArrayList... 
+ customDefs.put("imports",(ArrayList)customType.get(typeDef)); + } + else { + outerCustomTypes = (LinkedHashMap)customType.get(typeDef); + if(outerCustomTypes != null) { + if(namespacePrefix != null && !namespacePrefix.isEmpty()) { + LinkedHashMap prefixCustomTypes = new LinkedHashMap(); + for(Map.Entry me: outerCustomTypes.entrySet()) { + String typeDefKey = me.getKey(); + String nameSpacePrefixToKey = namespacePrefix + "." + typeDefKey; + prefixCustomTypes.put(nameSpacePrefixToKey, outerCustomTypes.get(typeDefKey)); + } + customDefs.putAll(prefixCustomTypes); + } + else { + customDefs.putAll(outerCustomTypes); + } + } + } + } + } private void _updateNestedToscaTpls(String fullFileName,LinkedHashMap customTpl) { if(fullFileName != null && customTpl != null) { diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/Policy.java b/src/main/java/org/openecomp/sdc/toscaparser/api/Policy.java index 26805bd..1f536f8 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/Policy.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/Policy.java @@ -119,6 +119,12 @@ public class Policy extends EntityTemplate { ", properties=" + properties + '}'; } + + public int compareTo(Policy other){ + if(this.equals(other)) + return 0; + return this.getName().compareTo(other.getName()) == 0 ? 
this.getType().compareTo(other.getType()) : this.getName().compareTo(other.getName()); + } } /*python diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/ToscaTemplate.java b/src/main/java/org/openecomp/sdc/toscaparser/api/ToscaTemplate.java index e96ca56..7553414 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/ToscaTemplate.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/ToscaTemplate.java @@ -9,9 +9,6 @@ import java.io.IOException; import java.io.InputStream; import java.util.*; import java.util.concurrent.ConcurrentHashMap; -import java.nio.file.Files; -import java.util.function.Predicate; -import java.nio.file.Paths; import org.openecomp.sdc.toscaparser.api.common.ValidationIssueCollector; import org.openecomp.sdc.toscaparser.api.common.JToscaException; @@ -73,7 +70,6 @@ public class ToscaTemplate extends Object { private boolean isFile; private String path; private String inputPath; - private String rootPath; private LinkedHashMap parsedParams; private boolean resolveGetInput; private LinkedHashMap tpl; @@ -95,7 +91,6 @@ public class ToscaTemplate extends Object { private String csarTempDir; private int nestingLoopCounter; private LinkedHashMap> metaProperties; - private Set processedImports; public ToscaTemplate(String _path, LinkedHashMap _parsedParams, @@ -198,9 +193,6 @@ public class ToscaTemplate extends Object { if(tpl != null) { parsedParams = _parsedParams; _validateField(); - this.rootPath = path; - this.processedImports = new HashSet(); - this.imports = _tplImports(); this.version = _tplVersion(); this.metaData = _tplMetaData(); this.relationshipTypes = _tplRelationshipTypes(); @@ -313,200 +305,30 @@ public class ToscaTemplate extends Object { private ArrayList _policies() { return topologyTemplate.getPolicies(); } - - /** - * This method is used to get consolidated custom definitions from all imports - * It is logically divided in two parts to handle imports; map and list formats. 
- * Before processing the imports; it sorts them to make sure the current directory imports are - * being processed first and then others. Once sorted; it processes each import one by one in - * recursive manner. - * To avoid cyclic dependency among imports; this method uses a set to keep track of all - * imports which are already processed and filters the imports which occurs more than once. - * - * @param alImports all imports which needs to be processed - * @return the linked hash map containing all import definitions - */ - private LinkedHashMap _getAllCustomDefs(Object alImports) { - + + private LinkedHashMap _getAllCustomDefs(ArrayList alImports) { + String types[] = { - IMPORTS, NODE_TYPES, CAPABILITY_TYPES, RELATIONSHIP_TYPES, - DATA_TYPES, INTERFACE_TYPES, POLICY_TYPES, GROUP_TYPES + IMPORTS, NODE_TYPES, CAPABILITY_TYPES, RELATIONSHIP_TYPES, + DATA_TYPES, INTERFACE_TYPES, POLICY_TYPES, GROUP_TYPES }; - LinkedHashMap customDefsFinal = new LinkedHashMap<>(); - - List> imports = (List>) alImports; - if (imports != null && !imports.isEmpty()) { - if (imports.get(0) instanceof LinkedHashMap) { - imports = sortImports(imports); - - for (Map map : imports) { - List> singleImportList = new ArrayList(); - singleImportList.add(map); - - Map importNameDetails = getValidFileNameForImportReference(singleImportList); - singleImportList = filterImportsForRecursion(singleImportList, importNameDetails); - - if(!singleImportList.get(0).isEmpty()){ - LinkedHashMap customDefs = _getCustomTypes(types, new ArrayList<>(singleImportList)); - processedImports.add(importNameDetails.get("importFileName")); - - if (customDefs != null) { - customDefsFinal.putAll(customDefs); - - if (customDefs.get(IMPORTS) != null) { - resetPathForRecursiveImports(importNameDetails.get("importRelativeName")); - LinkedHashMap importDefs = _getAllCustomDefs(customDefs.get(IMPORTS)); - customDefsFinal.putAll(importDefs); - } - } - } - } - } else { - LinkedHashMap customDefs = _getCustomTypes(types, new 
ArrayList<>(imports)); - if (customDefs != null) { - customDefsFinal.putAll(customDefs); - - if (customDefs.get(IMPORTS) != null) { - LinkedHashMap importDefs = _getAllCustomDefs(customDefs.get(IMPORTS)); - customDefsFinal.putAll(importDefs); - } - } + LinkedHashMap customDefsFinal = new LinkedHashMap(); + LinkedHashMap customDefs = _getCustomTypes(types,alImports); + if(customDefs != null) { + customDefsFinal.putAll(customDefs); + if(customDefs.get(IMPORTS) != null) { + @SuppressWarnings("unchecked") + LinkedHashMap importDefs = _getAllCustomDefs((ArrayList)customDefs.get(IMPORTS)); + customDefsFinal.putAll(importDefs); } } - - // As imports are not custom_types, remove from the dict - customDefsFinal.remove(IMPORTS); + + // As imports are not custom_types, remove from the dict + customDefsFinal.remove(IMPORTS); return customDefsFinal; } - /** - * This method is used to sort the imports in order so that same directory - * imports will be processed first - * - * @param customImports the custom imports - * @return the sorted list of imports - */ - private List> sortImports(List> customImports){ - List> finalList1 = new ArrayList<>(); - List> finalList2 = new ArrayList<>(); - Iterator> itr = customImports.iterator(); - while(itr.hasNext()) { - Map innerMap = itr.next(); - if (innerMap.toString().contains("../")) { - finalList2.add(innerMap); - itr.remove(); - } - else if (innerMap.toString().contains("/")) { - finalList1.add(innerMap); - itr.remove(); - } - } - - customImports.addAll(finalList1); - customImports.addAll(finalList2); - return customImports; - } - - /** - * This method is used to reset PATH variable after processing of current import file is done - * This is required because of relative path nature of imports present in files. 
- * - * @param currImportRelativeName the current import relative name - */ - private void resetPathForRecursiveImports(String currImportRelativeName){ - path = getPath(path, currImportRelativeName); - } - - /** - * This is a recursive method which starts from current import and then recursively finds a - * valid path relative to current import file name. - * By doing this it handles all nested hierarchy of imports defined in CSARs - * - * @param path the path - * @param importFileName the import file name - * @return the string containing updated path value - */ - private String getPath(String path, String importFileName){ - String tempFullPath = (Paths.get(path).toAbsolutePath().getParent() - .toString() + File.separator + importFileName.replace("../", "")).replace('\\', '/'); - String tempPartialPath = (Paths.get(path).toAbsolutePath().getParent().toString()).replace('\\', '/'); - if(Files.exists(Paths.get(tempFullPath))) - return tempFullPath; - else - return getPath(tempPartialPath, importFileName); - } - - /** - * This method is used to get full path name for the file which needs to be processed. It helps - * in situation where files are present in different directory and are references as relative - * paths. 
- * - * @param customImports the custom imports - * @return the map containing import file full and relative paths - */ - private Map getValidFileNameForImportReference(List> - customImports){ - String importFileName; - Map retMap = new HashMap<>(); - for (Map map1 : customImports) { - for (Map.Entry entry : map1.entrySet()) { - Map innerMostMap = (Map) entry.getValue(); - Iterator> it = innerMostMap.entrySet().iterator(); - while (it.hasNext()) { - Map.Entry val = it.next(); - if(val.getValue().contains("/")){ - importFileName = (Paths.get(rootPath).toAbsolutePath().getParent().toString() + File - .separator + val.getValue().replace("../", "")).replace('\\', '/'); - } - else { - importFileName = (Paths.get(path).toAbsolutePath().getParent().toString() + File - .separator + val.getValue().replace("../", "")).replace('\\', '/'); - } - retMap.put("importFileName", importFileName); - retMap.put("importRelativeName", val.getValue()); - } - } - } - return retMap; - } - - /** - * This method is used to filter the imports which already gets processed in previous step. 
- * It handles the use case of cyclic dependency in imports which may cause Stack Overflow - * exception - * - * @param customImports the custom imports - * @param importNameDetails the import name details - * @return the list containing filtered imports - */ - private List> filterImportsForRecursion(List> - customImports, Map importNameDetails){ - for (Map map1 : customImports) { - for (Map.Entry entry : map1.entrySet()) { - Map innerMostMap = (Map) entry.getValue(); - Iterator> it = innerMostMap.entrySet().iterator(); - while (it.hasNext()) { - it.next(); - if (processedImports.contains(importNameDetails.get("importFileName"))) { - it.remove(); - } - } - } - } - - // Remove Empty elements - Iterator> itr = customImports.iterator(); - while(itr.hasNext()) { - Map innerMap = itr.next(); - Predicate predicate = p-> p.values().isEmpty(); - innerMap.values().removeIf(predicate); - } - - return customImports; - } - @SuppressWarnings("unchecked") private LinkedHashMap _getCustomTypes(Object typeDefinitions,ArrayList alImports) { @@ -574,8 +396,6 @@ public class ToscaTemplate extends Object { log.error("ToscaTemplate - _handleNestedToscaTemplatesWithTopology - Nested Topologies Loop: too many levels, aborting"); return; } - // Reset Processed Imports for nested templates - this.processedImports = new HashSet<>(); for(Map.Entry me: nestedToscaTplsWithTopology.entrySet()) { String fname = me.getKey(); LinkedHashMap toscaTpl = @@ -836,6 +656,10 @@ public class ToscaTemplate extends Object { return nestedToscaTemplatesWithTopology; } + public ConcurrentHashMap getNestedTopologyTemplates() { + return nestedToscaTplsWithTopology; + } + @Override public String toString() { return "ToscaTemplate{" + diff --git a/src/test/java/org/openecomp/sdc/toscaparser/api/JToscaImportTest.java b/src/test/java/org/openecomp/sdc/toscaparser/api/JToscaImportTest.java deleted file mode 100644 index c8a30fa..0000000 --- a/src/test/java/org/openecomp/sdc/toscaparser/api/JToscaImportTest.java +++ 
/dev/null @@ -1,64 +0,0 @@ -package org.openecomp.sdc.toscaparser.api; - -import org.junit.Test; -import org.openecomp.sdc.toscaparser.api.common.JToscaException; -import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; - -import java.io.File; -import java.util.ArrayList; -import java.util.List; -import java.util.stream.Collectors; - -import static org.junit.Assert.assertEquals; - -public class JToscaImportTest { - - @Test - public void testNoMissingTypeValidationError() throws JToscaException { - String fileStr = JToscaImportTest.class.getClassLoader().getResource - ("csars/sdc-onboarding_csar.csar").getFile(); - File file = new File(fileStr); - new ToscaTemplate(file.getAbsolutePath(), null, true, null); - List missingTypeErrors = ThreadLocalsHolder.getCollector() - .getValidationIssueReport() - .stream() - .filter(s -> s.contains("JE136")) - .collect(Collectors.toList()); - assertEquals(0, missingTypeErrors.size()); - } - - @Test - public void testNoStackOverFlowError() { - Exception jte = null; - try { - String fileStr = JToscaImportTest.class.getClassLoader().getResource - ("csars/sdc-onboarding_csar.csar").getFile(); - File file = new File(fileStr); - new ToscaTemplate(file.getAbsolutePath(), null, true, null); - } catch (Exception e){ - jte = e; - } - assertEquals(null, jte); - } - - @Test - public void testNoInvalidImports() throws JToscaException { - List fileNames = new ArrayList<>(); - fileNames.add("csars/tmpCSAR_Huawei_vSPGW_fixed.csar"); - fileNames.add("csars/sdc-onboarding_csar.csar"); - fileNames.add("csars/resource-Spgw-csar-ZTE.csar"); - - for (String fileName : fileNames) { - String fileStr = JToscaImportTest.class.getClassLoader().getResource(fileName).getFile(); - File file = new File(fileStr); - new ToscaTemplate(file.getAbsolutePath(), null, true, null); - List invalidImportErrors = ThreadLocalsHolder.getCollector() - .getValidationIssueReport() - .stream() - .filter(s -> s.contains("JE195")) - .collect(Collectors.toList()); - 
assertEquals(0, invalidImportErrors.size()); - } - } - -} diff --git a/src/test/resources/csars/resource-Spgw-csar-ZTE.csar b/src/test/resources/csars/resource-Spgw-csar-ZTE.csar deleted file mode 100644 index 58c3ddd..0000000 Binary files a/src/test/resources/csars/resource-Spgw-csar-ZTE.csar and /dev/null differ diff --git a/src/test/resources/csars/sdc-onboarding_csar.csar b/src/test/resources/csars/sdc-onboarding_csar.csar deleted file mode 100644 index e1c3267..0000000 Binary files a/src/test/resources/csars/sdc-onboarding_csar.csar and /dev/null differ diff --git a/version.properties b/version.properties index 10a6323..a6be0db 100644 --- a/version.properties +++ b/version.properties @@ -5,7 +5,7 @@ major=1 minor=2 -patch=1 +patch=2 base_version=${major}.${minor}.${patch} -- cgit 1.2.3-korg From 121a7a375d1cea2764dfc34d82f4e2f252099a33 Mon Sep 17 00:00:00 2001 From: Michael Lando Date: Tue, 27 Feb 2018 16:10:32 +0200 Subject: Revert "vLAN Tagging - Support Policies" This reverts commit 5e7130be2350a29d8dbc65f52cdca596a188114f. 
Issue-ID: SDC-1056 Change-Id: I8526a8965e2a69c9d9189e6628cbc36e92282228 Signed-off-by: Michael Lando --- LICENSE.TXT | 6 +- .../sdc/toscaparser/api/ImportsLoader.java | 73 ++++--- .../org/openecomp/sdc/toscaparser/api/Policy.java | 6 - .../sdc/toscaparser/api/ToscaTemplate.java | 216 +++++++++++++++++++-- .../sdc/toscaparser/api/JToscaImportTest.java | 64 ++++++ .../resources/csars/resource-Spgw-csar-ZTE.csar | Bin 0 -> 31639 bytes src/test/resources/csars/sdc-onboarding_csar.csar | Bin 0 -> 80596 bytes 7 files changed, 309 insertions(+), 56 deletions(-) create mode 100644 src/test/java/org/openecomp/sdc/toscaparser/api/JToscaImportTest.java create mode 100644 src/test/resources/csars/resource-Spgw-csar-ZTE.csar create mode 100644 src/test/resources/csars/sdc-onboarding_csar.csar diff --git a/LICENSE.TXT b/LICENSE.TXT index 724329f..2b91311 100644 --- a/LICENSE.TXT +++ b/LICENSE.TXT @@ -1,8 +1,8 @@ -/* +/* * ============LICENSE_START========================================== * =================================================================== -* Copyright © 2017 AT&T Intellectual Property. -* Copyright © 2017 Amdocs +* Copyright © 2018 AT&T Intellectual Property. +* Copyright © 2018 Amdocs * All rights reserved. 
* =================================================================== * Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/ImportsLoader.java b/src/main/java/org/openecomp/sdc/toscaparser/api/ImportsLoader.java index 5e94378..b2a0da7 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/ImportsLoader.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/ImportsLoader.java @@ -28,6 +28,7 @@ public class ImportsLoader { private ArrayList typeDefinitionList; private LinkedHashMap customDefs; + private LinkedHashMap allCustomDefs; private ArrayList> nestedToscaTpls; private LinkedHashMap repositories; @@ -39,6 +40,7 @@ public class ImportsLoader { this.importslist = _importslist; customDefs = new LinkedHashMap(); + allCustomDefs = new LinkedHashMap(); nestedToscaTpls = new ArrayList>(); if((_path == null || _path.isEmpty()) && tpl == null) { //msg = _('Input tosca template is not provided.') @@ -65,7 +67,7 @@ public class ImportsLoader { } public LinkedHashMap getCustomDefs() { - return customDefs; + return allCustomDefs; } public ArrayList> getNestedToscaTpls() { @@ -131,33 +133,50 @@ public class ImportsLoader { } } - @SuppressWarnings("unchecked") + /** + * This method is used to get consolidated custom definitions by passing custom Types from + * each import. The resultant collection is then passed back which contains all import + * definitions + * + * @param customType the custom type + * @param namespacePrefix the namespace prefix + */ + @SuppressWarnings("unchecked") private void _updateCustomDefs(LinkedHashMap customType, String namespacePrefix) { - LinkedHashMap outerCustomTypes;// = new LinkedHashMap(); - for(String typeDef: typeDefinitionList) { - if(typeDef.equals("imports")) { - // imports are ArrayList... 
- customDefs.put("imports",(ArrayList)customType.get(typeDef)); - } - else { - outerCustomTypes = (LinkedHashMap)customType.get(typeDef); - if(outerCustomTypes != null) { - if(namespacePrefix != null && !namespacePrefix.isEmpty()) { - LinkedHashMap prefixCustomTypes = new LinkedHashMap(); - for(Map.Entry me: outerCustomTypes.entrySet()) { - String typeDefKey = me.getKey(); - String nameSpacePrefixToKey = namespacePrefix + "." + typeDefKey; - prefixCustomTypes.put(nameSpacePrefixToKey, outerCustomTypes.get(typeDefKey)); - } - customDefs.putAll(prefixCustomTypes); - } - else { - customDefs.putAll(outerCustomTypes); - } - } - } - } - } + LinkedHashMap outerCustomTypes; + for(String typeDef: typeDefinitionList) { + if(typeDef.equals("imports")) { + customDefs.put("imports", customType.get(typeDef)); + if (allCustomDefs.isEmpty() || allCustomDefs.get("imports") == null){ + allCustomDefs.put("imports",customType.get(typeDef)); + } + else if (customType.get(typeDef) != null){ + Set allCustomImports = new HashSet<>((ArrayList)allCustomDefs.get("imports")); + allCustomImports.addAll((ArrayList) customType.get(typeDef)); + allCustomDefs.put("imports", new ArrayList<>(allCustomImports)); + } + } + else { + outerCustomTypes = (LinkedHashMap)customType.get(typeDef); + if(outerCustomTypes != null) { + if(namespacePrefix != null && !namespacePrefix.isEmpty()) { + LinkedHashMap prefixCustomTypes = new LinkedHashMap(); + for(Map.Entry me: outerCustomTypes.entrySet()) { + String typeDefKey = me.getKey(); + String nameSpacePrefixToKey = namespacePrefix + "." 
+ typeDefKey; + prefixCustomTypes.put(nameSpacePrefixToKey, outerCustomTypes.get(typeDefKey)); + } + customDefs.putAll(prefixCustomTypes); + allCustomDefs.putAll(prefixCustomTypes); + } + else { + customDefs.putAll(outerCustomTypes); + allCustomDefs.putAll(outerCustomTypes); + } + } + } + } + } private void _updateNestedToscaTpls(String fullFileName,LinkedHashMap customTpl) { if(fullFileName != null && customTpl != null) { diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/Policy.java b/src/main/java/org/openecomp/sdc/toscaparser/api/Policy.java index 1f536f8..26805bd 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/Policy.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/Policy.java @@ -119,12 +119,6 @@ public class Policy extends EntityTemplate { ", properties=" + properties + '}'; } - - public int compareTo(Policy other){ - if(this.equals(other)) - return 0; - return this.getName().compareTo(other.getName()) == 0 ? this.getType().compareTo(other.getType()) : this.getName().compareTo(other.getName()); - } } /*python diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/ToscaTemplate.java b/src/main/java/org/openecomp/sdc/toscaparser/api/ToscaTemplate.java index 7553414..e96ca56 100644 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/ToscaTemplate.java +++ b/src/main/java/org/openecomp/sdc/toscaparser/api/ToscaTemplate.java @@ -9,6 +9,9 @@ import java.io.IOException; import java.io.InputStream; import java.util.*; import java.util.concurrent.ConcurrentHashMap; +import java.nio.file.Files; +import java.util.function.Predicate; +import java.nio.file.Paths; import org.openecomp.sdc.toscaparser.api.common.ValidationIssueCollector; import org.openecomp.sdc.toscaparser.api.common.JToscaException; @@ -70,6 +73,7 @@ public class ToscaTemplate extends Object { private boolean isFile; private String path; private String inputPath; + private String rootPath; private LinkedHashMap parsedParams; private boolean resolveGetInput; 
private LinkedHashMap tpl; @@ -91,6 +95,7 @@ public class ToscaTemplate extends Object { private String csarTempDir; private int nestingLoopCounter; private LinkedHashMap> metaProperties; + private Set processedImports; public ToscaTemplate(String _path, LinkedHashMap _parsedParams, @@ -193,6 +198,9 @@ public class ToscaTemplate extends Object { if(tpl != null) { parsedParams = _parsedParams; _validateField(); + this.rootPath = path; + this.processedImports = new HashSet(); + this.imports = _tplImports(); this.version = _tplVersion(); this.metaData = _tplMetaData(); this.relationshipTypes = _tplRelationshipTypes(); @@ -305,30 +313,200 @@ public class ToscaTemplate extends Object { private ArrayList _policies() { return topologyTemplate.getPolicies(); } - - private LinkedHashMap _getAllCustomDefs(ArrayList alImports) { - + + /** + * This method is used to get consolidated custom definitions from all imports + * It is logically divided in two parts to handle imports; map and list formats. + * Before processing the imports; it sorts them to make sure the current directory imports are + * being processed first and then others. Once sorted; it processes each import one by one in + * recursive manner. + * To avoid cyclic dependency among imports; this method uses a set to keep track of all + * imports which are already processed and filters the imports which occurs more than once. 
+ * + * @param alImports all imports which needs to be processed + * @return the linked hash map containing all import definitions + */ + private LinkedHashMap _getAllCustomDefs(Object alImports) { + String types[] = { - IMPORTS, NODE_TYPES, CAPABILITY_TYPES, RELATIONSHIP_TYPES, - DATA_TYPES, INTERFACE_TYPES, POLICY_TYPES, GROUP_TYPES + IMPORTS, NODE_TYPES, CAPABILITY_TYPES, RELATIONSHIP_TYPES, + DATA_TYPES, INTERFACE_TYPES, POLICY_TYPES, GROUP_TYPES }; - LinkedHashMap customDefsFinal = new LinkedHashMap(); - LinkedHashMap customDefs = _getCustomTypes(types,alImports); - if(customDefs != null) { - customDefsFinal.putAll(customDefs); - if(customDefs.get(IMPORTS) != null) { - @SuppressWarnings("unchecked") - LinkedHashMap importDefs = _getAllCustomDefs((ArrayList)customDefs.get(IMPORTS)); - customDefsFinal.putAll(importDefs); + LinkedHashMap customDefsFinal = new LinkedHashMap<>(); + + List> imports = (List>) alImports; + if (imports != null && !imports.isEmpty()) { + if (imports.get(0) instanceof LinkedHashMap) { + imports = sortImports(imports); + + for (Map map : imports) { + List> singleImportList = new ArrayList(); + singleImportList.add(map); + + Map importNameDetails = getValidFileNameForImportReference(singleImportList); + singleImportList = filterImportsForRecursion(singleImportList, importNameDetails); + + if(!singleImportList.get(0).isEmpty()){ + LinkedHashMap customDefs = _getCustomTypes(types, new ArrayList<>(singleImportList)); + processedImports.add(importNameDetails.get("importFileName")); + + if (customDefs != null) { + customDefsFinal.putAll(customDefs); + + if (customDefs.get(IMPORTS) != null) { + resetPathForRecursiveImports(importNameDetails.get("importRelativeName")); + LinkedHashMap importDefs = _getAllCustomDefs(customDefs.get(IMPORTS)); + customDefsFinal.putAll(importDefs); + } + } + } + } + } else { + LinkedHashMap customDefs = _getCustomTypes(types, new ArrayList<>(imports)); + if (customDefs != null) { + customDefsFinal.putAll(customDefs); 
+ + if (customDefs.get(IMPORTS) != null) { + LinkedHashMap importDefs = _getAllCustomDefs(customDefs.get(IMPORTS)); + customDefsFinal.putAll(importDefs); + } + } } } - - // As imports are not custom_types, remove from the dict - customDefsFinal.remove(IMPORTS); + + // As imports are not custom_types, remove from the dict + customDefsFinal.remove(IMPORTS); return customDefsFinal; } + /** + * This method is used to sort the imports in order so that same directory + * imports will be processed first + * + * @param customImports the custom imports + * @return the sorted list of imports + */ + private List> sortImports(List> customImports){ + List> finalList1 = new ArrayList<>(); + List> finalList2 = new ArrayList<>(); + Iterator> itr = customImports.iterator(); + while(itr.hasNext()) { + Map innerMap = itr.next(); + if (innerMap.toString().contains("../")) { + finalList2.add(innerMap); + itr.remove(); + } + else if (innerMap.toString().contains("/")) { + finalList1.add(innerMap); + itr.remove(); + } + } + + customImports.addAll(finalList1); + customImports.addAll(finalList2); + return customImports; + } + + /** + * This method is used to reset PATH variable after processing of current import file is done + * This is required because of relative path nature of imports present in files. + * + * @param currImportRelativeName the current import relative name + */ + private void resetPathForRecursiveImports(String currImportRelativeName){ + path = getPath(path, currImportRelativeName); + } + + /** + * This is a recursive method which starts from current import and then recursively finds a + * valid path relative to current import file name. 
+ * By doing this it handles all nested hierarchy of imports defined in CSARs + * + * @param path the path + * @param importFileName the import file name + * @return the string containing updated path value + */ + private String getPath(String path, String importFileName){ + String tempFullPath = (Paths.get(path).toAbsolutePath().getParent() + .toString() + File.separator + importFileName.replace("../", "")).replace('\\', '/'); + String tempPartialPath = (Paths.get(path).toAbsolutePath().getParent().toString()).replace('\\', '/'); + if(Files.exists(Paths.get(tempFullPath))) + return tempFullPath; + else + return getPath(tempPartialPath, importFileName); + } + + /** + * This method is used to get full path name for the file which needs to be processed. It helps + * in situation where files are present in different directory and are references as relative + * paths. + * + * @param customImports the custom imports + * @return the map containing import file full and relative paths + */ + private Map getValidFileNameForImportReference(List> + customImports){ + String importFileName; + Map retMap = new HashMap<>(); + for (Map map1 : customImports) { + for (Map.Entry entry : map1.entrySet()) { + Map innerMostMap = (Map) entry.getValue(); + Iterator> it = innerMostMap.entrySet().iterator(); + while (it.hasNext()) { + Map.Entry val = it.next(); + if(val.getValue().contains("/")){ + importFileName = (Paths.get(rootPath).toAbsolutePath().getParent().toString() + File + .separator + val.getValue().replace("../", "")).replace('\\', '/'); + } + else { + importFileName = (Paths.get(path).toAbsolutePath().getParent().toString() + File + .separator + val.getValue().replace("../", "")).replace('\\', '/'); + } + retMap.put("importFileName", importFileName); + retMap.put("importRelativeName", val.getValue()); + } + } + } + return retMap; + } + + /** + * This method is used to filter the imports which already gets processed in previous step. 
+ * It handles the use case of cyclic dependency in imports which may cause Stack Overflow + * exception + * + * @param customImports the custom imports + * @param importNameDetails the import name details + * @return the list containing filtered imports + */ + private List> filterImportsForRecursion(List> + customImports, Map importNameDetails){ + for (Map map1 : customImports) { + for (Map.Entry entry : map1.entrySet()) { + Map innerMostMap = (Map) entry.getValue(); + Iterator> it = innerMostMap.entrySet().iterator(); + while (it.hasNext()) { + it.next(); + if (processedImports.contains(importNameDetails.get("importFileName"))) { + it.remove(); + } + } + } + } + + // Remove Empty elements + Iterator> itr = customImports.iterator(); + while(itr.hasNext()) { + Map innerMap = itr.next(); + Predicate predicate = p-> p.values().isEmpty(); + innerMap.values().removeIf(predicate); + } + + return customImports; + } + @SuppressWarnings("unchecked") private LinkedHashMap _getCustomTypes(Object typeDefinitions,ArrayList alImports) { @@ -396,6 +574,8 @@ public class ToscaTemplate extends Object { log.error("ToscaTemplate - _handleNestedToscaTemplatesWithTopology - Nested Topologies Loop: too many levels, aborting"); return; } + // Reset Processed Imports for nested templates + this.processedImports = new HashSet<>(); for(Map.Entry me: nestedToscaTplsWithTopology.entrySet()) { String fname = me.getKey(); LinkedHashMap toscaTpl = @@ -656,10 +836,6 @@ public class ToscaTemplate extends Object { return nestedToscaTemplatesWithTopology; } - public ConcurrentHashMap getNestedTopologyTemplates() { - return nestedToscaTplsWithTopology; - } - @Override public String toString() { return "ToscaTemplate{" + diff --git a/src/test/java/org/openecomp/sdc/toscaparser/api/JToscaImportTest.java b/src/test/java/org/openecomp/sdc/toscaparser/api/JToscaImportTest.java new file mode 100644 index 0000000..c8a30fa --- /dev/null +++ 
b/src/test/java/org/openecomp/sdc/toscaparser/api/JToscaImportTest.java @@ -0,0 +1,64 @@ +package org.openecomp.sdc.toscaparser.api; + +import org.junit.Test; +import org.openecomp.sdc.toscaparser.api.common.JToscaException; +import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +import java.io.File; +import java.util.ArrayList; +import java.util.List; +import java.util.stream.Collectors; + +import static org.junit.Assert.assertEquals; + +public class JToscaImportTest { + + @Test + public void testNoMissingTypeValidationError() throws JToscaException { + String fileStr = JToscaImportTest.class.getClassLoader().getResource + ("csars/sdc-onboarding_csar.csar").getFile(); + File file = new File(fileStr); + new ToscaTemplate(file.getAbsolutePath(), null, true, null); + List missingTypeErrors = ThreadLocalsHolder.getCollector() + .getValidationIssueReport() + .stream() + .filter(s -> s.contains("JE136")) + .collect(Collectors.toList()); + assertEquals(0, missingTypeErrors.size()); + } + + @Test + public void testNoStackOverFlowError() { + Exception jte = null; + try { + String fileStr = JToscaImportTest.class.getClassLoader().getResource + ("csars/sdc-onboarding_csar.csar").getFile(); + File file = new File(fileStr); + new ToscaTemplate(file.getAbsolutePath(), null, true, null); + } catch (Exception e){ + jte = e; + } + assertEquals(null, jte); + } + + @Test + public void testNoInvalidImports() throws JToscaException { + List fileNames = new ArrayList<>(); + fileNames.add("csars/tmpCSAR_Huawei_vSPGW_fixed.csar"); + fileNames.add("csars/sdc-onboarding_csar.csar"); + fileNames.add("csars/resource-Spgw-csar-ZTE.csar"); + + for (String fileName : fileNames) { + String fileStr = JToscaImportTest.class.getClassLoader().getResource(fileName).getFile(); + File file = new File(fileStr); + new ToscaTemplate(file.getAbsolutePath(), null, true, null); + List invalidImportErrors = ThreadLocalsHolder.getCollector() + .getValidationIssueReport() + .stream() + .filter(s 
-> s.contains("JE195")) + .collect(Collectors.toList()); + assertEquals(0, invalidImportErrors.size()); + } + } + +} diff --git a/src/test/resources/csars/resource-Spgw-csar-ZTE.csar b/src/test/resources/csars/resource-Spgw-csar-ZTE.csar new file mode 100644 index 0000000..58c3ddd Binary files /dev/null and b/src/test/resources/csars/resource-Spgw-csar-ZTE.csar differ diff --git a/src/test/resources/csars/sdc-onboarding_csar.csar b/src/test/resources/csars/sdc-onboarding_csar.csar new file mode 100644 index 0000000..e1c3267 Binary files /dev/null and b/src/test/resources/csars/sdc-onboarding_csar.csar differ -- cgit 1.2.3-korg From 76bdc498d5e7a1e5515714fe042bf111f10d6c26 Mon Sep 17 00:00:00 2001 From: Yuli Shlosberg Date: Tue, 6 Mar 2018 17:51:15 +0200 Subject: update JTOSCA package names Change-Id: I8e9ed44a57521047c5c8218dfeef0a3193d570ee Issue-ID: SDC-950 Signed-off-by: Yuli Shlosberg --- README.md | 10 +- pom.xml | 6 +- .../sdc/toscaparser/api/CapabilityAssignment.java | 148 +++ .../sdc/toscaparser/api/CapabilityAssignments.java | 49 + .../org/onap/sdc/toscaparser/api/DataEntity.java | 449 ++++++++ .../onap/sdc/toscaparser/api/EntityTemplate.java | 851 ++++++++++++++ .../java/org/onap/sdc/toscaparser/api/Group.java | 137 +++ .../onap/sdc/toscaparser/api/ImportsLoader.java | 746 ++++++++++++ .../org/onap/sdc/toscaparser/api/NodeTemplate.java | 737 ++++++++++++ .../java/org/onap/sdc/toscaparser/api/Policy.java | 188 +++ .../org/onap/sdc/toscaparser/api/Property.java | 177 +++ .../sdc/toscaparser/api/RelationshipTemplate.java | 199 ++++ .../org/onap/sdc/toscaparser/api/Repository.java | 117 ++ .../sdc/toscaparser/api/RequirementAssignment.java | 85 ++ .../toscaparser/api/RequirementAssignments.java | 37 + .../sdc/toscaparser/api/SubstitutionMappings.java | 519 +++++++++ .../onap/sdc/toscaparser/api/TopologyTemplate.java | 858 ++++++++++++++ .../org/onap/sdc/toscaparser/api/ToscaGraph.java | 109 ++ .../onap/sdc/toscaparser/api/ToscaTemplate.java | 1200 
++++++++++++++++++++ .../org/onap/sdc/toscaparser/api/Triggers.java | 183 +++ .../onap/sdc/toscaparser/api/UnsupportedType.java | 78 ++ .../toscaparser/api/common/JToscaException.java | 27 + .../api/common/JToscaValidationIssue.java | 35 + .../sdc/toscaparser/api/common/TOSCAException.java | 39 + .../api/common/ValidationIssueCollector.java | 35 + .../toscaparser/api/elements/ArtifactTypeDef.java | 105 ++ .../sdc/toscaparser/api/elements/AttributeDef.java | 40 + .../api/elements/CapabilityTypeDef.java | 222 ++++ .../sdc/toscaparser/api/elements/DataType.java | 116 ++ .../sdc/toscaparser/api/elements/EntityType.java | 418 +++++++ .../sdc/toscaparser/api/elements/GroupType.java | 215 ++++ .../toscaparser/api/elements/InterfacesDef.java | 228 ++++ .../sdc/toscaparser/api/elements/Metadata.java | 41 + .../sdc/toscaparser/api/elements/NodeType.java | 525 +++++++++ .../sdc/toscaparser/api/elements/PolicyType.java | 291 +++++ .../sdc/toscaparser/api/elements/PortSpec.java | 160 +++ .../sdc/toscaparser/api/elements/PropertyDef.java | 231 ++++ .../toscaparser/api/elements/RelationshipType.java | 101 ++ .../sdc/toscaparser/api/elements/ScalarUnit.java | 262 +++++ .../api/elements/ScalarUnitFrequency.java | 14 + .../toscaparser/api/elements/ScalarUnitSize.java | 19 + .../toscaparser/api/elements/ScalarUnitTime.java | 17 + .../api/elements/StatefulEntityType.java | 218 ++++ .../toscaparser/api/elements/TypeValidation.java | 153 +++ .../api/elements/constraints/Constraint.java | 243 ++++ .../api/elements/constraints/Equal.java | 61 + .../api/elements/constraints/GreaterOrEqual.java | 114 ++ .../api/elements/constraints/GreaterThan.java | 103 ++ .../api/elements/constraints/InRange.java | 171 +++ .../api/elements/constraints/Length.java | 79 ++ .../api/elements/constraints/LessOrEqual.java | 107 ++ .../api/elements/constraints/LessThan.java | 104 ++ .../api/elements/constraints/MaxLength.java | 91 ++ .../api/elements/constraints/MinLength.java | 91 ++ 
.../api/elements/constraints/Pattern.java | 97 ++ .../api/elements/constraints/Schema.java | 278 +++++ .../api/elements/constraints/ValidValues.java | 84 ++ .../sdc/toscaparser/api/extensions/ExtTools.java | 192 ++++ .../onap/sdc/toscaparser/api/functions/Concat.java | 77 ++ .../sdc/toscaparser/api/functions/Function.java | 235 ++++ .../toscaparser/api/functions/GetAttribute.java | 524 +++++++++ .../sdc/toscaparser/api/functions/GetInput.java | 137 +++ .../api/functions/GetOperationOutput.java | 229 ++++ .../sdc/toscaparser/api/functions/GetProperty.java | 628 ++++++++++ .../onap/sdc/toscaparser/api/functions/Token.java | 111 ++ .../onap/sdc/toscaparser/api/parameters/Input.java | 233 ++++ .../sdc/toscaparser/api/parameters/Output.java | 110 ++ .../org/onap/sdc/toscaparser/api/prereq/CSAR.java | 785 +++++++++++++ .../onap/sdc/toscaparser/api/utils/CopyUtils.java | 29 + .../onap/sdc/toscaparser/api/utils/DumpUtils.java | 55 + .../toscaparser/api/utils/JToscaErrorCodes.java | 32 + .../api/utils/TOSCAVersionProperty.java | 182 +++ .../toscaparser/api/utils/ThreadLocalsHolder.java | 24 + .../onap/sdc/toscaparser/api/utils/UrlUtils.java | 123 ++ .../sdc/toscaparser/api/utils/ValidateUtils.java | 425 +++++++ .../sdc/toscaparser/api/CapabilityAssignment.java | 148 --- .../sdc/toscaparser/api/CapabilityAssignments.java | 51 - .../openecomp/sdc/toscaparser/api/DataEntity.java | 449 -------- .../sdc/toscaparser/api/EntityTemplate.java | 850 -------------- .../org/openecomp/sdc/toscaparser/api/Group.java | 138 --- .../sdc/toscaparser/api/ImportsLoader.java | 746 ------------ .../sdc/toscaparser/api/NodeTemplate.java | 737 ------------ .../org/openecomp/sdc/toscaparser/api/Policy.java | 188 --- .../openecomp/sdc/toscaparser/api/Property.java | 177 --- .../sdc/toscaparser/api/RelationshipTemplate.java | 199 ---- .../openecomp/sdc/toscaparser/api/Repository.java | 118 -- .../sdc/toscaparser/api/RequirementAssignment.java | 85 -- .../toscaparser/api/RequirementAssignments.java | 
39 - .../sdc/toscaparser/api/SubstitutionMappings.java | 519 --------- .../sdc/toscaparser/api/TopologyTemplate.java | 858 -------------- .../openecomp/sdc/toscaparser/api/ToscaGraph.java | 109 -- .../sdc/toscaparser/api/ToscaTemplate.java | 1200 -------------------- .../openecomp/sdc/toscaparser/api/Triggers.java | 184 --- .../sdc/toscaparser/api/UnsupportedType.java | 79 -- .../toscaparser/api/common/JToscaException.java | 27 - .../api/common/JToscaValidationIssue.java | 35 - .../sdc/toscaparser/api/common/TOSCAException.java | 39 - .../api/common/ValidationIssueCollector.java | 35 - .../toscaparser/api/elements/ArtifactTypeDef.java | 105 -- .../sdc/toscaparser/api/elements/AttributeDef.java | 40 - .../api/elements/CapabilityTypeDef.java | 224 ---- .../sdc/toscaparser/api/elements/DataType.java | 116 -- .../sdc/toscaparser/api/elements/EntityType.java | 418 ------- .../sdc/toscaparser/api/elements/GroupType.java | 216 ---- .../toscaparser/api/elements/InterfacesDef.java | 228 ---- .../sdc/toscaparser/api/elements/Metadata.java | 41 - .../sdc/toscaparser/api/elements/NodeType.java | 525 --------- .../sdc/toscaparser/api/elements/PolicyType.java | 291 ----- .../sdc/toscaparser/api/elements/PortSpec.java | 161 --- .../sdc/toscaparser/api/elements/PropertyDef.java | 231 ---- .../toscaparser/api/elements/RelationshipType.java | 102 -- .../sdc/toscaparser/api/elements/ScalarUnit.java | 262 ----- .../api/elements/ScalarUnitFrequency.java | 14 - .../toscaparser/api/elements/ScalarUnitSize.java | 19 - .../toscaparser/api/elements/ScalarUnitTime.java | 17 - .../api/elements/StatefulEntityType.java | 218 ---- .../toscaparser/api/elements/TypeValidation.java | 153 --- .../api/elements/constraints/Constraint.java | 243 ---- .../api/elements/constraints/Equal.java | 61 - .../api/elements/constraints/GreaterOrEqual.java | 114 -- .../api/elements/constraints/GreaterThan.java | 103 -- .../api/elements/constraints/InRange.java | 172 --- .../api/elements/constraints/Length.java | 
80 -- .../api/elements/constraints/LessOrEqual.java | 107 -- .../api/elements/constraints/LessThan.java | 105 -- .../api/elements/constraints/MaxLength.java | 91 -- .../api/elements/constraints/MinLength.java | 91 -- .../api/elements/constraints/Pattern.java | 97 -- .../api/elements/constraints/Schema.java | 279 ----- .../api/elements/constraints/Schema.java.orig | 281 ----- .../api/elements/constraints/ValidValues.java | 84 -- .../sdc/toscaparser/api/extensions/ExtTools.java | 192 ---- .../sdc/toscaparser/api/functions/Concat.java | 78 -- .../sdc/toscaparser/api/functions/Function.java | 236 ---- .../toscaparser/api/functions/GetAttribute.java | 523 --------- .../sdc/toscaparser/api/functions/GetInput.java | 137 --- .../api/functions/GetOperationOutput.java | 226 ---- .../sdc/toscaparser/api/functions/GetProperty.java | 627 ---------- .../sdc/toscaparser/api/functions/Token.java | 111 -- .../sdc/toscaparser/api/parameters/Input.java | 233 ---- .../sdc/toscaparser/api/parameters/Output.java | 110 -- .../openecomp/sdc/toscaparser/api/prereq/CSAR.java | 785 ------------- .../sdc/toscaparser/api/prereq/CSAR.java.orig | 767 ------------- .../sdc/toscaparser/api/utils/CopyUtils.java | 29 - .../sdc/toscaparser/api/utils/DumpUtils.java | 55 - .../toscaparser/api/utils/JToscaErrorCodes.java | 32 - .../api/utils/TOSCAVersionProperty.java | 182 --- .../toscaparser/api/utils/ThreadLocalsHolder.java | 24 - .../sdc/toscaparser/api/utils/UrlUtils.java | 123 -- .../sdc/toscaparser/api/utils/ValidateUtils.java | 425 ------- .../sdc/toscaparser/api/GetValidationIssues.java | 79 ++ .../onap/sdc/toscaparser/api/JToscaImportTest.java | 64 ++ .../sdc/toscaparser/api/JToscaMetadataParse.java | 61 + .../toscaparser/api/elements/EntityTypeTest.java | 55 + .../sdc/toscaparser/api/GetValidationIssues.java | 81 -- .../sdc/toscaparser/api/JToscaImportTest.java | 64 -- .../sdc/toscaparser/api/JToscaMetadataParse.java | 61 - .../toscaparser/api/elements/EntityTypeTest.java | 56 - 
version.properties | 4 +- 159 files changed, 16402 insertions(+), 17466 deletions(-) create mode 100644 src/main/java/org/onap/sdc/toscaparser/api/CapabilityAssignment.java create mode 100644 src/main/java/org/onap/sdc/toscaparser/api/CapabilityAssignments.java create mode 100644 src/main/java/org/onap/sdc/toscaparser/api/DataEntity.java create mode 100644 src/main/java/org/onap/sdc/toscaparser/api/EntityTemplate.java create mode 100644 src/main/java/org/onap/sdc/toscaparser/api/Group.java create mode 100644 src/main/java/org/onap/sdc/toscaparser/api/ImportsLoader.java create mode 100644 src/main/java/org/onap/sdc/toscaparser/api/NodeTemplate.java create mode 100644 src/main/java/org/onap/sdc/toscaparser/api/Policy.java create mode 100644 src/main/java/org/onap/sdc/toscaparser/api/Property.java create mode 100644 src/main/java/org/onap/sdc/toscaparser/api/RelationshipTemplate.java create mode 100644 src/main/java/org/onap/sdc/toscaparser/api/Repository.java create mode 100644 src/main/java/org/onap/sdc/toscaparser/api/RequirementAssignment.java create mode 100644 src/main/java/org/onap/sdc/toscaparser/api/RequirementAssignments.java create mode 100644 src/main/java/org/onap/sdc/toscaparser/api/SubstitutionMappings.java create mode 100644 src/main/java/org/onap/sdc/toscaparser/api/TopologyTemplate.java create mode 100644 src/main/java/org/onap/sdc/toscaparser/api/ToscaGraph.java create mode 100644 src/main/java/org/onap/sdc/toscaparser/api/ToscaTemplate.java create mode 100644 src/main/java/org/onap/sdc/toscaparser/api/Triggers.java create mode 100644 src/main/java/org/onap/sdc/toscaparser/api/UnsupportedType.java create mode 100644 src/main/java/org/onap/sdc/toscaparser/api/common/JToscaException.java create mode 100644 src/main/java/org/onap/sdc/toscaparser/api/common/JToscaValidationIssue.java create mode 100644 src/main/java/org/onap/sdc/toscaparser/api/common/TOSCAException.java create mode 100644 
src/main/java/org/onap/sdc/toscaparser/api/common/ValidationIssueCollector.java create mode 100644 src/main/java/org/onap/sdc/toscaparser/api/elements/ArtifactTypeDef.java create mode 100644 src/main/java/org/onap/sdc/toscaparser/api/elements/AttributeDef.java create mode 100644 src/main/java/org/onap/sdc/toscaparser/api/elements/CapabilityTypeDef.java create mode 100644 src/main/java/org/onap/sdc/toscaparser/api/elements/DataType.java create mode 100644 src/main/java/org/onap/sdc/toscaparser/api/elements/EntityType.java create mode 100644 src/main/java/org/onap/sdc/toscaparser/api/elements/GroupType.java create mode 100644 src/main/java/org/onap/sdc/toscaparser/api/elements/InterfacesDef.java create mode 100644 src/main/java/org/onap/sdc/toscaparser/api/elements/Metadata.java create mode 100644 src/main/java/org/onap/sdc/toscaparser/api/elements/NodeType.java create mode 100644 src/main/java/org/onap/sdc/toscaparser/api/elements/PolicyType.java create mode 100644 src/main/java/org/onap/sdc/toscaparser/api/elements/PortSpec.java create mode 100644 src/main/java/org/onap/sdc/toscaparser/api/elements/PropertyDef.java create mode 100644 src/main/java/org/onap/sdc/toscaparser/api/elements/RelationshipType.java create mode 100644 src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnit.java create mode 100644 src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitFrequency.java create mode 100644 src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitSize.java create mode 100644 src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitTime.java create mode 100644 src/main/java/org/onap/sdc/toscaparser/api/elements/StatefulEntityType.java create mode 100644 src/main/java/org/onap/sdc/toscaparser/api/elements/TypeValidation.java create mode 100644 src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Constraint.java create mode 100644 src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Equal.java create mode 100644 
src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/GreaterOrEqual.java create mode 100644 src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/GreaterThan.java create mode 100644 src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/InRange.java create mode 100644 src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Length.java create mode 100644 src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/LessOrEqual.java create mode 100644 src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/LessThan.java create mode 100644 src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/MaxLength.java create mode 100644 src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/MinLength.java create mode 100644 src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Pattern.java create mode 100644 src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Schema.java create mode 100644 src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/ValidValues.java create mode 100644 src/main/java/org/onap/sdc/toscaparser/api/extensions/ExtTools.java create mode 100644 src/main/java/org/onap/sdc/toscaparser/api/functions/Concat.java create mode 100644 src/main/java/org/onap/sdc/toscaparser/api/functions/Function.java create mode 100644 src/main/java/org/onap/sdc/toscaparser/api/functions/GetAttribute.java create mode 100644 src/main/java/org/onap/sdc/toscaparser/api/functions/GetInput.java create mode 100644 src/main/java/org/onap/sdc/toscaparser/api/functions/GetOperationOutput.java create mode 100644 src/main/java/org/onap/sdc/toscaparser/api/functions/GetProperty.java create mode 100644 src/main/java/org/onap/sdc/toscaparser/api/functions/Token.java create mode 100644 src/main/java/org/onap/sdc/toscaparser/api/parameters/Input.java create mode 100644 src/main/java/org/onap/sdc/toscaparser/api/parameters/Output.java create mode 100644 
src/main/java/org/onap/sdc/toscaparser/api/prereq/CSAR.java create mode 100644 src/main/java/org/onap/sdc/toscaparser/api/utils/CopyUtils.java create mode 100644 src/main/java/org/onap/sdc/toscaparser/api/utils/DumpUtils.java create mode 100644 src/main/java/org/onap/sdc/toscaparser/api/utils/JToscaErrorCodes.java create mode 100644 src/main/java/org/onap/sdc/toscaparser/api/utils/TOSCAVersionProperty.java create mode 100644 src/main/java/org/onap/sdc/toscaparser/api/utils/ThreadLocalsHolder.java create mode 100644 src/main/java/org/onap/sdc/toscaparser/api/utils/UrlUtils.java create mode 100644 src/main/java/org/onap/sdc/toscaparser/api/utils/ValidateUtils.java delete mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/CapabilityAssignment.java delete mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/CapabilityAssignments.java delete mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/DataEntity.java delete mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/EntityTemplate.java delete mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/Group.java delete mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/ImportsLoader.java delete mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/NodeTemplate.java delete mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/Policy.java delete mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/Property.java delete mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/RelationshipTemplate.java delete mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/Repository.java delete mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/RequirementAssignment.java delete mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/RequirementAssignments.java delete mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/SubstitutionMappings.java delete mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/TopologyTemplate.java delete mode 
100644 src/main/java/org/openecomp/sdc/toscaparser/api/ToscaGraph.java delete mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/ToscaTemplate.java delete mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/Triggers.java delete mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/UnsupportedType.java delete mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/common/JToscaException.java delete mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/common/JToscaValidationIssue.java delete mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/common/TOSCAException.java delete mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/common/ValidationIssueCollector.java delete mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/elements/ArtifactTypeDef.java delete mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/elements/AttributeDef.java delete mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/elements/CapabilityTypeDef.java delete mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/elements/DataType.java delete mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/elements/EntityType.java delete mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/elements/GroupType.java delete mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/elements/InterfacesDef.java delete mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/elements/Metadata.java delete mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/elements/NodeType.java delete mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/elements/PolicyType.java delete mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/elements/PortSpec.java delete mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/elements/PropertyDef.java delete mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/elements/RelationshipType.java delete mode 100644 
src/main/java/org/openecomp/sdc/toscaparser/api/elements/ScalarUnit.java delete mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/elements/ScalarUnitFrequency.java delete mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/elements/ScalarUnitSize.java delete mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/elements/ScalarUnitTime.java delete mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/elements/StatefulEntityType.java delete mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/elements/TypeValidation.java delete mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Constraint.java delete mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Equal.java delete mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/GreaterOrEqual.java delete mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/GreaterThan.java delete mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/InRange.java delete mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Length.java delete mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/LessOrEqual.java delete mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/LessThan.java delete mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/MaxLength.java delete mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/MinLength.java delete mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Pattern.java delete mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Schema.java delete mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Schema.java.orig delete mode 100644 
src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/ValidValues.java delete mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/extensions/ExtTools.java delete mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/functions/Concat.java delete mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/functions/Function.java delete mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetAttribute.java delete mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetInput.java delete mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetOperationOutput.java delete mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetProperty.java delete mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/functions/Token.java delete mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/parameters/Input.java delete mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/parameters/Output.java delete mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/prereq/CSAR.java delete mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/prereq/CSAR.java.orig delete mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/utils/CopyUtils.java delete mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/utils/DumpUtils.java delete mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/utils/JToscaErrorCodes.java delete mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/utils/TOSCAVersionProperty.java delete mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/utils/ThreadLocalsHolder.java delete mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/utils/UrlUtils.java delete mode 100644 src/main/java/org/openecomp/sdc/toscaparser/api/utils/ValidateUtils.java create mode 100644 src/test/java/org/onap/sdc/toscaparser/api/GetValidationIssues.java create mode 100644 src/test/java/org/onap/sdc/toscaparser/api/JToscaImportTest.java 
create mode 100644 src/test/java/org/onap/sdc/toscaparser/api/JToscaMetadataParse.java create mode 100644 src/test/java/org/onap/sdc/toscaparser/api/elements/EntityTypeTest.java delete mode 100644 src/test/java/org/openecomp/sdc/toscaparser/api/GetValidationIssues.java delete mode 100644 src/test/java/org/openecomp/sdc/toscaparser/api/JToscaImportTest.java delete mode 100644 src/test/java/org/openecomp/sdc/toscaparser/api/JToscaMetadataParse.java delete mode 100644 src/test/java/org/openecomp/sdc/toscaparser/api/elements/EntityTypeTest.java diff --git a/README.md b/README.md index 6913f03..a3a150f 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -# OpenECOMP JTOSCA +# ONAP JTOSCA --- @@ -6,21 +6,21 @@ # Introduction -OpenECOMP JTOSCA is delivered as helper JAR that can be used by clients that work with TOSCA CSAR files. +ONAP JTOSCA is delivered as helper JAR that can be used by clients that work with TOSCA CSAR files. It parses the CSAR and returns the model object which represents the CSAR contents. Prior to that, it performs validations on the CSAR to check its TOSCA compliance. 
-# Compiling OpenECOMP JTOSCA +# Compiling ONAP JTOSCA -OpenECOMP JTOSCA can be compiled easily using maven command: `mvn clean install` +ONAP JTOSCA can be compiled easily using maven command: `mvn clean install` The result is JAR file under "target" folder # Getting Help *** to be completed on release *** -SDC@lists.openecomp.org +SDC@lists.onap.org SDC Javadoc and Maven site diff --git a/pom.xml b/pom.xml index 63da98a..8589eb3 100644 --- a/pom.xml +++ b/pom.xml @@ -2,9 +2,9 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> 4.0.0 - org.openecomp.sdc.jtosca + org.onap.sdc.jtosca jtosca - 1.2.2-SNAPSHOT + 1.3.0 sdc-jtosca @@ -23,7 +23,7 @@ ${project.basedir}/target/jacoco.exec https://nexus.onap.org - /content/sites/site/org/openecomp/sdc/jtosca/${project.version} + /content/sites/site/org/onap/sdc/jtosca/${project.version} snapshots releases 176c31dfe190a diff --git a/src/main/java/org/onap/sdc/toscaparser/api/CapabilityAssignment.java b/src/main/java/org/onap/sdc/toscaparser/api/CapabilityAssignment.java new file mode 100644 index 0000000..8f18cc3 --- /dev/null +++ b/src/main/java/org/onap/sdc/toscaparser/api/CapabilityAssignment.java @@ -0,0 +1,148 @@ +package org.onap.sdc.toscaparser.api; + +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.Map; + +import org.onap.sdc.toscaparser.api.elements.CapabilityTypeDef; +import org.onap.sdc.toscaparser.api.elements.PropertyDef; + +public class CapabilityAssignment { + + private String name; + private LinkedHashMap _properties; + private CapabilityTypeDef _definition; + private LinkedHashMap _customDef; + + public CapabilityAssignment(String cname, + LinkedHashMap cproperties, + CapabilityTypeDef cdefinition, LinkedHashMap customDef) { + name = cname; + _properties = cproperties; + _definition = cdefinition; + _customDef = customDef; + } + + /** + * Get the properties list for capability + * @return list of property objects for capability 
+ */ + public ArrayList getPropertiesObjects() { + // Return a list of property objects + ArrayList properties = new ArrayList(); + LinkedHashMap props = _properties; + if(props != null) { + for(Map.Entry me: props.entrySet()) { + String pname = me.getKey(); + Object pvalue = me.getValue(); + + LinkedHashMap propsDef = _definition.getPropertiesDef(); + if(propsDef != null) { + PropertyDef pd = (PropertyDef)propsDef.get(pname); + if(pd != null) { + properties.add(new Property(pname,pvalue,pd.getSchema(), _customDef)); + } + } + } + } + return properties; + } + + /** + * Get the map of properties + * @return map of all properties contains dictionary of property name and property object + */ + public LinkedHashMap getProperties() { + // Return a dictionary of property name-object pairs + LinkedHashMap npps = new LinkedHashMap<>(); + for(Property p: getPropertiesObjects()) { + npps.put(p.getName(),p); + } + return npps; + } + + /** + * Get the property value by name + * @param pname - the property name for capability + * @return the property value for this name + */ + public Object getPropertyValue(String pname) { + // Return the value of a given property name + LinkedHashMap props = getProperties(); + if(props != null && props.get(pname) != null) { + return props.get(name).getValue(); + } + return null; + } + + /** + * Get the name for capability + * @return the name for capability + */ + public String getName() { + return name; + } + + /** + * Get the definition for capability + * @return CapabilityTypeDef - contain definition for capability + */ + public CapabilityTypeDef getDefinition() { + return _definition; + } + + /** + * Set the property for capability + * @param pname - the property name for capability to set + * @param pvalue - the property valiue for capability to set + */ + public void setProperty(String pname,Object pvalue) { + _properties.put(pname,pvalue); + } + + @Override + public String toString() { + return "CapabilityAssignment{" + + "name='" + 
name + '\'' + + ", _properties=" + _properties + + ", _definition=" + _definition + + '}'; + } +} + +/*python + +from toscaparser.properties import Property + + +class CapabilityAssignment(object): + '''TOSCA built-in capabilities type.''' + + def __init__(self, name, properties, definition): + self.name = name + self._properties = properties + self.definition = definition + + def get_properties_objects(self): + '''Return a list of property objects.''' + properties = [] + props = self._properties + if props: + for name, value in props.items(): + props_def = self.definition.get_properties_def() + if props_def and name in props_def: + properties.append(Property(name, value, + props_def[name].schema)) + return properties + + def get_properties(self): + '''Return a dictionary of property name-object pairs.''' + return {prop.name: prop + for prop in self.get_properties_objects()} + + def get_property_value(self, name): + '''Return the value of a given property name.''' + props = self.get_properties() + if props and name in props: + return props[name].value +*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/CapabilityAssignments.java b/src/main/java/org/onap/sdc/toscaparser/api/CapabilityAssignments.java new file mode 100644 index 0000000..d0c6a7f --- /dev/null +++ b/src/main/java/org/onap/sdc/toscaparser/api/CapabilityAssignments.java @@ -0,0 +1,49 @@ +package org.onap.sdc.toscaparser.api; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +public class CapabilityAssignments { + + private Map capabilityAssignments; + + public CapabilityAssignments(Map capabilityAssignments) { + this.capabilityAssignments = capabilityAssignments != null ? new HashMap<>(capabilityAssignments) : new HashMap<>(); + } + + /** + * Get all capability assignments for node template.
+ * This object can be either the original one, holding all capability assignments for this node template,or a filtered one, holding a filtered subset.
+ * @return list of capability assignments for the node template.
+ * If there are no capability assignments, empty list is returned. + */ + public List getAll() { + return new ArrayList<>(capabilityAssignments.values()); + } + + /** + * Filter capability assignments by capability tosca type. + * @param type - The tosca type of capability assignments. + * @return CapabilityAssignments object, containing capability assignments of this type.
+ * If no such found, filtering will result in an empty collection. + */ + public CapabilityAssignments getCapabilitiesByType(String type) { + Map capabilityAssignmentsMap = capabilityAssignments.entrySet().stream() + .filter(cap -> cap.getValue().getDefinition().getType().equals(type)).collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + + return new CapabilityAssignments(capabilityAssignmentsMap); + } + + /** + * Get capability assignment by capability name. + * @param name - The name of capability assignment + * @return capability assignment with this name, or null if no such capability assignment was found. + */ + public CapabilityAssignment getCapabilityByName(String name) { + return capabilityAssignments.get(name); + } + +} diff --git a/src/main/java/org/onap/sdc/toscaparser/api/DataEntity.java b/src/main/java/org/onap/sdc/toscaparser/api/DataEntity.java new file mode 100644 index 0000000..2a12a71 --- /dev/null +++ b/src/main/java/org/onap/sdc/toscaparser/api/DataEntity.java @@ -0,0 +1,449 @@ +package org.onap.sdc.toscaparser.api; + +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.List; + +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.elements.*; +import org.onap.sdc.toscaparser.api.elements.constraints.Constraint; +import org.onap.sdc.toscaparser.api.elements.constraints.Schema; +import org.onap.sdc.toscaparser.api.functions.Function; +import org.onap.sdc.toscaparser.api.utils.TOSCAVersionProperty; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; +import org.onap.sdc.toscaparser.api.utils.ValidateUtils; + +public class DataEntity { + // A complex data value entity + + private LinkedHashMap customDef; + private DataType dataType; + private LinkedHashMap schema; + private Object value; + private String propertyName; + + public DataEntity(String _dataTypeName,Object _valueDict, + LinkedHashMap _customDef,String _propName) { + + customDef = 
_customDef; + dataType = new DataType(_dataTypeName,_customDef); + schema = dataType.getAllProperties(); + value = _valueDict; + propertyName = _propName; + } + + @SuppressWarnings("unchecked") + public Object validate() { + // Validate the value by the definition of the datatype + + // A datatype can not have both 'type' and 'properties' definitions. + // If the datatype has 'type' definition + if(dataType.getValueType() != null) { + value = DataEntity.validateDatatype(dataType.getValueType(),value,null,customDef,null); + Schema schemaCls = new Schema(propertyName,dataType.getDefs()); + for(Constraint constraint: schemaCls.getConstraints()) { + constraint.validate(value); + } + } + // If the datatype has 'properties' definition + else { + if(!(value instanceof LinkedHashMap)) { + //ERROR under investigation + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE001", String.format( + "TypeMismatchError: \"%s\" is not a map. The type is \"%s\"", + value.toString(),dataType.getType()))); + + if (value instanceof List && ((List) value).size() > 0) { + value = ((List) value).get(0); + } + + if (!(value instanceof LinkedHashMap)) { + return value; + } + } + + + + LinkedHashMap valueDict = (LinkedHashMap)value; + ArrayList allowedProps = new ArrayList<>(); + ArrayList requiredProps = new ArrayList<>(); + LinkedHashMap defaultProps = new LinkedHashMap<>(); + if(schema != null) { + allowedProps.addAll(schema.keySet()); + for(String name: schema.keySet()) { + PropertyDef propDef = schema.get(name); + if(propDef.isRequired()) { + requiredProps.add(name); + } + if(propDef.getDefault() != null) { + defaultProps.put(name,propDef.getDefault()); + } + } + } + + // check allowed field + for(String valueKey: valueDict.keySet()) { + //1710 devlop JSON validation + if(!("json").equals(dataType.getType()) && !allowedProps.contains(valueKey)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE100", String.format( + 
"UnknownFieldError: Data value of type \"%s\" contains unknown field \"%s\"", + dataType.getType(),valueKey))); + } + } + + // check default field + for(String defKey: defaultProps.keySet()) { + Object defValue = defaultProps.get(defKey); + if(valueDict.get(defKey) == null) { + valueDict.put(defKey, defValue); + } + + } + + // check missing field + ArrayList missingProp = new ArrayList<>(); + for(String reqKey: requiredProps) { + if(!valueDict.keySet().contains(reqKey)) { + missingProp.add(reqKey); + } + } + if(missingProp.size() > 0) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE003",String.format( + "MissingRequiredFieldError: Data value of type \"%s\" is missing required field(s) \"%s\"", + dataType.getType(),missingProp.toString()))); + } + + // check every field + for(String vname: valueDict.keySet()) { + Object vvalue = valueDict.get(vname); + LinkedHashMap schemaName = _findSchema(vname); + if(schemaName == null) { + continue; + } + Schema propSchema = new Schema(vname,schemaName); + // check if field value meets type defined + DataEntity.validateDatatype(propSchema.getType(), + vvalue, + propSchema.getEntrySchema(), + customDef, + null); + + // check if field value meets constraints defined + if(propSchema.getConstraints() != null) { + for(Constraint constraint: propSchema.getConstraints()) { + if(vvalue instanceof ArrayList) { + for(Object val: (ArrayList)vvalue) { + constraint.validate(val); + } + } + else { + constraint.validate(vvalue); + } + } + } + } + } + return value; + } + + private LinkedHashMap _findSchema(String name) { + if(schema != null && schema.get(name) != null) { + return schema.get(name).getSchema(); + } + return null; + } + + public static Object validateDatatype(String type, + Object value, + LinkedHashMap entrySchema, + LinkedHashMap customDef, + String propName) { + // Validate value with given type + + // If type is list or map, validate its entry by entry_schema(if defined) + // If type is 
a user-defined complex datatype, custom_def is required. + + if(Function.isFunction(value)) { + return value; + } + else if (type == null) { + //NOT ANALYZED + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE002", String.format( + "MissingType: Type is missing for value \"%s\"", + value.toString()))); + return value; + } + else if(type.equals(Schema.STRING)) { + return ValidateUtils.validateString(value); + } + else if(type.equals(Schema.INTEGER)) { + return ValidateUtils.validateInteger(value); + } + else if(type.equals(Schema.FLOAT)) { + return ValidateUtils.validateFloat(value); + } + else if(type.equals(Schema.NUMBER)) { + return ValidateUtils.validateNumeric(value); + } + else if(type.equals(Schema.BOOLEAN)) { + return ValidateUtils.validateBoolean(value); + } + else if(type.equals(Schema.RANGE)) { + return ValidateUtils.validateRange(value); + } + else if(type.equals(Schema.TIMESTAMP)) { + ValidateUtils.validateTimestamp(value); + return value; + } + else if(type.equals(Schema.LIST)) { + ValidateUtils.validateList(value); + if(entrySchema != null) { + DataEntity.validateEntry(value,entrySchema,customDef); + } + return value; + } + else if(type.equals(Schema.SCALAR_UNIT_SIZE)) { + return (new ScalarUnitSize(value)).validateScalarUnit(); + } + else if(type.equals(Schema.SCALAR_UNIT_FREQUENCY)) { + return (new ScalarUnitFrequency(value)).validateScalarUnit(); + } + else if(type.equals(Schema.SCALAR_UNIT_TIME)) { + return (new ScalarUnitTime(value)).validateScalarUnit(); + } + else if(type.equals(Schema.VERSION)) { + return (new TOSCAVersionProperty(value)).getVersion(); + } + else if(type.equals(Schema.MAP)) { + ValidateUtils.validateMap(value); + if(entrySchema != null) { + DataEntity.validateEntry(value,entrySchema,customDef); + } + return value; + } + else if(type.equals(Schema.PORTSPEC)) { + // tODO(TBD) bug 1567063, validate source & target as PortDef type + // as complex types not just as integers + 
PortSpec.validateAdditionalReq(value,propName,customDef); + } + else { + DataEntity data = new DataEntity(type,value,customDef,null); + return data.validate(); + } + + return value; + } + + @SuppressWarnings("unchecked") + public static Object validateEntry(Object value, + LinkedHashMap entrySchema, + LinkedHashMap customDef) { + + // Validate entries for map and list + Schema schema = new Schema(null,entrySchema); + Object valueob = value; + ArrayList valueList = null; + if(valueob instanceof LinkedHashMap) { + valueList = new ArrayList(((LinkedHashMap)valueob).values()); + } + else if(valueob instanceof ArrayList) { + valueList = (ArrayList)valueob; + } + if(valueList != null) { + for(Object v: valueList) { + DataEntity.validateDatatype(schema.getType(),v,schema.getEntrySchema(),customDef,null); + if(schema.getConstraints() != null) { + for(Constraint constraint: schema.getConstraints()) { + constraint.validate(v); + } + } + } + } + return value; + } + + @Override + public String toString() { + return "DataEntity{" + + "customDef=" + customDef + + ", dataType=" + dataType + + ", schema=" + schema + + ", value=" + value + + ", propertyName='" + propertyName + '\'' + + '}'; + } +} + +/*python + +from toscaparser.common.exception import ValidationIssueCollector +from toscaparser.common.exception import MissingRequiredFieldError +from toscaparser.common.exception import TypeMismatchError +from toscaparser.common.exception import UnknownFieldError +from toscaparser.elements.constraints import Schema +from toscaparser.elements.datatype import DataType +from toscaparser.elements.portspectype import PortSpec +from toscaparser.elements.scalarunit import ScalarUnit_Frequency +from toscaparser.elements.scalarunit import ScalarUnit_Size +from toscaparser.elements.scalarunit import ScalarUnit_Time +from toscaparser.utils.gettextutils import _ +from toscaparser.utils import validateutils + + +class DataEntity(object): + '''A complex data value entity.''' + + def __init__(self, 
datatypename, value_dict, custom_def=None, + prop_name=None): + self.custom_def = custom_def + self.datatype = DataType(datatypename, custom_def) + self.schema = self.datatype.get_all_properties() + self.value = value_dict + self.property_name = prop_name + + def validate(self): + '''Validate the value by the definition of the datatype.''' + + # A datatype can not have both 'type' and 'properties' definitions. + # If the datatype has 'type' definition + if self.datatype.value_type: + self.value = DataEntity.validate_datatype(self.datatype.value_type, + self.value, + None, + self.custom_def) + schema = Schema(self.property_name, self.datatype.defs) + for constraint in schema.constraints: + constraint.validate(self.value) + # If the datatype has 'properties' definition + else: + if not isinstance(self.value, dict): + ValidationIssueCollector.appendException( + TypeMismatchError(what=self.value, + type=self.datatype.type)) + allowed_props = [] + required_props = [] + default_props = {} + if self.schema: + allowed_props = self.schema.keys() + for name, prop_def in self.schema.items(): + if prop_def.required: + required_props.append(name) + if prop_def.default: + default_props[name] = prop_def.default + + # check allowed field + for value_key in list(self.value.keys()): + if value_key not in allowed_props: + ValidationIssueCollector.appendException( + UnknownFieldError(what=(_('Data value of type "%s"') + % self.datatype.type), + field=value_key)) + + # check default field + for def_key, def_value in list(default_props.items()): + if def_key not in list(self.value.keys()): + self.value[def_key] = def_value + + # check missing field + missingprop = [] + for req_key in required_props: + if req_key not in list(self.value.keys()): + missingprop.append(req_key) + if missingprop: + ValidationIssueCollector.appendException( + MissingRequiredFieldError( + what=(_('Data value of type "%s"') + % self.datatype.type), required=missingprop)) + + # check every field + for name, value 
in list(self.value.items()): + schema_name = self._find_schema(name) + if not schema_name: + continue + prop_schema = Schema(name, schema_name) + # check if field value meets type defined + DataEntity.validate_datatype(prop_schema.type, value, + prop_schema.entry_schema, + self.custom_def) + # check if field value meets constraints defined + if prop_schema.constraints: + for constraint in prop_schema.constraints: + if isinstance(value, list): + for val in value: + constraint.validate(val) + else: + constraint.validate(value) + + return self.value + + def _find_schema(self, name): + if self.schema and name in self.schema.keys(): + return self.schema[name].schema + + @staticmethod + def validate_datatype(type, value, entry_schema=None, custom_def=None, + prop_name=None): + '''Validate value with given type. + + If type is list or map, validate its entry by entry_schema(if defined) + If type is a user-defined complex datatype, custom_def is required. + ''' + from toscaparser.functions import is_function + if is_function(value): + return value + if type == Schema.STRING: + return validateutils.validate_string(value) + elif type == Schema.INTEGER: + return validateutils.validate_integer(value) + elif type == Schema.FLOAT: + return validateutils.validate_float(value) + elif type == Schema.NUMBER: + return validateutils.validate_numeric(value) + elif type == Schema.BOOLEAN: + return validateutils.validate_boolean(value) + elif type == Schema.RANGE: + return validateutils.validate_range(value) + elif type == Schema.TIMESTAMP: + validateutils.validate_timestamp(value) + return value + elif type == Schema.LIST: + validateutils.validate_list(value) + if entry_schema: + DataEntity.validate_entry(value, entry_schema, custom_def) + return value + elif type == Schema.SCALAR_UNIT_SIZE: + return ScalarUnit_Size(value).validate_scalar_unit() + elif type == Schema.SCALAR_UNIT_FREQUENCY: + return ScalarUnit_Frequency(value).validate_scalar_unit() + elif type == 
Schema.SCALAR_UNIT_TIME: + return ScalarUnit_Time(value).validate_scalar_unit() + elif type == Schema.VERSION: + return validateutils.TOSCAVersionProperty(value).get_version() + elif type == Schema.MAP: + validateutils.validate_map(value) + if entry_schema: + DataEntity.validate_entry(value, entry_schema, custom_def) + return value + elif type == Schema.PORTSPEC: + # tODO(TBD) bug 1567063, validate source & target as PortDef type + # as complex types not just as integers + PortSpec.validate_additional_req(value, prop_name, custom_def) + else: + data = DataEntity(type, value, custom_def) + return data.validate() + + @staticmethod + def validate_entry(value, entry_schema, custom_def=None): + '''Validate entries for map and list.''' + schema = Schema(None, entry_schema) + valuelist = value + if isinstance(value, dict): + valuelist = list(value.values()) + for v in valuelist: + DataEntity.validate_datatype(schema.type, v, schema.entry_schema, + custom_def) + if schema.constraints: + for constraint in schema.constraints: + constraint.validate(v) + return value +*/ \ No newline at end of file diff --git a/src/main/java/org/onap/sdc/toscaparser/api/EntityTemplate.java b/src/main/java/org/onap/sdc/toscaparser/api/EntityTemplate.java new file mode 100644 index 0000000..95c97dd --- /dev/null +++ b/src/main/java/org/onap/sdc/toscaparser/api/EntityTemplate.java @@ -0,0 +1,851 @@ +package org.onap.sdc.toscaparser.api; + +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.elements.*; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; + + +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.Map; + +public abstract class EntityTemplate { + // Base class for TOSCA templates + + protected static final String DERIVED_FROM = "derived_from"; + protected static final String PROPERTIES = "properties"; + protected static final String REQUIREMENTS = "requirements"; + protected static final String 
INTERFACES = "interfaces"; + protected static final String CAPABILITIES = "capabilities"; + protected static final String TYPE = "type"; + protected static final String DESCRIPTION = "description"; + protected static final String DIRECTIVES = "directives"; + protected static final String ATTRIBUTES = "attributes"; + protected static final String ARTIFACTS = "artifacts"; + protected static final String NODE_FILTER = "node_filter"; + protected static final String COPY = "copy"; + + protected static final String SECTIONS[] = { + DERIVED_FROM, PROPERTIES, REQUIREMENTS,INTERFACES, + CAPABILITIES, TYPE, DESCRIPTION, DIRECTIVES, + ATTRIBUTES, ARTIFACTS, NODE_FILTER, COPY}; + + private static final String NODE = "node"; + private static final String CAPABILITY = "capability"; + private static final String RELATIONSHIP = "relationship"; + private static final String OCCURRENCES = "occurrences"; + + protected static final String REQUIREMENTS_SECTION[] = { + NODE, CAPABILITY, RELATIONSHIP, OCCURRENCES, NODE_FILTER}; + + //# Special key names + private static final String METADATA = "metadata"; + protected static final String SPECIAL_SECTIONS[] = {METADATA}; + + protected String name; + protected LinkedHashMap entityTpl; + protected LinkedHashMap customDef; + protected StatefulEntityType typeDefinition; + private ArrayList _properties; + private ArrayList _interfaces; + private ArrayList _requirements; + private ArrayList _capabilities; + + // dummy constructor for subclasses that don't want super + public EntityTemplate() { + return; + } + + @SuppressWarnings("unchecked") + public EntityTemplate(String _name, + LinkedHashMap _template, + String _entityName, + LinkedHashMap _customDef) { + name = _name; + entityTpl = _template; + customDef = _customDef; + _validateField(entityTpl); + String type = (String)entityTpl.get("type"); + UnsupportedType.validateType(type); + if(_entityName.equals("node_type")) { + if(type != null) { + typeDefinition = new NodeType(type, customDef); + 
} + else { + typeDefinition = null; + } + } + if(_entityName.equals("relationship_type")) { + Object relationship = _template.get("relationship"); + type = null; + if(relationship != null && relationship instanceof LinkedHashMap) { + type = (String)((LinkedHashMap)relationship).get("type"); + } + else if(relationship instanceof String) { + type = (String)entityTpl.get("relationship"); + } + else { + type = (String)entityTpl.get("type"); + } + UnsupportedType.validateType(type); + typeDefinition = new RelationshipType(type,null, customDef); + } + if(_entityName.equals("policy_type")) { + if(type == null) { + //msg = (_('Policy definition of "%(pname)s" must have' + // ' a "type" ''attribute.') % dict(pname=name)) + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE140", String.format( + "ValidationError: Policy definition of \"%s\" must have a \"type\" attribute",name))); + } + typeDefinition = new PolicyType(type, customDef); + } + if(_entityName.equals("group_type")) { + if(type != null) { + typeDefinition = new GroupType(type, customDef); + } + else { + typeDefinition = null; + } + } + _properties = null; + _interfaces = null; + _requirements = null; + _capabilities = null; + } + + public String getType() { + if(typeDefinition != null) { + String clType = typeDefinition.getClass().getSimpleName(); + if(clType.equals("NodeType")) { + return (String)((NodeType)typeDefinition).getType(); + } + else if(clType.equals("PolicyType")) { + return (String)((PolicyType)typeDefinition).getType(); + } + else if(clType.equals("GroupType")) { + return (String)((GroupType)typeDefinition).getType(); + } + else if(clType.equals("RelationshipType")) { + return (String)((RelationshipType)typeDefinition).getType(); + } + } + return null; + } + + public Object getParentType() { + if(typeDefinition != null) { + String clType = typeDefinition.getClass().getSimpleName(); + if(clType.equals("NodeType")) { + return 
((NodeType)typeDefinition).getParentType(); + } + else if(clType.equals("PolicyType")) { + return ((PolicyType)typeDefinition).getParentType(); + } + else if(clType.equals("GroupType")) { + return ((GroupType)typeDefinition).getParentType(); + } + else if(clType.equals("RelationshipType")) { + return ((RelationshipType)typeDefinition).getParentType(); + } + } + return null; + } + + @SuppressWarnings("unchecked") + public RequirementAssignments getRequirements() { + if(_requirements == null) { + _requirements = _createRequirements(); + } + return new RequirementAssignments(_requirements); + } + + private ArrayList _createRequirements() { + ArrayList reqs = new ArrayList<>(); + ArrayList> requirements = (ArrayList>) + typeDefinition.getValue(REQUIREMENTS,entityTpl,false); + if(requirements == null) { + requirements = new ArrayList<>(); + } + for (Map req: requirements) { + for(String reqName: req.keySet()) { + Object reqItem = req.get(reqName); + if(reqItem instanceof LinkedHashMap) { + Object rel = ((LinkedHashMap)reqItem).get("relationship"); +// LinkedHashMap relationship = rel instanceof LinkedHashMap ? (LinkedHashMap) rel : null; + String nodeName = ((LinkedHashMap)reqItem).get("node").toString(); + Object capability = ((LinkedHashMap)reqItem).get("capability"); + String capabilityString = capability != null ? 
capability.toString() : null; + + reqs.add(new RequirementAssignment(reqName, nodeName, capabilityString, rel)); + } else if (reqItem instanceof String) { //short notation + String nodeName = String.valueOf(reqItem); + reqs.add(new RequirementAssignment(reqName, nodeName)); + } + } + } + return reqs; + } + + public ArrayList getPropertiesObjects() { + // Return properties objects for this template + if(_properties ==null) { + _properties = _createProperties(); + } + return _properties; + } + + public LinkedHashMap getProperties() { + LinkedHashMap props = new LinkedHashMap<>(); + for(Property po: getPropertiesObjects()) { + props.put(((Property)po).getName(),po); + } + return props; + } + + public Object getPropertyValue(String name) { + LinkedHashMap props = getProperties(); + Property p = (Property)props.get(name); + return p != null ? p.getValue() : null; + } + + public ArrayList getInterfaces() { + if(_interfaces == null) { + _interfaces = _createInterfaces(); + } + return _interfaces; + } + + public ArrayList getCapabilitiesObjects() { + // Return capabilities objects for this template + if(_capabilities == null) { + _capabilities = _createCapabilities(); + } + return _capabilities; + + } + + public CapabilityAssignments getCapabilities() { + LinkedHashMap caps = new LinkedHashMap(); + for(CapabilityAssignment cap: getCapabilitiesObjects()) { + caps.put(cap.getName(),cap); + } + return new CapabilityAssignments(caps); + } + + public boolean isDerivedFrom(String typeStr) { + // Returns true if this object is derived from 'type_str'. 
+ // False otherwise + + if(getType() == null) { + return false; + } + else if(getType().equals(typeStr)) { + return true; + } + else if(getParentType() != null) { + return ((EntityType)getParentType()).isDerivedFrom(typeStr); + } + return false; + } + + @SuppressWarnings("unchecked") + private ArrayList _createCapabilities() { + ArrayList capability = new ArrayList(); + LinkedHashMap caps = (LinkedHashMap) + ((EntityType)typeDefinition).getValue(CAPABILITIES,entityTpl,true); + if(caps != null) { + //?!? getCapabilities defined only for NodeType... + LinkedHashMap capabilities = ((NodeType)typeDefinition).getCapabilities(); + for(Map.Entry me: caps.entrySet()) { + String name = me. getKey(); + LinkedHashMap props = (LinkedHashMap)me.getValue(); + if(capabilities.get(name) != null) { + CapabilityTypeDef c = capabilities.get(name); // a CapabilityTypeDef + LinkedHashMap properties = new LinkedHashMap(); + // first use the definition default value + LinkedHashMap cprops = c.getProperties(); + if(cprops != null) { + for(Map.Entry cpe: cprops.entrySet()) { + String propertyName = cpe.getKey(); + LinkedHashMap propertyDef = (LinkedHashMap)cpe.getValue(); + Object dob = propertyDef.get("default"); + if(dob != null) { + properties.put(propertyName, dob); + + } + } + } + // then update (if available) with the node properties + LinkedHashMap pp = (LinkedHashMap)props.get("properties"); + if(pp != null) { + properties.putAll(pp); + } + CapabilityAssignment cap = new CapabilityAssignment(name, properties, c, customDef); + capability.add(cap); + } + } + } + return capability; + } + + protected void _validateProperties(LinkedHashMap template,StatefulEntityType entityType) { + @SuppressWarnings("unchecked") + LinkedHashMap properties = (LinkedHashMap)entityType.getValue(PROPERTIES,template,false); + _commonValidateProperties(entityType,properties); + } + + protected void _validateCapabilities() { + //BUG??? getCapabilities only defined in NodeType... 
+ LinkedHashMap typeCapabilities = ((NodeType)typeDefinition).getCapabilities(); + ArrayList allowedCaps = new ArrayList(); + if(typeCapabilities != null) { + allowedCaps.addAll(typeCapabilities.keySet()); + } + @SuppressWarnings("unchecked") + LinkedHashMap capabilities = (LinkedHashMap) + ((EntityType)typeDefinition).getValue(CAPABILITIES, entityTpl, false); + if(capabilities != null) { + _commonValidateField(capabilities, allowedCaps, "capabilities"); + _validateCapabilitiesProperties(capabilities); + } + } + + @SuppressWarnings("unchecked") + private void _validateCapabilitiesProperties(LinkedHashMap capabilities) { + for(Map.Entry me: capabilities.entrySet()) { + String cap = me.getKey(); + LinkedHashMap props = (LinkedHashMap)me.getValue(); + CapabilityAssignment capability = getCapability(cap); + if(capability == null) { + continue; + } + CapabilityTypeDef capabilitydef = capability.getDefinition(); + _commonValidateProperties(capabilitydef,(LinkedHashMap)props.get(PROPERTIES)); + + // validating capability properties values + for(Property prop: getCapability(cap).getPropertiesObjects()) { + prop.validate(); + + if(cap.equals("scalable") && prop.getName().equals("default_instances")) { + LinkedHashMap propDict = (LinkedHashMap)props.get(PROPERTIES); + int minInstances = (int)propDict.get("min_instances"); + int maxInstances = (int)propDict.get("max_instances"); + int defaultInstances = (int)propDict.get("default_instances"); + if(defaultInstances < minInstances || defaultInstances > maxInstances) { + //err_msg = ('"properties" of template "%s": ' + // '"default_instances" value is not between ' + // '"min_instances" and "max_instances".' 
% + // self.name) + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE141", String.format( + "ValidationError: \"properties\" of template \"%s\": \"default_instances\" value is not between \"min_instances\" and \"max_instances\"", + name))); + } + } + } + } + } + + private void _commonValidateProperties(StatefulEntityType entityType,LinkedHashMap properties) { + ArrayList allowedProps = new ArrayList(); + ArrayList requiredProps = new ArrayList(); + for(PropertyDef p: entityType.getPropertiesDefObjects()) { + allowedProps.add(p.getName()); + // If property is 'required' and has no 'default' value then record + if(p.isRequired() && p.getDefault() == null) { + requiredProps.add(p.getName()); + } + } + // validate all required properties have values + if(properties != null) { + ArrayList reqPropsNoValueOrDefault = new ArrayList(); + _commonValidateField(properties, allowedProps, "properties"); + // make sure it's not missing any property required by a tosca type + for(String r: requiredProps) { + if(properties.get(r) == null) { + reqPropsNoValueOrDefault.add(r); + } + } + // Required properties found without value or a default value + if(!reqPropsNoValueOrDefault.isEmpty()) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE003", String.format( + "MissingRequiredFieldError: properties of template \"%s\" are missing field(s): %s", + name,reqPropsNoValueOrDefault.toString()))); + } + } + else { + // Required properties in schema, but not in template + if(!requiredProps.isEmpty()) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE004", String.format( + "MissingRequiredFieldError2: properties of template \"%s\" are missing field(s): %s", + name,requiredProps.toString()))); + } + } + } + + @SuppressWarnings("unchecked") + private void _validateField(LinkedHashMap template) { + if(!(template instanceof LinkedHashMap)) { + 
ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE142", String.format( + "MissingRequiredFieldError: Template \"%s\" is missing required field \"%s\"",name,TYPE))); + return;//??? + } + boolean bBad = false; + Object relationship = ((LinkedHashMap)template).get("relationship"); + if(relationship != null) { + if(!(relationship instanceof String)) { + bBad = (((LinkedHashMap)relationship).get(TYPE) == null); + } + else if(relationship instanceof String) { + bBad = (template.get("relationship") == null); + } + } + else { + bBad = (template.get(TYPE) == null); + } + if(bBad) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE143", String.format( + "MissingRequiredFieldError: Template \"%s\" is missing required field \"%s\"",name,TYPE))); + } + } + + protected void _commonValidateField(LinkedHashMap schema, ArrayList allowedList,String section) { + for(String sname: schema.keySet()) { + boolean bFound = false; + for(String allowed: allowedList) { + if(sname.equals(allowed)) { + bFound = true; + break; + } + } + if(!bFound) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE144", String.format( + "UnknownFieldError: Section \"%s\" of template \"%s\" contains unknown field \"%s\"",section,name,sname))); + } + } + + } + + @SuppressWarnings("unchecked") + private ArrayList _createProperties() { + ArrayList props = new ArrayList(); + LinkedHashMap properties = (LinkedHashMap) + ((EntityType)typeDefinition).getValue(PROPERTIES,entityTpl,false); + if(properties == null) { + properties = new LinkedHashMap(); + } + for(Map.Entry me: properties.entrySet()) { + String pname = me.getKey(); + Object pvalue = me.getValue(); + LinkedHashMap propsDef = ((StatefulEntityType)typeDefinition).getPropertiesDef(); + if(propsDef != null && propsDef.get(pname) != null) { + PropertyDef pd = (PropertyDef)propsDef.get(pname); + Property prop = new 
Property(pname,pvalue,pd.getSchema(),customDef); + props.add(prop); + } + } + ArrayList pds = ((StatefulEntityType)typeDefinition).getPropertiesDefObjects(); + for(Object pdo: pds) { + PropertyDef pd = (PropertyDef)pdo; + if(pd.getDefault() != null && properties.get(pd.getName()) == null) { + Property prop = new Property(pd.getName(),pd.getDefault(),pd.getSchema(),customDef); + props.add(prop); + } + } + return props; + } + + @SuppressWarnings("unchecked") + private ArrayList _createInterfaces() { + ArrayList interfaces = new ArrayList<>(); + LinkedHashMap typeInterfaces = new LinkedHashMap(); + if(typeDefinition instanceof RelationshipType) { + if(entityTpl instanceof LinkedHashMap) { + typeInterfaces = (LinkedHashMap)entityTpl.get(INTERFACES); + if(typeInterfaces == null) { + for(String relName: entityTpl.keySet()) { + Object relValue = entityTpl.get(relName); + if(!relName.equals("type")) { + Object relDef = relValue; + LinkedHashMap rel = null; + if(relDef instanceof LinkedHashMap) { + Object relob = ((LinkedHashMap)relDef).get("relationship"); + if(relob instanceof LinkedHashMap) { + rel = (LinkedHashMap)relob; + } + } + if(rel != null) { + if(rel.get(INTERFACES) != null) { + typeInterfaces = (LinkedHashMap)rel.get(INTERFACES); + break; + } + } + } + } + } + } + } + else { + typeInterfaces = (LinkedHashMap) + ((EntityType)typeDefinition).getValue(INTERFACES,entityTpl,false); + } + if(typeInterfaces != null) { + for(Map.Entry me: typeInterfaces.entrySet()) { + String interfaceType = me.getKey(); + LinkedHashMap value = (LinkedHashMap)me.getValue(); + for(Map.Entry ve: value.entrySet()) { + String op = ve.getKey(); + Object opDef = ve.getValue(); + InterfacesDef iface = new InterfacesDef((EntityType)typeDefinition, + interfaceType, + this, + op, + opDef); + interfaces.add(iface); + } + + } + } + return interfaces; + } + + public CapabilityAssignment getCapability(String name) { + // Provide named capability + // :param name: name of capability + // :return: 
capability object if found, None otherwise + return getCapabilities().getCapabilityByName(name); + } + + // getter + public String getName() { + return name; + } + + public StatefulEntityType getTypeDefinition() { + return typeDefinition; + } + + public LinkedHashMap getCustomDef() { + return customDef; + } + + @Override + public String toString() { + return "EntityTemplate{" + + "name='" + name + '\'' + + ", entityTpl=" + entityTpl + + ", customDef=" + customDef + + ", typeDefinition=" + typeDefinition + + ", _properties=" + _properties + + ", _interfaces=" + _interfaces + + ", _requirements=" + _requirements + + ", _capabilities=" + _capabilities + + '}'; + } +} + +/*python + +class EntityTemplate(object): + '''Base class for TOSCA templates.''' + + SECTIONS = (DERIVED_FROM, PROPERTIES, REQUIREMENTS, + INTERFACES, CAPABILITIES, TYPE, DESCRIPTION, DIRECTIVES, + ATTRIBUTES, ARTIFACTS, NODE_FILTER, COPY) = \ + ('derived_from', 'properties', 'requirements', 'interfaces', + 'capabilities', 'type', 'description', 'directives', + 'attributes', 'artifacts', 'node_filter', 'copy') + REQUIREMENTS_SECTION = (NODE, CAPABILITY, RELATIONSHIP, OCCURRENCES, NODE_FILTER) = \ + ('node', 'capability', 'relationship', + 'occurrences', 'node_filter') + # Special key names + SPECIAL_SECTIONS = (METADATA) = ('metadata') + + def __init__(self, name, template, entity_name, custom_def=None): + self.name = name + self.entity_tpl = template + self.custom_def = custom_def + self._validate_field(self.entity_tpl) + type = self.entity_tpl.get('type') + UnsupportedType.validate_type(type) + if entity_name == 'node_type': + self.type_definition = NodeType(type, custom_def) \ + if type is not None else None + if entity_name == 'relationship_type': + relationship = template.get('relationship') + type = None + if relationship and isinstance(relationship, dict): + type = relationship.get('type') + elif isinstance(relationship, str): + type = self.entity_tpl['relationship'] + else: + type = 
self.entity_tpl['type'] + UnsupportedType.validate_type(type) + self.type_definition = RelationshipType(type, + None, custom_def) + if entity_name == 'policy_type': + if not type: + msg = (_('Policy definition of "%(pname)s" must have' + ' a "type" ''attribute.') % dict(pname=name)) + ValidationIssueCollector.appendException( + ValidationError(msg)) + + self.type_definition = PolicyType(type, custom_def) + if entity_name == 'group_type': + self.type_definition = GroupType(type, custom_def) \ + if type is not None else None + self._properties = None + self._interfaces = None + self._requirements = None + self._capabilities = None + + @property + def type(self): + if self.type_definition: + return self.type_definition.type + + @property + def parent_type(self): + if self.type_definition: + return self.type_definition.parent_type + + @property + def requirements(self): + if self._requirements is None: + self._requirements = self.type_definition.get_value( + self.REQUIREMENTS, + self.entity_tpl) or [] + return self._requirements + + def get_properties_objects(self): + '''Return properties objects for this template.''' + if self._properties is None: + self._properties = self._create_properties() + return self._properties + + def get_properties(self): + '''Return a dictionary of property name-object pairs.''' + return {prop.name: prop + for prop in self.get_properties_objects()} + + def get_property_value(self, name): + '''Return the value of a given property name.''' + props = self.get_properties() + if props and name in props.keys(): + return props[name].value + + @property + def interfaces(self): + if self._interfaces is None: + self._interfaces = self._create_interfaces() + return self._interfaces + + def get_capabilities_objects(self): + '''Return capabilities objects for this template.''' + if not self._capabilities: + self._capabilities = self._create_capabilities() + return self._capabilities + + def get_capabilities(self): + '''Return a dictionary of capability 
name-object pairs.''' + return {cap.name: cap + for cap in self.get_capabilities_objects()} + + def is_derived_from(self, type_str): + '''Check if object inherits from the given type. + + Returns true if this object is derived from 'type_str'. + False otherwise. + ''' + if not self.type: + return False + elif self.type == type_str: + return True + elif self.parent_type: + return self.parent_type.is_derived_from(type_str) + else: + return False + + def _create_capabilities(self): + capability = [] + caps = self.type_definition.get_value(self.CAPABILITIES, + self.entity_tpl, True) + if caps: + for name, props in caps.items(): + capabilities = self.type_definition.get_capabilities() + if name in capabilities.keys(): + c = capabilities[name] + properties = {} + # first use the definition default value + if c.properties: + for property_name in c.properties.keys(): + prop_def = c.properties[property_name] + if 'default' in prop_def: + properties[property_name] = prop_def['default'] + # then update (if available) with the node properties + if 'properties' in props and props['properties']: + properties.update(props['properties']) + + cap = CapabilityAssignment(name, properties, c) + capability.append(cap) + return capability + + def _validate_properties(self, template, entitytype): + properties = entitytype.get_value(self.PROPERTIES, template) + self._common_validate_properties(entitytype, properties) + + def _validate_capabilities(self): + type_capabilities = self.type_definition.get_capabilities() + allowed_caps = \ + type_capabilities.keys() if type_capabilities else [] + capabilities = self.type_definition.get_value(self.CAPABILITIES, + self.entity_tpl) + if capabilities: + self._common_validate_field(capabilities, allowed_caps, + 'capabilities') + self._validate_capabilities_properties(capabilities) + + def _validate_capabilities_properties(self, capabilities): + for cap, props in capabilities.items(): + capability = self.get_capability(cap) + if not capability: + 
continue + capabilitydef = capability.definition + self._common_validate_properties(capabilitydef, + props[self.PROPERTIES]) + + # validating capability properties values + for prop in self.get_capability(cap).get_properties_objects(): + prop.validate() + + # tODO(srinivas_tadepalli): temporary work around to validate + # default_instances until standardized in specification + if cap == "scalable" and prop.name == "default_instances": + prop_dict = props[self.PROPERTIES] + min_instances = prop_dict.get("min_instances") + max_instances = prop_dict.get("max_instances") + default_instances = prop_dict.get("default_instances") + if not (min_instances <= default_instances + <= max_instances): + err_msg = ('"properties" of template "%s": ' + '"default_instances" value is not between ' + '"min_instances" and "max_instances".' % + self.name) + ValidationIssueCollector.appendException( + ValidationError(message=err_msg)) + + def _common_validate_properties(self, entitytype, properties): + allowed_props = [] + required_props = [] + for p in entitytype.get_properties_def_objects(): + allowed_props.append(p.name) + # If property is 'required' and has no 'default' value then record + if p.required and p.default is None: + required_props.append(p.name) + # validate all required properties have values + if properties: + req_props_no_value_or_default = [] + self._common_validate_field(properties, allowed_props, + 'properties') + # make sure it's not missing any property required by a tosca type + for r in required_props: + if r not in properties.keys(): + req_props_no_value_or_default.append(r) + # Required properties found without value or a default value + if req_props_no_value_or_default: + ValidationIssueCollector.appendException( + MissingRequiredFieldError( + what='"properties" of template "%s"' % self.name, + required=req_props_no_value_or_default)) + else: + # Required properties in schema, but not in template + if required_props: + 
ValidationIssueCollector.appendException( + MissingRequiredFieldError( + what='"properties" of template "%s"' % self.name, + required=required_props)) + + def _validate_field(self, template): + if not isinstance(template, dict): + ValidationIssueCollector.appendException( + MissingRequiredFieldError( + what='Template "%s"' % self.name, required=self.TYPE)) + try: + relationship = template.get('relationship') + if relationship and not isinstance(relationship, str): + relationship[self.TYPE] + elif isinstance(relationship, str): + template['relationship'] + else: + template[self.TYPE] + except KeyError: + ValidationIssueCollector.appendException( + MissingRequiredFieldError( + what='Template "%s"' % self.name, required=self.TYPE)) + + def _common_validate_field(self, schema, allowedlist, section): + for name in schema: + if name not in allowedlist: + ValidationIssueCollector.appendException( + UnknownFieldError( + what=('"%(section)s" of template "%(nodename)s"' + % {'section': section, 'nodename': self.name}), + field=name)) + + def _create_properties(self): + props = [] + properties = self.type_definition.get_value(self.PROPERTIES, + self.entity_tpl) or {} + for name, value in properties.items(): + props_def = self.type_definition.get_properties_def() + if props_def and name in props_def: + prop = Property(name, value, + props_def[name].schema, self.custom_def) + props.append(prop) + for p in self.type_definition.get_properties_def_objects(): + if p.default is not None and p.name not in properties.keys(): + prop = Property(p.name, p.default, p.schema, self.custom_def) + props.append(prop) + return props + + def _create_interfaces(self): + interfaces = [] + type_interfaces = None + if isinstance(self.type_definition, RelationshipType): + if isinstance(self.entity_tpl, dict): + if self.INTERFACES in self.entity_tpl: + type_interfaces = self.entity_tpl[self.INTERFACES] + else: + for rel_def, value in self.entity_tpl.items(): + if rel_def != 'type': + rel_def = 
self.entity_tpl.get(rel_def) + rel = None + if isinstance(rel_def, dict): + rel = rel_def.get('relationship') + if rel: + if self.INTERFACES in rel: + type_interfaces = rel[self.INTERFACES] + break + else: + type_interfaces = self.type_definition.get_value(self.INTERFACES, + self.entity_tpl) + if type_interfaces: + for interface_type, value in type_interfaces.items(): + for op, op_def in value.items(): + iface = InterfacesDef(self.type_definition, + interfacetype=interface_type, + node_template=self, + name=op, + value=op_def) + interfaces.append(iface) + return interfaces + + def get_capability(self, name): + """Provide named capability + + :param name: name of capability + :return: capability object if found, None otherwise + """ + caps = self.get_capabilities() + if caps and name in caps.keys(): + return caps[name] +*/ \ No newline at end of file diff --git a/src/main/java/org/onap/sdc/toscaparser/api/Group.java b/src/main/java/org/onap/sdc/toscaparser/api/Group.java new file mode 100644 index 0000000..f678083 --- /dev/null +++ b/src/main/java/org/onap/sdc/toscaparser/api/Group.java @@ -0,0 +1,137 @@ +package org.onap.sdc.toscaparser.api; + +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.elements.Metadata; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; +import org.onap.sdc.toscaparser.api.utils.ValidateUtils; + +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.Map; + +public class Group extends EntityTemplate { + + private static final String TYPE = "type"; + private static final String METADATA = "metadata"; + private static final String DESCRIPTION = "description"; + private static final String PROPERTIES = "properties"; + private static final String MEMBERS = "members"; + private static final String INTERFACES = "interfaces"; + private static final String SECTIONS[] = { + TYPE, METADATA, DESCRIPTION, PROPERTIES, MEMBERS, INTERFACES}; + + private String name; 
+ LinkedHashMap tpl; + ArrayList memberNodes; + LinkedHashMap customDef; + Metadata metaData; + + + public Group(String _name, LinkedHashMap _templates, + ArrayList _memberNodes, + LinkedHashMap _customDef) { + super(_name, _templates, "group_type", _customDef); + + name = _name; + tpl = _templates; + if(tpl.get(METADATA) != null) { + Object metadataObject = tpl.get(METADATA); + ValidateUtils.validateMap(metadataObject); + metaData = new Metadata((Map)metadataObject); + } + memberNodes = _memberNodes; + _validateKeys(); + } + + public Metadata getMetadata() { + return metaData; + } + + public ArrayList getMembers() { + return (ArrayList)entityTpl.get("members"); + } + + public String getDescription() { + return (String)entityTpl.get("description"); + + } + + public ArrayList getMemberNodes() { + return memberNodes; + } + + private void _validateKeys() { + for(String key: entityTpl.keySet()) { + boolean bFound = false; + for(String sect: SECTIONS) { + if(key.equals(sect)) { + bFound = true; + break; + } + } + if(!bFound) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE183", String.format( + "UnknownFieldError: Groups \"%s\" contains unknown field \"%s\"", + name,key))); + } + } + } + + @Override + public String toString() { + return "Group{" + + "name='" + name + '\'' + + ", tpl=" + tpl + + ", memberNodes=" + memberNodes + + ", customDef=" + customDef + + ", metaData=" + metaData + + '}'; + } +} + +/*python + +from toscaparser.common.exception import ValidationIssueCollector +from toscaparser.common.exception import UnknownFieldError +from toscaparser.entity_template import EntityTemplate +from toscaparser.utils import validateutils + +SECTIONS = (TYPE, METADATA, DESCRIPTION, PROPERTIES, MEMBERS, INTERFACES) = \ + ('type', 'metadata', 'description', + 'properties', 'members', 'interfaces') + + +class Group(EntityTemplate): + + def __init__(self, name, group_templates, member_nodes, custom_defs=None): + super(Group, 
self).__init__(name, + group_templates, + 'group_type', + custom_defs) + self.name = name + self.tpl = group_templates + self.meta_data = None + if self.METADATA in self.tpl: + self.meta_data = self.tpl.get(self.METADATA) + validateutils.validate_map(self.meta_data) + self.member_nodes = member_nodes + self._validate_keys() + + @property + def members(self): + return self.entity_tpl.get('members') + + @property + def description(self): + return self.entity_tpl.get('description') + + def get_member_nodes(self): + return self.member_nodes + + def _validate_keys(self): + for key in self.entity_tpl.keys(): + if key not in SECTIONS: + ValidationIssueCollector.appendException( + UnknownFieldError(what='Groups "%s"' % self.name, + field=key)) +*/ \ No newline at end of file diff --git a/src/main/java/org/onap/sdc/toscaparser/api/ImportsLoader.java b/src/main/java/org/onap/sdc/toscaparser/api/ImportsLoader.java new file mode 100644 index 0000000..76800f7 --- /dev/null +++ b/src/main/java/org/onap/sdc/toscaparser/api/ImportsLoader.java @@ -0,0 +1,746 @@ +package org.onap.sdc.toscaparser.api; + +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; +import org.onap.sdc.toscaparser.api.utils.UrlUtils; + +import org.onap.sdc.toscaparser.api.elements.TypeValidation; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.yaml.snakeyaml.Yaml; + +import java.io.*; +import java.net.URL; +import java.nio.file.Paths; +import java.util.*; + +public class ImportsLoader { + + private static Logger log = LoggerFactory.getLogger(ImportsLoader.class.getName()); + private static final String FILE = "file"; + private static final String REPOSITORY = "repository"; + private static final String NAMESPACE_URI = "namespace_uri"; + private static final String NAMESPACE_PREFIX = "namespace_prefix"; + private String IMPORTS_SECTION[] = {FILE, REPOSITORY, NAMESPACE_URI, NAMESPACE_PREFIX}; + + private ArrayList 
importslist; + private String path; + private ArrayList typeDefinitionList; + + private LinkedHashMap customDefs; + private LinkedHashMap allCustomDefs; + private ArrayList> nestedToscaTpls; + private LinkedHashMap repositories; + + @SuppressWarnings("unchecked") + public ImportsLoader(ArrayList_importslist, + String _path, + Object _typeDefinitionList, + LinkedHashMap tpl) { + + this.importslist = _importslist; + customDefs = new LinkedHashMap(); + allCustomDefs = new LinkedHashMap(); + nestedToscaTpls = new ArrayList>(); + if((_path == null || _path.isEmpty()) && tpl == null) { + //msg = _('Input tosca template is not provided.') + //log.warning(msg) + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE184", "ValidationError: Input tosca template is not provided")); + } + + this.path = _path; + this.repositories = new LinkedHashMap(); + + if(tpl != null && tpl.get("repositories") != null) { + this.repositories = (LinkedHashMap)tpl.get("repositories"); + } + this.typeDefinitionList = new ArrayList(); + if(_typeDefinitionList != null) { + if(_typeDefinitionList instanceof ArrayList) { + this.typeDefinitionList = (ArrayList)_typeDefinitionList; + } + else { + this.typeDefinitionList.add((String)_typeDefinitionList); + } + } + _validateAndLoadImports(); + } + + public LinkedHashMap getCustomDefs() { + return allCustomDefs; + } + + public ArrayList> getNestedToscaTpls() { + return nestedToscaTpls; + } + + @SuppressWarnings({ "unchecked", "unused" }) + public void _validateAndLoadImports() { + Set importNames = new HashSet(); + + if(importslist == null) { + //msg = _('"imports" keyname is defined without including templates.') + //log.error(msg) + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE185", + "ValidationError: \"imports\" keyname is defined without including templates")); + return; + } + + for(Object importDef: importslist) { + String fullFileName = null; + LinkedHashMap customType = null; 
+ if(importDef instanceof LinkedHashMap) { + for(Map.Entry me: ((LinkedHashMap)importDef).entrySet()) { + String importName = me.getKey(); + Object importUri = me.getValue(); + if(importNames.contains(importName)) { + //msg = (_('Duplicate import name "%s" was found.') % import_name) + //log.error(msg) + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE186", String.format( + "ValidationError: Duplicate import name \"%s\" was found",importName))); + } + importNames.add(importName); //??? + + // _loadImportTemplate returns 2 objects + Object ffnct[] = _loadImportTemplate(importName, importUri); + fullFileName = (String)ffnct[0]; + customType = (LinkedHashMap)ffnct[1]; + String namespacePrefix = ""; + if(importUri instanceof LinkedHashMap) { + namespacePrefix = (String) + ((LinkedHashMap)importUri).get(NAMESPACE_PREFIX); + } + + if(customType != null) { + TypeValidation tv = new TypeValidation(customType, importDef); + _updateCustomDefs(customType, namespacePrefix); + } + } + } + else { // old style of imports + // _loadImportTemplate returns 2 objects + Object ffnct[] = _loadImportTemplate(null,importDef); + fullFileName = (String)ffnct[0]; + customType = (LinkedHashMap)ffnct[1]; + if(customType != null) { + TypeValidation tv = new TypeValidation(customType,importDef); + _updateCustomDefs(customType,null); + } + } + _updateNestedToscaTpls(fullFileName, customType); + + + } + } + + /** + * This method is used to get consolidated custom definitions by passing custom Types from + * each import. 
The resultant collection is then passed back which contains all import + * definitions + * + * @param customType the custom type + * @param namespacePrefix the namespace prefix + */ + @SuppressWarnings("unchecked") + private void _updateCustomDefs(LinkedHashMap customType, String namespacePrefix) { + LinkedHashMap outerCustomTypes; + for(String typeDef: typeDefinitionList) { + if(typeDef.equals("imports")) { + customDefs.put("imports", customType.get(typeDef)); + if (allCustomDefs.isEmpty() || allCustomDefs.get("imports") == null){ + allCustomDefs.put("imports",customType.get(typeDef)); + } + else if (customType.get(typeDef) != null){ + Set allCustomImports = new HashSet<>((ArrayList)allCustomDefs.get("imports")); + allCustomImports.addAll((ArrayList) customType.get(typeDef)); + allCustomDefs.put("imports", new ArrayList<>(allCustomImports)); + } + } + else { + outerCustomTypes = (LinkedHashMap)customType.get(typeDef); + if(outerCustomTypes != null) { + if(namespacePrefix != null && !namespacePrefix.isEmpty()) { + LinkedHashMap prefixCustomTypes = new LinkedHashMap(); + for(Map.Entry me: outerCustomTypes.entrySet()) { + String typeDefKey = me.getKey(); + String nameSpacePrefixToKey = namespacePrefix + "." 
+ typeDefKey; + prefixCustomTypes.put(nameSpacePrefixToKey, outerCustomTypes.get(typeDefKey)); + } + customDefs.putAll(prefixCustomTypes); + allCustomDefs.putAll(prefixCustomTypes); + } + else { + customDefs.putAll(outerCustomTypes); + allCustomDefs.putAll(outerCustomTypes); + } + } + } + } + } + + private void _updateNestedToscaTpls(String fullFileName,LinkedHashMap customTpl) { + if(fullFileName != null && customTpl != null) { + LinkedHashMap tt = new LinkedHashMap(); + tt.put(fullFileName, customTpl); + nestedToscaTpls.add(tt); + } + } + + private void _validateImportKeys(String importName, LinkedHashMap importUri) { + if(importUri.get(FILE) == null) { + //log.warning(_('Missing keyname "file" in import "%(name)s".') % {'name': import_name}) + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE187", String.format( + "MissingRequiredFieldError: Import of template \"%s\" is missing field %s",importName,FILE))); + } + for(String key: importUri.keySet()) { + boolean bFound = false; + for(String is: IMPORTS_SECTION) { + if(is.equals(key)) { + bFound = true; + break; + } + } + if(!bFound) { + //log.warning(_('Unknown keyname "%(key)s" error in ' + // 'imported definition "%(def)s".') + // % {'key': key, 'def': import_name}) + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE188", String.format( + "UnknownFieldError: Import of template \"%s\" has unknown fiels %s",importName,key))); + } + } + } + + @SuppressWarnings("unchecked") + private Object[] _loadImportTemplate(String importName, Object importUriDef) { + /* + This method loads the custom type definitions referenced in "imports" + section of the TOSCA YAML template by determining whether each import + is specified via a file reference (by relative or absolute path) or a + URL reference. 
+ + Possibilities: + +----------+--------+------------------------------+ + | template | import | comment | + +----------+--------+------------------------------+ + | file | file | OK | + | file | URL | OK | + | preparsed| file | file must be a full path | + | preparsed| URL | OK | + | URL | file | file must be a relative path | + | URL | URL | OK | + +----------+--------+------------------------------+ + */ + Object al[] = new Object[2]; + + boolean shortImportNotation = false; + String fileName; + String repository; + if(importUriDef instanceof LinkedHashMap) { + _validateImportKeys(importName, (LinkedHashMap)importUriDef); + fileName = (String)((LinkedHashMap)importUriDef).get(FILE); + repository = (String)((LinkedHashMap)importUriDef).get(REPOSITORY); + if(repository != null) { + if(!repositories.keySet().contains(repository)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE189", String.format( + "InvalidPropertyValueError: Repository \"%s\" not found in \"%s\"", + repository,repositories.keySet().toString()))); + } + } + } + else { + fileName = (String)importUriDef; + repository = null; + shortImportNotation = true; + } + + if(fileName == null || fileName.isEmpty()) { + //msg = (_('A template file name is not provided with import ' + // 'definition "%(import_name)s".') + // % {'import_name': import_name}) + //log.error(msg) + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE190", String.format( + "ValidationError: A template file name is not provided with import definition \"%s\"",importName))); + al[0] = al[1] = null; + return al; + } + + if(UrlUtils.validateUrl(fileName)) { + try (InputStream input = new URL(fileName).openStream();) { + al[0] = fileName; + Yaml yaml = new Yaml(); + al[1] = yaml.load(input); + return al; + } + catch(IOException e) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE191", String.format( + "ImportError: \"%s\" 
loading YAML import from \"%s\"",e.getClass().getSimpleName(),fileName))); + al[0] = al[1] = null; + return al; + } + } + else if(repository == null || repository.isEmpty()) { + boolean aFile = false; + String importTemplate = null; + if(path != null && !path.isEmpty()) { + if(UrlUtils.validateUrl(path)) { + File fp = new File(path); + if(fp.isAbsolute()) { + String msg = String.format( + "ImportError: Absolute file name \"%s\" cannot be used in the URL-based input template \"%s\"", + fileName,path); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE192", msg)); + al[0] = al[1] = null; + return al; + } + importTemplate = UrlUtils.joinUrl(path,fileName); + aFile = false; + } + else { + + aFile = true; + File fp = new File(path); + if(fp.isFile()) { + File fn = new File(fileName); + if(fn.isFile()) { + importTemplate = fileName; + } + else { + String fullPath = Paths.get(path).toAbsolutePath().getParent().toString() + File.separator + fileName; + File ffp = new File(fullPath); + if(ffp.isFile()) { + importTemplate = fullPath; + } + else { + String dirPath = Paths.get(path).toAbsolutePath().getParent().toString(); + String filePath; + if(Paths.get(fileName).getParent() != null) { + filePath = Paths.get(fileName).getParent().toString(); + } + else { + filePath = ""; + } + if(!filePath.isEmpty() && dirPath.endsWith(filePath)) { + String sFileName = Paths.get(fileName).getFileName().toString(); + importTemplate = dirPath + File.separator + sFileName; + File fit = new File(importTemplate); + if(!fit.isFile()) { + //msg = (_('"%(import_template)s" is' + // 'not a valid file') + // % {'import_template': + // import_template}) + //log.error(msg) + String msg = String.format( + "ValueError: \"%s\" is not a valid file",importTemplate); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE193", msg)); + log.debug("ImportsLoader - _loadImportTemplate - {}", msg); + } + } + } + } + } + } + } + else { // 
template is pre-parsed + File fn = new File(fileName); + if(fn.isAbsolute() && fn.isFile()) { + aFile = true; + importTemplate = fileName; + } + else { + String msg = String.format( + "Relative file name \"%s\" cannot be used in a pre-parsed input template",fileName); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE194", "ImportError: " + msg)); + al[0] = al[1] = null; + return al; + } + } + + if(importTemplate == null || importTemplate.isEmpty()) { + //log.error(_('Import "%(name)s" is not valid.') % + // {'name': import_uri_def}) + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE195", String.format( + "ImportError: Import \"%s\" is not valid",importUriDef))); + al[0] = al[1] = null; + return al; + } + + // for now, this must be a file + if(!aFile) { + log.error("ImportsLoader - _loadImportTemplate - Error!! Expected a file. importUriDef = {}, importTemplate = {}", importUriDef, importTemplate); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE196", String.format( + "ImportError: Import \"%s\" is not a file",importName))); + al[0] = al[1] = null; + return al; + } + try (InputStream input = new FileInputStream(new File(importTemplate));) { + al[0] = importTemplate; + + Yaml yaml = new Yaml(); + al[1] = yaml.load(input); + return al; + } + catch(FileNotFoundException e) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE197", String.format( + "ImportError: Failed to load YAML from \"%s\"" + e,importName))); + al[0] = al[1] = null; + return al; + } + catch(Exception e) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE198", String.format( + "ImportError: Exception from SnakeYAML file = \"%s\"" + e,importName))); + al[0] = al[1] = null; + return al; + } + } + + if(shortImportNotation) { + //log.error(_('Import "%(name)s" is not valid.') % import_uri_def) + 
ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE199", String.format( + "ImportError: Import \"%s\" is not valid",importName))); + al[0] = al[1] = null; + return al; + } + + String fullUrl = ""; + String repoUrl = ""; + if(repository != null && !repository.isEmpty()) { + if(repositories != null) { + for(String repoName: repositories.keySet()) { + if(repoName.equals(repository)) { + Object repoDef = repositories.get(repoName); + if(repoDef instanceof String) { + repoUrl = (String)repoDef; + } + else if(repoDef instanceof LinkedHashMap) { + repoUrl = (String)((LinkedHashMap)repoDef).get("url"); + } + // Remove leading, ending spaces and strip + // the last character if "/" + repoUrl = repoUrl.trim(); + if(repoUrl.endsWith("/")) { + repoUrl = repoUrl.substring(0,repoUrl.length()-1); + } + fullUrl = repoUrl + "/" + fileName; + break; + } + } + } + if(fullUrl.isEmpty()) { + String msg = String.format( + "referenced repository \"%s\" in import definition \"%s\" not found", + repository,importName); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE200", "ImportError: " + msg)); + al[0] = al[1] = null; + return al; + } + } + if(UrlUtils.validateUrl(fullUrl)) { + try (InputStream input = new URL(fullUrl).openStream();) { + al[0] = fullUrl; + Yaml yaml = new Yaml(); + al[1] = yaml.load(input); + return al; + } + catch(IOException e) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE201", String.format( + "ImportError: Exception loading YAML import from \"%s\"",fullUrl))); + al[0] = al[1] = null; + return al; + } + } + else { + String msg = String.format( + "repository URL \"%s\" in import definition \"%s\" is not valid", + repoUrl,importName); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE202", "ImportError: " + msg)); + } + + // if we got here something is wrong with the flow... 
+ log.error("ImportsLoader - _loadImportTemplate - got to dead end (importName {})", importName); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE203", String.format( + "ImportError: _loadImportTemplate got to dead end (importName %s)\n",importName))); + al[0] = al[1] = null; + return al; + } + + @Override + public String toString() { + return "ImportsLoader{" + + "IMPORTS_SECTION=" + Arrays.toString(IMPORTS_SECTION) + + ", importslist=" + importslist + + ", path='" + path + '\'' + + ", typeDefinitionList=" + typeDefinitionList + + ", customDefs=" + customDefs + + ", nestedToscaTpls=" + nestedToscaTpls + + ", repositories=" + repositories + + '}'; + } +} + +/*python + +import logging +import os + +from toscaparser.common.exception import ValidationIssueCollector +from toscaparser.common.exception import InvalidPropertyValueError +from toscaparser.common.exception import MissingRequiredFieldError +from toscaparser.common.exception import UnknownFieldError +from toscaparser.common.exception import ValidationError +from toscaparser.elements.tosca_type_validation import TypeValidation +from toscaparser.utils.gettextutils import _ +import org.openecomp.sdc.toscaparser.api.utils.urlutils +import org.openecomp.sdc.toscaparser.api.utils.yamlparser + +YAML_LOADER = toscaparser.utils.yamlparser.load_yaml +log = logging.getLogger("tosca") + + +class ImportsLoader(object): + + IMPORTS_SECTION = (FILE, REPOSITORY, NAMESPACE_URI, NAMESPACE_PREFIX) = \ + ('file', 'repository', 'namespace_uri', + 'namespace_prefix') + + def __init__(self, importslist, path, type_definition_list=None, + tpl=None): + self.importslist = importslist + self.custom_defs = {} + if not path and not tpl: + msg = _('Input tosca template is not provided.') + log.warning(msg) + ValidationIssueCollector.appendException(ValidationError(message=msg)) + self.path = path + self.repositories = {} + if tpl and tpl.get('repositories'): + self.repositories = 
tpl.get('repositories') + self.type_definition_list = [] + if type_definition_list: + if isinstance(type_definition_list, list): + self.type_definition_list = type_definition_list + else: + self.type_definition_list.append(type_definition_list) + self._validate_and_load_imports() + + def get_custom_defs(self): + return self.custom_defs + + def _validate_and_load_imports(self): + imports_names = set() + + if not self.importslist: + msg = _('"imports" keyname is defined without including ' + 'templates.') + log.error(msg) + ValidationIssueCollector.appendException(ValidationError(message=msg)) + return + + for import_def in self.importslist: + if isinstance(import_def, dict): + for import_name, import_uri in import_def.items(): + if import_name in imports_names: + msg = (_('Duplicate import name "%s" was found.') % + import_name) + log.error(msg) + ValidationIssueCollector.appendException( + ValidationError(message=msg)) + imports_names.add(import_name) + + custom_type = self._load_import_template(import_name, + import_uri) + namespace_prefix = None + if isinstance(import_uri, dict): + namespace_prefix = import_uri.get( + self.NAMESPACE_PREFIX) + if custom_type: + TypeValidation(custom_type, import_def) + self._update_custom_def(custom_type, namespace_prefix) + else: # old style of imports + custom_type = self._load_import_template(None, + import_def) + if custom_type: + TypeValidation( + custom_type, import_def) + self._update_custom_def(custom_type, None) + + def _update_custom_def(self, custom_type, namespace_prefix): + outer_custom_types = {} + for type_def in self.type_definition_list: + outer_custom_types = custom_type.get(type_def) + if outer_custom_types: + if type_def == "imports": + self.custom_defs.update({'imports': outer_custom_types}) + else: + if namespace_prefix: + prefix_custom_types = {} + for type_def_key in outer_custom_types.keys(): + namespace_prefix_to_key = (namespace_prefix + + "." 
+ type_def_key) + prefix_custom_types[namespace_prefix_to_key] = \ + outer_custom_types[type_def_key] + self.custom_defs.update(prefix_custom_types) + else: + self.custom_defs.update(outer_custom_types) + + def _validate_import_keys(self, import_name, import_uri_def): + if self.FILE not in import_uri_def.keys(): + log.warning(_('Missing keyname "file" in import "%(name)s".') + % {'name': import_name}) + ValidationIssueCollector.appendException( + MissingRequiredFieldError( + what='Import of template "%s"' % import_name, + required=self.FILE)) + for key in import_uri_def.keys(): + if key not in self.IMPORTS_SECTION: + log.warning(_('Unknown keyname "%(key)s" error in ' + 'imported definition "%(def)s".') + % {'key': key, 'def': import_name}) + ValidationIssueCollector.appendException( + UnknownFieldError( + what='Import of template "%s"' % import_name, + field=key)) + + def _load_import_template(self, import_name, import_uri_def): + """Handle custom types defined in imported template files + + This method loads the custom type definitions referenced in "imports" + section of the TOSCA YAML template by determining whether each import + is specified via a file reference (by relative or absolute path) or a + URL reference. 
+ + Possibilities: + +----------+--------+------------------------------+ + | template | import | comment | + +----------+--------+------------------------------+ + | file | file | OK | + | file | URL | OK | + | preparsed| file | file must be a full path | + | preparsed| URL | OK | + | URL | file | file must be a relative path | + | URL | URL | OK | + +----------+--------+------------------------------+ + """ + short_import_notation = False + if isinstance(import_uri_def, dict): + self._validate_import_keys(import_name, import_uri_def) + file_name = import_uri_def.get(self.FILE) + repository = import_uri_def.get(self.REPOSITORY) + repos = self.repositories.keys() + if repository is not None: + if repository not in repos: + ValidationIssueCollector.appendException( + InvalidPropertyValueError( + what=_('Repository is not found in "%s"') % repos)) + else: + file_name = import_uri_def + repository = None + short_import_notation = True + + if not file_name: + msg = (_('A template file name is not provided with import ' + 'definition "%(import_name)s".') + % {'import_name': import_name}) + log.error(msg) + ValidationIssueCollector.appendException(ValidationError(message=msg)) + return + + if toscaparser.utils.urlutils.UrlUtils.validate_url(file_name): + return YAML_LOADER(file_name, False) + elif not repository: + import_template = None + if self.path: + if toscaparser.utils.urlutils.UrlUtils.validate_url(self.path): + if os.path.isabs(file_name): + msg = (_('Absolute file name "%(name)s" cannot be ' + 'used in a URL-based input template ' + '"%(template)s".') + % {'name': file_name, 'template': self.path}) + log.error(msg) + ValidationIssueCollector.appendException(ImportError(msg)) + return + import_template = toscaparser.utils.urlutils.UrlUtils.\ + join_url(self.path, file_name) + a_file = False + else: + a_file = True + main_a_file = os.path.isfile(self.path) + + if main_a_file: + if os.path.isfile(file_name): + import_template = file_name + else: + full_path = 
os.path.join( + os.path.dirname(os.path.abspath(self.path)), + file_name) + if os.path.isfile(full_path): + import_template = full_path + else: + file_path = file_name.rpartition("/") + dir_path = os.path.dirname(os.path.abspath( + self.path)) + if file_path[0] != '' and dir_path.endswith( + file_path[0]): + import_template = dir_path + "/" +\ + file_path[2] + if not os.path.isfile(import_template): + msg = (_('"%(import_template)s" is' + 'not a valid file') + % {'import_template': + import_template}) + log.error(msg) + ValidationIssueCollector.appendException + (ValueError(msg)) + else: # template is pre-parsed + if os.path.isabs(file_name) and os.path.isfile(file_name): + a_file = True + import_template = file_name + else: + msg = (_('Relative file name "%(name)s" cannot be used ' + 'in a pre-parsed input template.') + % {'name': file_name}) + log.error(msg) + ValidationIssueCollector.appendException(ImportError(msg)) + return + + if not import_template: + log.error(_('Import "%(name)s" is not valid.') % + {'name': import_uri_def}) + ValidationIssueCollector.appendException( + ImportError(_('Import "%s" is not valid.') % + import_uri_def)) + return + return YAML_LOADER(import_template, a_file) + + if short_import_notation: + log.error(_('Import "%(name)s" is not valid.') % import_uri_def) + ValidationIssueCollector.appendException( + ImportError(_('Import "%s" is not valid.') % import_uri_def)) + return + + full_url = "" + if repository: + if self.repositories: + for repo_name, repo_def in self.repositories.items(): + if repo_name == repository: + # Remove leading, ending spaces and strip + # the last character if "/" + repo_url = ((repo_def['url']).strip()).rstrip("//") + full_url = repo_url + "/" + file_name + + if not full_url: + msg = (_('referenced repository "%(n_uri)s" in import ' + 'definition "%(tpl)s" not found.') + % {'n_uri': repository, 'tpl': import_name}) + log.error(msg) + ValidationIssueCollector.appendException(ImportError(msg)) + return + + if 
toscaparser.utils.urlutils.UrlUtils.validate_url(full_url): + return YAML_LOADER(full_url, False) + else: + msg = (_('repository url "%(n_uri)s" is not valid in import ' + 'definition "%(tpl)s".') + % {'n_uri': repo_url, 'tpl': import_name}) + log.error(msg) + ValidationIssueCollector.appendException(ImportError(msg)) +*/ \ No newline at end of file diff --git a/src/main/java/org/onap/sdc/toscaparser/api/NodeTemplate.java b/src/main/java/org/onap/sdc/toscaparser/api/NodeTemplate.java new file mode 100644 index 0000000..250ef42 --- /dev/null +++ b/src/main/java/org/onap/sdc/toscaparser/api/NodeTemplate.java @@ -0,0 +1,737 @@ +package org.onap.sdc.toscaparser.api; + +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; + +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; + +import org.onap.sdc.toscaparser.api.elements.*; +import org.onap.sdc.toscaparser.api.utils.CopyUtils; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +public class NodeTemplate extends EntityTemplate { + + private LinkedHashMap templates; + private LinkedHashMap customDef; + private ArrayList availableRelTpls; + private LinkedHashMap availableRelTypes; + private LinkedHashMap related; + private ArrayList relationshipTpl; + private LinkedHashMap _relationships; + private SubstitutionMappings subMappingToscaTemplate; + private Metadata metadata; + + private static final String METADATA = "metadata"; + + @SuppressWarnings("unchecked") + public NodeTemplate(String name, + LinkedHashMap ntnodeTemplates, + LinkedHashMap ntcustomDef, + ArrayList ntavailableRelTpls, + LinkedHashMap ntavailableRelTypes) { + + super(name, (LinkedHashMap)ntnodeTemplates.get(name), "node_type", ntcustomDef); + + templates = ntnodeTemplates; + _validateFields((LinkedHashMap)templates.get(name)); + customDef = ntcustomDef; + related = new LinkedHashMap(); + relationshipTpl = new ArrayList(); + availableRelTpls = ntavailableRelTpls; + 
availableRelTypes = ntavailableRelTypes; + _relationships = new LinkedHashMap(); + subMappingToscaTemplate = null; + metadata = _metaData(); + } + + @SuppressWarnings("unchecked") + public LinkedHashMap getRelationships() { + if(_relationships.isEmpty()) { + List requires = getRequirements().getAll(); + if(requires != null && requires instanceof List) { + for(RequirementAssignment r: requires) { + LinkedHashMap explicit = _getExplicitRelationship(r); + if(explicit != null) { + // _relationships.putAll(explicit)... + for(Map.Entry ee: explicit.entrySet()) { + _relationships.put(ee.getKey(), ee.getValue()); + } + } + } + } + } + return _relationships; + } + + @SuppressWarnings("unchecked") + private LinkedHashMap _getExplicitRelationship(RequirementAssignment req) { + // Handle explicit relationship + + // For example, + // - req: + // node: DBMS + // relationship: tosca.relationships.HostedOn + + LinkedHashMap explicitRelation = new LinkedHashMap(); + String node = req.getNodeTemplateName(); + + if(node != null && !node.isEmpty()) { + //msg = _('Lookup by TOSCA types is not supported. ' + // 'Requirement for "%s" can not be full-filled.') % self.name + boolean bFound = false; + for(String k: EntityType.TOSCA_DEF.keySet()) { + if(k.equals(node)) { + bFound = true; + break; + } + } + if(bFound || customDef.get(node) != null) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE205", String.format( + "NotImplementedError: Lookup by TOSCA types is not supported. 
Requirement for \"%s\" can not be full-filled", + getName()))); + return null; + } + if(templates.get(node) == null) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE206", String.format( + "KeyError: Node template \"%s\" was not found",node))); + return null; + } + NodeTemplate relatedTpl = new NodeTemplate(node,templates,customDef,null,null); + Object relationship = req.getRelationship(); + String relationshipString = null; +// // here relationship can be a string or a LHM with 'type': + + // check if its type has relationship defined + if(relationship == null) { + ArrayList parentReqs = ((NodeType)typeDefinition).getAllRequirements(); + if(parentReqs == null) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE207", "ValidationError: parent_req is null")); + } + else { +// for(String key: req.keySet()) { +// boolean bFoundRel = false; + for(Object rdo: parentReqs) { + LinkedHashMap reqDict = (LinkedHashMap)rdo; + LinkedHashMap relDict = (LinkedHashMap)reqDict.get(req.getName()); + if(relDict != null) { + relationship = relDict.get("relationship"); + //BUG-python??? need to break twice? 
+// bFoundRel = true; + break; + } + } +// if(bFoundRel) { +// break; +// } +// } + } + } + + if(relationship != null) { + // here relationship can be a string or a LHM with 'type': + if(relationship instanceof String) { + relationshipString = (String)relationship; + } + else if(relationship instanceof LinkedHashMap) { + relationshipString = (String)((LinkedHashMap)relationship).get("type"); + } + + boolean foundRelationshipTpl = false; + // apply available relationship templates if found + if(availableRelTpls != null) { + for(RelationshipTemplate tpl: availableRelTpls) { + if(tpl.getName().equals(relationshipString)) { + RelationshipType rtype = new RelationshipType(tpl.getType(),null,customDef); + explicitRelation.put(rtype, relatedTpl); + tpl.setTarget(relatedTpl); + tpl.setSource(this); + relationshipTpl.add(tpl); + foundRelationshipTpl = true; + } + } + } + // create relationship template object. + String relPrfx = EntityType.RELATIONSHIP_PREFIX; + if(!foundRelationshipTpl) { + if(relationship instanceof LinkedHashMap) { + relationshipString = (String)((LinkedHashMap)relationship).get("type"); + if(relationshipString != null) { + if(availableRelTypes != null && !availableRelTypes.isEmpty() && + availableRelTypes.get(relationshipString) != null) { + ; + } + else if(!(relationshipString).startsWith(relPrfx)) { + relationshipString = relPrfx + relationshipString; + } + } + else { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE208", String.format( + "MissingRequiredFieldError: \"relationship\" used in template \"%s\" is missing required field \"type\"", + relatedTpl.getName()))); + } + } + for(RelationshipType rtype: ((NodeType)typeDefinition).getRelationship().keySet()) { + if(rtype.getType().equals(relationshipString)) { + explicitRelation.put(rtype,relatedTpl); + relatedTpl._addRelationshipTemplate(req,rtype.getType(),this); + } + else if(availableRelTypes != null && !availableRelTypes.isEmpty()) { + LinkedHashMap 
relTypeDef = (LinkedHashMap)availableRelTypes.get(relationshipString); + if(relTypeDef != null) { + String superType = (String)relTypeDef.get("derived_from"); + if(superType != null) { + if(!superType.startsWith(relPrfx)) { + superType = relPrfx + superType; + } + if(rtype.getType().equals(superType)) { + explicitRelation.put(rtype,relatedTpl); + relatedTpl._addRelationshipTemplate(req,rtype.getType(),this); + } + } + } + } + } + } + } + } + return explicitRelation; + } + + @SuppressWarnings("unchecked") + private void _addRelationshipTemplate(RequirementAssignment requirement, String rtype, NodeTemplate source) { + LinkedHashMap req = new LinkedHashMap<>(); + req.put("relationship", CopyUtils.copyLhmOrAl(requirement.getRelationship())); + req.put("type",rtype); + RelationshipTemplate tpl = new RelationshipTemplate(req, rtype, customDef, this, source); + relationshipTpl.add(tpl); + } + + public ArrayList getRelationshipTemplate() { + return relationshipTpl; + } + + void _addNext(NodeTemplate nodetpl,RelationshipType relationship) { + related.put(nodetpl,relationship); + } + + public ArrayList getRelatedNodes() { + if(related.isEmpty()) { + for(Map.Entry me: ((NodeType)typeDefinition).getRelationship().entrySet()) { + RelationshipType relation = me.getKey(); + NodeType node = me.getValue(); + for(String tpl: templates.keySet()) { + if(tpl.equals(node.getType())) { + //BUG.. python has + // self.related[NodeTemplate(tpl)] = relation + // but NodeTemplate doesn't have a constructor with just name... + //???? 
+ related.put(new NodeTemplate(tpl,null,null,null,null),relation); + } + } + } + } + return new ArrayList(related.keySet()); + } + + public void validate(/*tosca_tpl=none is not used...*/) { + _validateCapabilities(); + _validateRequirements(); + _validateProperties(entityTpl,(NodeType)typeDefinition); + _validateInterfaces(); + for(Property prop: getPropertiesObjects()) { + prop.validate(); + } + } + + private Metadata _metaData() { + if(entityTpl.get(METADATA) != null) { + return new Metadata((Map)entityTpl.get(METADATA)); + } + else { + return null; + } + } + + @SuppressWarnings("unchecked") + private void _validateRequirements() { + ArrayList typeRequires = ((NodeType)typeDefinition).getAllRequirements(); + ArrayList allowedReqs = new ArrayList<>(); + allowedReqs.add("template"); + if(typeRequires != null) { + for(Object to: typeRequires) { + LinkedHashMap treq = (LinkedHashMap)to; + for(Map.Entry me: treq.entrySet()) { + String key = me.getKey(); + Object value = me.getValue(); + allowedReqs.add(key); + if(value instanceof LinkedHashMap) { + allowedReqs.addAll(((LinkedHashMap)value).keySet()); + } + } + + } + } + + ArrayList requires = (ArrayList)((NodeType)typeDefinition).getValue(REQUIREMENTS, entityTpl, false); + if(requires != null) { + if(!(requires instanceof ArrayList)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE209", String.format( + "TypeMismatchError: \"requirements\" of template \"%s\" are not of type \"list\"",name))); + } + else { + for(Object ro: requires) { + LinkedHashMap req = (LinkedHashMap)ro; + for(Map.Entry me: req.entrySet()) { + String rl = me.getKey(); + Object vo = me.getValue(); + if(vo instanceof LinkedHashMap) { + LinkedHashMap value = (LinkedHashMap)vo; + _validateRequirementsKeys(value); + _validateRequirementsProperties(value); + allowedReqs.add(rl); + } + } + _commonValidateField(req,allowedReqs,"requirements"); + } + } + } + } + + @SuppressWarnings("unchecked") + private void 
_validateRequirementsProperties(LinkedHashMap reqs) { + // TO-DO(anyone): Only occurrences property of the requirements is + // validated here. Validation of other requirement properties are being + // validated in different files. Better to keep all the requirements + // properties validation here. + for(Map.Entry me: reqs.entrySet()) { + if(me.getKey().equals("occurrences")) { + ArrayList val = (ArrayList)me.getValue(); + _validateOccurrences(val); + } + + } + } + + private void _validateOccurrences(ArrayList occurrences) { + DataEntity.validateDatatype("list",occurrences,null,null,null); + for(Object val: occurrences) { + DataEntity.validateDatatype("Integer",val,null,null,null); + } + if(occurrences.size() != 2 || + !(0 <= (int)occurrences.get(0) && (int)occurrences.get(0) <= (int)occurrences.get(1)) || + (int)occurrences.get(1) == 0) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE210", String.format( + "InvalidPropertyValueError: property has invalid value %s",occurrences.toString()))); + } + } + + private void _validateRequirementsKeys(LinkedHashMap reqs) { + for(String key: reqs.keySet()) { + boolean bFound = false; + for(int i=0; i< REQUIREMENTS_SECTION.length; i++) { + if(key.equals(REQUIREMENTS_SECTION[i])) { + bFound = true; + break; + } + } + if(!bFound) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE211", String.format( + "UnknownFieldError: \"requirements\" of template \"%s\" contains unknown field \"%s\"",name,key))); + } + } + } + + @SuppressWarnings("unchecked") + private void _validateInterfaces() { + LinkedHashMap ifaces = (LinkedHashMap) + ((NodeType)typeDefinition).getValue(INTERFACES, entityTpl, false); + if(ifaces != null) { + for(Map.Entry me: ifaces.entrySet()) { + String iname = me.getKey(); + LinkedHashMap value = (LinkedHashMap)me.getValue(); + if(iname.equals(InterfacesDef.LIFECYCLE) || iname.equals(InterfacesDef.LIFECYCLE_SHORTNAME)) { + // maybe we 
should convert [] to arraylist??? + ArrayList inlo = new ArrayList<>(); + for(int i=0; i irco = new ArrayList<>(); + for(int i=0; i _collectCustomIfaceOperations(String iname) { + ArrayList allowedOperations = new ArrayList<>(); + LinkedHashMap nodetypeIfaceDef = (LinkedHashMap)((NodeType) + typeDefinition).getInterfaces().get(iname); + allowedOperations.addAll(nodetypeIfaceDef.keySet()); + String ifaceType = (String)nodetypeIfaceDef.get("type"); + if(ifaceType != null) { + LinkedHashMap ifaceTypeDef = null; + if(((NodeType)typeDefinition).customDef != null) { + ifaceTypeDef = (LinkedHashMap)((NodeType)typeDefinition).customDef.get(ifaceType); + } + if(ifaceTypeDef == null) { + ifaceTypeDef = (LinkedHashMap)EntityType.TOSCA_DEF.get(ifaceType); + } + allowedOperations.addAll(ifaceTypeDef.keySet()); + } + // maybe we should convert [] to arraylist??? + ArrayList idrw = new ArrayList<>(); + for(int i=0; i nodetemplate) { + for(String ntname: nodetemplate.keySet()) { + boolean bFound = false; + for(int i=0; i< SECTIONS.length; i++) { + if(ntname.equals(SECTIONS[i])) { + bFound = true; + break; + } + } + if(!bFound) { + for(int i=0; i< SPECIAL_SECTIONS.length; i++) { + if(ntname.equals(SPECIAL_SECTIONS[i])) { + bFound = true; + break; + } + } + + } + if(!bFound) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE213", String.format( + "UnknownFieldError: Node template \"%s\" has unknown field \"%s\"",name,ntname))); + } + } + } + + // getter/setter + + // multilevel nesting + public SubstitutionMappings getSubMappingToscaTemplate() { + return subMappingToscaTemplate; + } + + public void setSubMappingToscaTemplate(SubstitutionMappings sm) { + subMappingToscaTemplate = sm; + } + + public Metadata getMetaData() { + return metadata; + } + + public void setMetaData(Metadata metadata) { + this.metadata = metadata; + } + + @Override + public String toString() { + return getName(); + } + +} + +/*python + +from toscaparser.common.exception 
import ValidationIssueCollector +from toscaparser.common.exception import InvalidPropertyValueError +from toscaparser.common.exception import MissingRequiredFieldError +from toscaparser.common.exception import TypeMismatchError +from toscaparser.common.exception import UnknownFieldError +from toscaparser.common.exception import ValidationError +from toscaparser.dataentity import DataEntity +from toscaparser.elements.interfaces import CONFIGURE +from toscaparser.elements.interfaces import CONFIGURE_SHORTNAME +from toscaparser.elements.interfaces import INTERFACE_DEF_RESERVED_WORDS +from toscaparser.elements.interfaces import InterfacesDef +from toscaparser.elements.interfaces import LIFECYCLE +from toscaparser.elements.interfaces import LIFECYCLE_SHORTNAME +from toscaparser.elements.relationshiptype import RelationshipType +from toscaparser.entity_template import EntityTemplate +from toscaparser.relationship_template import RelationshipTemplate +from toscaparser.utils.gettextutils import _ + +log = logging.getLogger('tosca') + + +class NodeTemplate(EntityTemplate): + '''Node template from a Tosca profile.''' + def __init__(self, name, node_templates, custom_def=None, + available_rel_tpls=None, available_rel_types=None): + super(NodeTemplate, self).__init__(name, node_templates[name], + 'node_type', + custom_def) + self.templates = node_templates + self._validate_fields(node_templates[name]) + self.custom_def = custom_def + self.related = {} + self.relationship_tpl = [] + self.available_rel_tpls = available_rel_tpls + self.available_rel_types = available_rel_types + self._relationships = {} + self.sub_mapping_tosca_template = None + + @property + def relationships(self): + if not self._relationships: + requires = self.requirements + if requires and isinstance(requires, list): + for r in requires: + for r1, value in r.items(): + explicit = self._get_explicit_relationship(r, value) + if explicit: + for key, value in explicit.items(): + self._relationships[key] = value 
+ return self._relationships + + def _get_explicit_relationship(self, req, value): + """Handle explicit relationship + + For example, + - req: + node: DBMS + relationship: tosca.relationships.HostedOn + """ + explicit_relation = {} + node = value.get('node') if isinstance(value, dict) else value + + if node: + # TO-DO(spzala) implement look up once Glance meta data is available + # to find a matching TOSCA node using the TOSCA types + msg = _('Lookup by TOSCA types is not supported. ' + 'Requirement for "%s" can not be full-filled.') % self.name + if (node in list(self.type_definition.TOSCA_DEF.keys()) + or node in self.custom_def): + ValidationIssueCollector.appendException(NotImplementedError(msg)) + return + + if node not in self.templates: + ValidationIssueCollector.appendException( + KeyError(_('Node template "%s" was not found.') % node)) + return + + related_tpl = NodeTemplate(node, self.templates, self.custom_def) + relationship = value.get('relationship') \ + if isinstance(value, dict) else None + # check if it's type has relationship defined + if not relationship: + parent_reqs = self.type_definition.get_all_requirements() + if parent_reqs is None: + ValidationIssueCollector.appendException( + ValidationError(message='parent_req is ' + + str(parent_reqs))) + else: + for key in req.keys(): + for req_dict in parent_reqs: + if key in req_dict.keys(): + relationship = (req_dict.get(key). + get('relationship')) + break + if relationship: + found_relationship_tpl = False + # apply available relationship templates if found + if self.available_rel_tpls: + for tpl in self.available_rel_tpls: + if tpl.name == relationship: + rtype = RelationshipType(tpl.type, None, + self.custom_def) + explicit_relation[rtype] = related_tpl + tpl.target = related_tpl + tpl.source = self + self.relationship_tpl.append(tpl) + found_relationship_tpl = True + # create relationship template object. 
+ rel_prfx = self.type_definition.RELATIONSHIP_PREFIX + if not found_relationship_tpl: + if isinstance(relationship, dict): + relationship = relationship.get('type') + if relationship: + if self.available_rel_types and \ + relationship in self.available_rel_types.keys(): + pass + elif not relationship.startswith(rel_prfx): + relationship = rel_prfx + relationship + else: + ValidationIssueCollector.appendException( + MissingRequiredFieldError( + what=_('"relationship" used in template ' + '"%s"') % related_tpl.name, + required=self.TYPE)) + for rtype in self.type_definition.relationship.keys(): + if rtype.type == relationship: + explicit_relation[rtype] = related_tpl + related_tpl._add_relationship_template(req, + rtype.type, + self) + elif self.available_rel_types: + if relationship in self.available_rel_types.keys(): + rel_type_def = self.available_rel_types.\ + get(relationship) + if 'derived_from' in rel_type_def: + super_type = \ + rel_type_def.get('derived_from') + if not super_type.startswith(rel_prfx): + super_type = rel_prfx + super_type + if rtype.type == super_type: + explicit_relation[rtype] = related_tpl + related_tpl.\ + _add_relationship_template( + req, rtype.type, self) + return explicit_relation + + def _add_relationship_template(self, requirement, rtype, source): + req = requirement.copy() + req['type'] = rtype + tpl = RelationshipTemplate(req, rtype, self.custom_def, self, source) + self.relationship_tpl.append(tpl) + + def get_relationship_template(self): + return self.relationship_tpl + + def _add_next(self, nodetpl, relationship): + self.related[nodetpl] = relationship + + @property + def related_nodes(self): + if not self.related: + for relation, node in self.type_definition.relationship.items(): + for tpl in self.templates: + if tpl == node.type: + self.related[NodeTemplate(tpl)] = relation + return self.related.keys() + + def validate(self, tosca_tpl=None): + self._validate_capabilities() + self._validate_requirements() + 
self._validate_properties(self.entity_tpl, self.type_definition) + self._validate_interfaces() + for prop in self.get_properties_objects(): + prop.validate() + + def _validate_requirements(self): + type_requires = self.type_definition.get_all_requirements() + allowed_reqs = ["template"] + if type_requires: + for treq in type_requires: + for key, value in treq.items(): + allowed_reqs.append(key) + if isinstance(value, dict): + for key in value: + allowed_reqs.append(key) + + requires = self.type_definition.get_value(self.REQUIREMENTS, + self.entity_tpl) + if requires: + if not isinstance(requires, list): + ValidationIssueCollector.appendException( + TypeMismatchError( + what='"requirements" of template "%s"' % self.name, + type='list')) + else: + for req in requires: + for r1, value in req.items(): + if isinstance(value, dict): + self._validate_requirements_keys(value) + self._validate_requirements_properties(value) + allowed_reqs.append(r1) + self._common_validate_field(req, allowed_reqs, + 'requirements') + + def _validate_requirements_properties(self, requirements): + # TO-DO(anyone): Only occurrences property of the requirements is + # validated here. Validation of other requirement properties are being + # validated in different files. Better to keep all the requirements + # properties validation here. 
+ for key, value in requirements.items(): + if key == 'occurrences': + self._validate_occurrences(value) + break + + def _validate_occurrences(self, occurrences): + DataEntity.validate_datatype('list', occurrences) + for value in occurrences: + DataEntity.validate_datatype('integer', value) + if len(occurrences) != 2 or not (0 <= occurrences[0] <= occurrences[1]) \ + or occurrences[1] == 0: + ValidationIssueCollector.appendException( + InvalidPropertyValueError(what=(occurrences))) + + def _validate_requirements_keys(self, requirement): + for key in requirement.keys(): + if key not in self.REQUIREMENTS_SECTION: + ValidationIssueCollector.appendException( + UnknownFieldError( + what='"requirements" of template "%s"' % self.name, + field=key)) + + def _validate_interfaces(self): + ifaces = self.type_definition.get_value(self.INTERFACES, + self.entity_tpl) + if ifaces: + for name, value in ifaces.items(): + if name in (LIFECYCLE, LIFECYCLE_SHORTNAME): + self._common_validate_field( + value, InterfacesDef. + interfaces_node_lifecycle_operations, + 'interfaces') + elif name in (CONFIGURE, CONFIGURE_SHORTNAME): + self._common_validate_field( + value, InterfacesDef. 
+ interfaces_relationship_configure_operations, + 'interfaces') + elif name in self.type_definition.interfaces.keys(): + self._common_validate_field( + value, + self._collect_custom_iface_operations(name), + 'interfaces') + else: + ValidationIssueCollector.appendException( + UnknownFieldError( + what='"interfaces" of template "%s"' % + self.name, field=name)) + + def _collect_custom_iface_operations(self, name): + allowed_operations = [] + nodetype_iface_def = self.type_definition.interfaces[name] + allowed_operations.extend(nodetype_iface_def.keys()) + if 'type' in nodetype_iface_def: + iface_type = nodetype_iface_def['type'] + if iface_type in self.type_definition.custom_def: + iface_type_def = self.type_definition.custom_def[iface_type] + else: + iface_type_def = self.type_definition.TOSCA_DEF[iface_type] + allowed_operations.extend(iface_type_def.keys()) + allowed_operations = [op for op in allowed_operations if + op not in INTERFACE_DEF_RESERVED_WORDS] + return allowed_operations + + def _validate_fields(self, nodetemplate): + for name in nodetemplate.keys(): + if name not in self.SECTIONS and name not in self.SPECIAL_SECTIONS: + ValidationIssueCollector.appendException( + UnknownFieldError(what='Node template "%s"' % self.name, + field=name))*/ \ No newline at end of file diff --git a/src/main/java/org/onap/sdc/toscaparser/api/Policy.java b/src/main/java/org/onap/sdc/toscaparser/api/Policy.java new file mode 100644 index 0000000..f7ec967 --- /dev/null +++ b/src/main/java/org/onap/sdc/toscaparser/api/Policy.java @@ -0,0 +1,188 @@ +package org.onap.sdc.toscaparser.api; + +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; + +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.Map; + +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; +import org.onap.sdc.toscaparser.api.utils.ValidateUtils; + +public class Policy extends EntityTemplate { + + + private static final String TYPE = "type"; + private static final 
String METADATA = "metadata"; + private static final String DESCRIPTION = "description"; + private static final String PROPERTIES = "properties"; + private static final String TARGETS = "targets"; + private static final String TRIGGERS = "triggers"; + private static final String SECTIONS[] = { + TYPE, METADATA, DESCRIPTION, PROPERTIES, TARGETS, TRIGGERS}; + + LinkedHashMap metaData; + ArrayList targetsList; // *** a list of NodeTemplate OR a list of Group *** + String targetsType; + ArrayList triggers; + LinkedHashMap properties; + + public Policy(String _name, + LinkedHashMap _policy, +// ArrayList targetObjects, + ArrayList targetObjects, + String _targetsType, + LinkedHashMap _customDef) { + super(_name,_policy,"policy_type",_customDef); + + metaData = null; + if(_policy.get(METADATA) != null) { + metaData = (LinkedHashMap)_policy.get(METADATA); + ValidateUtils.validateMap(metaData); + } + + targetsList = targetObjects; + targetsType = _targetsType; + triggers = _triggers((LinkedHashMap)_policy.get(TRIGGERS)); + properties = null; + if(_policy.get("properties") != null) { + properties = (LinkedHashMap)_policy.get("properties"); + } + _validateKeys(); + } + + public ArrayList getTargets() { + return (ArrayList)entityTpl.get("targets"); + } + + public ArrayList getDescription() { + return (ArrayList)entityTpl.get("description"); + } + + public ArrayList getmetadata() { + return (ArrayList)entityTpl.get("metadata"); + } + + public String getTargetsType() { + return targetsType; + } + +// public ArrayList getTargetsList() { + public ArrayList getTargetsList() { + return targetsList; + } + + // entityTemplate already has a different getProperties... 
+ // this is to access the local properties variable + public LinkedHashMap getPolicyProperties() { + return properties; + } + + private ArrayList _triggers(LinkedHashMap triggers) { + ArrayList triggerObjs = new ArrayList<>(); + if(triggers != null) { + for(Map.Entry me: triggers.entrySet()) { + String tname = me.getKey(); + LinkedHashMap ttriggerTpl = + (LinkedHashMap)me.getValue(); + Triggers triggersObj = new Triggers(tname,ttriggerTpl); + triggerObjs.add(triggersObj); + } + } + return triggerObjs; + } + + private void _validateKeys() { + for(String key: entityTpl.keySet()) { + boolean bFound = false; + for(int i=0; i customDef; + + public Property(String propname, + Object propvalue, + LinkedHashMap propschemaDict, + LinkedHashMap propcustomDef) { + + name = propname; + value = propvalue; + customDef = propcustomDef; + schema = new Schema(propname, propschemaDict); + } + + public String getType() { + return schema.getType(); + } + + public boolean isRequired() { + return schema.isRequired(); + } + + public String getDescription() { + return schema.getDescription(); + } + + public Object getDefault() { + return schema.getDefault(); + } + + public ArrayList getConstraints() { + return schema.getConstraints(); + } + + public LinkedHashMap getEntrySchema() { + return schema.getEntrySchema(); + } + + + public String getName() { + return name; + } + + public Object getValue() { + return value; + } + + // setter + public Object setValue(Object vob) { + value = vob; + return value; + } + + public void validate() { + // Validate if not a reference property + if(!Function.isFunction(value)) { + if(getType().equals(Schema.STRING)) { + value = value.toString(); + } + value = DataEntity.validateDatatype(getType(),value, + getEntrySchema(), + customDef, + name); + _validateConstraints(); + } + } + + private void _validateConstraints() { + if(getConstraints() != null) { + for(Constraint constraint: getConstraints()) { + constraint.validate(value); + } + } + } + + @Override + 
public String toString() { + return "Property{" + + "name='" + name + '\'' + + ", value=" + value + + ", schema=" + schema + + ", customDef=" + customDef + + '}'; + } +} + +/*python + +class Property(object): + '''TOSCA built-in Property type.''' + + PROPERTY_KEYS = ( + TYPE, REQUIRED, DESCRIPTION, DEFAULT, CONSTRAINTS + ) = ( + 'type', 'required', 'description', 'default', 'constraints' + ) + + ENTRY_SCHEMA_KEYS = ( + ENTRYTYPE, ENTRYPROPERTIES + ) = ( + 'type', 'properties' + ) + + def __init__(self, property_name, value, schema_dict, custom_def=None): + self.name = property_name + self.value = value + self.custom_def = custom_def + self.schema = Schema(property_name, schema_dict) + + @property + def type(self): + return self.schema.type + + @property + def required(self): + return self.schema.required + + @property + def description(self): + return self.schema.description + + @property + def default(self): + return self.schema.default + + @property + def constraints(self): + return self.schema.constraints + + @property + def entry_schema(self): + return self.schema.entry_schema + + def validate(self): + '''Validate if not a reference property.''' + if not is_function(self.value): + if self.type == Schema.STRING: + self.value = str(self.value) + self.value = DataEntity.validate_datatype(self.type, self.value, + self.entry_schema, + self.custom_def, + self.name) + self._validate_constraints() + + def _validate_constraints(self): + if self.constraints: + for constraint in self.constraints: + constraint.validate(self.value) +*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/RelationshipTemplate.java b/src/main/java/org/onap/sdc/toscaparser/api/RelationshipTemplate.java new file mode 100644 index 0000000..a94caed --- /dev/null +++ b/src/main/java/org/onap/sdc/toscaparser/api/RelationshipTemplate.java @@ -0,0 +1,199 @@ +package org.onap.sdc.toscaparser.api; + +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.Map; + +import 
org.onap.sdc.toscaparser.api.elements.PropertyDef; +import org.onap.sdc.toscaparser.api.elements.StatefulEntityType; +import org.onap.sdc.toscaparser.api.elements.EntityType; + +public class RelationshipTemplate extends EntityTemplate { + + private static final String DERIVED_FROM = "derived_from"; + private static final String PROPERTIES = "properties"; + private static final String REQUIREMENTS = "requirements"; + private static final String INTERFACES = "interfaces"; + private static final String CAPABILITIES = "capabilities"; + private static final String TYPE = "type"; + @SuppressWarnings("unused") + private static final String SECTIONS[] = { + DERIVED_FROM, PROPERTIES, REQUIREMENTS, INTERFACES, CAPABILITIES, TYPE}; + + private String name; + private NodeTemplate target; + private NodeTemplate source; + private ArrayList _properties; + + public RelationshipTemplate(LinkedHashMap rtrelationshipTemplate, + String rtname, + LinkedHashMap rtcustomDef, + NodeTemplate rttarget, + NodeTemplate rtsource) { + super(rtname,rtrelationshipTemplate,"relationship_type",rtcustomDef); + + name = rtname; + target = rttarget; + source = rtsource; + _properties = null; + } + + public ArrayList getPropertiesObjects() { + // Return properties objects for this template + if(_properties == null) { + _properties = _createRelationshipProperties(); + } + return _properties; + } + + @SuppressWarnings({ "unchecked", "unused" }) + public ArrayList _createRelationshipProperties() { + ArrayList props = new ArrayList (); + LinkedHashMap properties = new LinkedHashMap(); + LinkedHashMap relationship = (LinkedHashMap)entityTpl.get("relationship"); + + if(relationship == null) { + for(Object val: entityTpl.values()) { + if(val instanceof LinkedHashMap) { + relationship = (LinkedHashMap)((LinkedHashMap)val).get("relationship"); + break; + } + } + } + + if(relationship != null) { + properties = (LinkedHashMap)((EntityType)typeDefinition).getValue(PROPERTIES,relationship,false); + } + 
if(properties == null) { + properties = new LinkedHashMap(); + } + if(properties == null) { + properties = (LinkedHashMap)entityTpl.get(PROPERTIES); + } + if(properties == null) { + properties = new LinkedHashMap(); + } + + if(properties != null) { + for(Map.Entry me: properties.entrySet()) { + String pname = me.getKey(); + Object pvalue = me.getValue(); + LinkedHashMap propsDef = ((StatefulEntityType)typeDefinition).getPropertiesDef(); + if(propsDef != null && propsDef.get(pname) != null) { + if(properties.get(pname) != null) { + pvalue = properties.get(name); + } + PropertyDef pd = (PropertyDef)propsDef.get(pname); + Property prop = new Property(pname,pvalue,pd.getSchema(),customDef); + props.add(prop); + } + } + } + ArrayList pds = ((StatefulEntityType)typeDefinition).getPropertiesDefObjects(); + for(PropertyDef p: pds) { + if(p.getDefault() != null && properties.get(p.getName()) == null) { + Property prop = new Property(p.getName(), (LinkedHashMap)p.getDefault(), p.getSchema(), customDef); + props.add(prop); + } + } + return props; + } + + public void validate() { + _validateProperties(entityTpl,(StatefulEntityType)typeDefinition); + } + + // getters/setters + public NodeTemplate getTarget() { + return target; + } + + public NodeTemplate getSource() { + return source; + } + + public void setSource(NodeTemplate nt) { + source = nt; + } + + public void setTarget(NodeTemplate nt) { + target = nt; + } + + @Override + public String toString() { + return "RelationshipTemplate{" + + "name='" + name + '\'' + + ", target=" + target.getName() + + ", source=" + source.getName() + + ", _properties=" + _properties + + '}'; + } + +} + +/*python + +from toscaparser.entity_template import EntityTemplate +from toscaparser.properties import Property + +SECTIONS = (DERIVED_FROM, PROPERTIES, REQUIREMENTS, + INTERFACES, CAPABILITIES, TYPE) = \ + ('derived_from', 'properties', 'requirements', 'interfaces', + 'capabilities', 'type') + +log = logging.getLogger('tosca') + + +class 
RelationshipTemplate(EntityTemplate): + '''Relationship template.''' + def __init__(self, relationship_template, name, custom_def=None, + target=None, source=None): + super(RelationshipTemplate, self).__init__(name, + relationship_template, + 'relationship_type', + custom_def) + self.name = name.lower() + self.target = target + self.source = source + + def get_properties_objects(self): + '''Return properties objects for this template.''' + if self._properties is None: + self._properties = self._create_relationship_properties() + return self._properties + + def _create_relationship_properties(self): + props = [] + properties = {} + relationship = self.entity_tpl.get('relationship') + + if not relationship: + for value in self.entity_tpl.values(): + if isinstance(value, dict): + relationship = value.get('relationship') + break + + if relationship: + properties = self.type_definition.get_value(self.PROPERTIES, + relationship) or {} + if not properties: + properties = self.entity_tpl.get(self.PROPERTIES) or {} + + if properties: + for name, value in properties.items(): + props_def = self.type_definition.get_properties_def() + if props_def and name in props_def: + if name in properties.keys(): + value = properties.get(name) + prop = Property(name, value, + props_def[name].schema, self.custom_def) + props.append(prop) + for p in self.type_definition.get_properties_def_objects(): + if p.default is not None and p.name not in properties.keys(): + prop = Property(p.name, p.default, p.schema, self.custom_def) + props.append(prop) + return props + + def validate(self): + self._validate_properties(self.entity_tpl, self.type_definition)*/ \ No newline at end of file diff --git a/src/main/java/org/onap/sdc/toscaparser/api/Repository.java b/src/main/java/org/onap/sdc/toscaparser/api/Repository.java new file mode 100644 index 0000000..5bed453 --- /dev/null +++ b/src/main/java/org/onap/sdc/toscaparser/api/Repository.java @@ -0,0 +1,117 @@ +package org.onap.sdc.toscaparser.api; + 
+import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; +import org.onap.sdc.toscaparser.api.utils.UrlUtils; + +import java.util.LinkedHashMap; + +public class Repository { + + private static final String DESCRIPTION = "description"; + private static final String URL = "url"; + private static final String CREDENTIAL = "credential"; + private static final String SECTIONS[] ={DESCRIPTION, URL, CREDENTIAL}; + + private String name; + private Object reposit; + private String url; + + @SuppressWarnings("unchecked") + public Repository(String repName,Object repValue) { + name = repName; + reposit = repValue; + if(reposit instanceof LinkedHashMap) { + url = (String)((LinkedHashMap)reposit).get("url"); + if(url == null) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE229", String.format( + "MissingRequiredFieldError: Repository \"%s\" is missing required field \"url\"", + name))); + } + } + loadAndValidate(name,reposit); + } + + @SuppressWarnings("unchecked") + private void loadAndValidate(String val,Object repositDef) { + String keyname = val; + if(repositDef instanceof LinkedHashMap) { + for(String key: ((LinkedHashMap)reposit).keySet()) { + boolean bFound = false; + for(String sect: SECTIONS) { + if(key.equals(sect)) { + bFound = true; + break; + } + } + if(!bFound) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE230", String.format( + "UnknownFieldError: repositories \"%s\" contains unknown field \"%s\"", + keyname,key))); + } + } + + String repositUrl = (String)((LinkedHashMap)repositDef).get("url"); + if(repositUrl != null) { + boolean urlVal = UrlUtils.validateUrl(repositUrl); + if(!urlVal) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE231", String.format( + "URLException: repsositories \"%s\" Invalid Url",keyname))); + } + } + } + } + + @Override + public String 
toString() { + return "Repository{" + + "name='" + name + '\'' + + ", reposit=" + reposit + + ", url='" + url + '\'' + + '}'; + } +} + +/*python + +from toscaparser.common.exception import ValidationIssueCollector +from toscaparser.common.exception import MissingRequiredFieldError +from toscaparser.common.exception import UnknownFieldError +from toscaparser.common.exception import URLException +from toscaparser.utils.gettextutils import _ +import org.openecomp.sdc.toscaparser.api.utils.urlutils + +SECTIONS = (DESCRIPTION, URL, CREDENTIAL) = \ + ('description', 'url', 'credential') + + +class Repository(object): + def __init__(self, repositories, values): + self.name = repositories + self.reposit = values + if isinstance(self.reposit, dict): + if 'url' not in self.reposit.keys(): + ValidationIssueCollector.appendException( + MissingRequiredFieldError(what=_('Repository "%s"') + % self.name, required='url')) + self.url = self.reposit['url'] + self.load_and_validate(self.name, self.reposit) + + def load_and_validate(self, val, reposit_def): + self.keyname = val + if isinstance(reposit_def, dict): + for key in reposit_def.keys(): + if key not in SECTIONS: + ValidationIssueCollector.appendException( + UnknownFieldError(what=_('repositories "%s"') + % self.keyname, field=key)) + + if URL in reposit_def.keys(): + reposit_url = reposit_def.get(URL) + url_val = toscaparser.utils.urlutils.UrlUtils.\ + validate_url(reposit_url) + if url_val is not True: + ValidationIssueCollector.appendException( + URLException(what=_('repsositories "%s" Invalid Url') + % self.keyname)) +*/ \ No newline at end of file diff --git a/src/main/java/org/onap/sdc/toscaparser/api/RequirementAssignment.java b/src/main/java/org/onap/sdc/toscaparser/api/RequirementAssignment.java new file mode 100644 index 0000000..1b4e243 --- /dev/null +++ b/src/main/java/org/onap/sdc/toscaparser/api/RequirementAssignment.java @@ -0,0 +1,85 @@ +package org.onap.sdc.toscaparser.api; + +import java.util.Map; + +public 
class RequirementAssignment { + + private String name; + private String nodeName; + private String capabilityName; + private Object relationship; + + public RequirementAssignment(String reqName, String nodeName) { + this.name = reqName; + this.nodeName = nodeName; + } + + public RequirementAssignment(String reqName, String nodeName, String capabilityName) { + this.name = reqName; + this.nodeName = nodeName; + this.capabilityName = capabilityName; + } + + public RequirementAssignment(String reqName, String nodeName, String capabilityName, Object relationship) { + this.name = reqName; + this.nodeName = nodeName; + this.capabilityName = capabilityName; + this.relationship = relationship; + } + + /** + * Get the name for requirement assignment. + * @return the name for requirement assignment. + */ + public String getName() { + return name; + } + + /** + * Set the name for requirement + * @param name - the name for requirement to set + */ + public void setName(String name) { + this.name = name; + } + + /** + * Get the node name for requirement assignment. + * @return the node name for requirement + */ + public String getNodeTemplateName() { + return nodeName; + } + + /** + * Set the node name for requirement + * @param nodeName - the node name for requirement to set + */ + public void setNodeTemplateName(String nodeName) { + this.nodeName = nodeName; + } + + /** + * Get the capability name for requirement assignment. + * @return the capability name for requirement + */ + public String getCapabilityName() { + return capabilityName; + } + + /** + * Set the capability name for requirement assignment. 
+ * @param capabilityName - the capability name for requirement to set + */ + public void setCapabilityName(String capabilityName) { + this.capabilityName = capabilityName; + } + + /** + * Get the relationship object for requirement + * @return the relationship object for requirement + */ + public Object getRelationship() { + return relationship; + } +} diff --git a/src/main/java/org/onap/sdc/toscaparser/api/RequirementAssignments.java b/src/main/java/org/onap/sdc/toscaparser/api/RequirementAssignments.java new file mode 100644 index 0000000..b6b9ea4 --- /dev/null +++ b/src/main/java/org/onap/sdc/toscaparser/api/RequirementAssignments.java @@ -0,0 +1,37 @@ +package org.onap.sdc.toscaparser.api; + +import java.util.ArrayList; +import java.util.List; +import java.util.stream.Collectors; + +public class RequirementAssignments { + + private List requirementAssignmentList; + + public RequirementAssignments(List requirementAssignments) { + this.requirementAssignmentList = requirementAssignments != null ? new ArrayList<>(requirementAssignments) : new ArrayList<>(); + } + + /** + * Get all requirement assignments for Node Template.
+ * This object can be either the original one, holding all requirement assignments for this node template,or a filtered one, holding a filtered subset.
+ * @return list of requirement assignments for the node template.
+ * If there are no requirement assignments, empty list is returned. + */ + public List getAll() { + return new ArrayList<>(requirementAssignmentList); + } + + /** + * Filter requirement assignments by requirement name. + * @param reqName - The name of requirement + * @return RequirementAssignments object, containing requirement assignments of this type.
+ * If no such found, filtering will result in an empty collection. + */ + public RequirementAssignments getRequirementsByName(String reqName) { + List requirementAssignments = requirementAssignmentList.stream() + .filter(req -> req.getName().equals(reqName)).collect(Collectors.toList()); + + return new RequirementAssignments(requirementAssignments); + } +} diff --git a/src/main/java/org/onap/sdc/toscaparser/api/SubstitutionMappings.java b/src/main/java/org/onap/sdc/toscaparser/api/SubstitutionMappings.java new file mode 100644 index 0000000..a87ea6c --- /dev/null +++ b/src/main/java/org/onap/sdc/toscaparser/api/SubstitutionMappings.java @@ -0,0 +1,519 @@ +package org.onap.sdc.toscaparser.api; + +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.elements.NodeType; +import org.onap.sdc.toscaparser.api.elements.PropertyDef; +import org.onap.sdc.toscaparser.api.parameters.Input; +import org.onap.sdc.toscaparser.api.parameters.Output; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +import java.util.ArrayList; +import java.util.HashSet; +import java.util.LinkedHashMap; +import java.util.List; + + +public class SubstitutionMappings { + // SubstitutionMappings class declaration + + // SubstitutionMappings exports the topology template as an + // implementation of a Node type. 
+ + private static final String NODE_TYPE = "node_type"; + private static final String REQUIREMENTS = "requirements"; + private static final String CAPABILITIES = "capabilities"; + + private static final String SECTIONS[] = {NODE_TYPE, REQUIREMENTS, CAPABILITIES}; + + private static final String OPTIONAL_OUTPUTS[] = {"tosca_id", "tosca_name", "state"}; + + private LinkedHashMap subMappingDef; + private ArrayList nodetemplates; + private ArrayList inputs; + private ArrayList outputs; + private ArrayList groups; + private NodeTemplate subMappedNodeTemplate; + private LinkedHashMap customDefs; + private LinkedHashMap _capabilities; + private LinkedHashMap _requirements; + + public SubstitutionMappings(LinkedHashMap smsubMappingDef, + ArrayList smnodetemplates, + ArrayList sminputs, + ArrayList smoutputs, + ArrayList smgroups, + NodeTemplate smsubMappedNodeTemplate, + LinkedHashMap smcustomDefs) { + + subMappingDef = smsubMappingDef; + nodetemplates = smnodetemplates; + inputs = sminputs != null ? sminputs : new ArrayList(); + outputs = smoutputs != null ? smoutputs : new ArrayList(); + groups = smgroups != null ? smgroups : new ArrayList(); + subMappedNodeTemplate = smsubMappedNodeTemplate; + customDefs = smcustomDefs != null ? 
smcustomDefs : new LinkedHashMap(); + _validate(); + + _capabilities = null; + _requirements = null; + } + + public String getType() { + if(subMappingDef != null) { + return (String)subMappingDef.get(NODE_TYPE); + } + return null; + } + + public ArrayList getNodeTemplates() { + return nodetemplates; + } + + /* + @classmethod + def get_node_type(cls, sub_mapping_def): + if isinstance(sub_mapping_def, dict): + return sub_mapping_def.get(cls.NODE_TYPE) + */ + + public static String stGetNodeType(LinkedHashMap _subMappingDef) { + if(_subMappingDef instanceof LinkedHashMap) { + return (String)_subMappingDef.get(NODE_TYPE); + } + return null; + } + + public String getNodeType() { + return (String)subMappingDef.get(NODE_TYPE); + } + + public ArrayList getInputs() { + return inputs; + } + + public ArrayList getGroups() { + return groups; + } + + public LinkedHashMap getCapabilities() { + return (LinkedHashMap)subMappingDef.get(CAPABILITIES); + } + + public LinkedHashMap getRequirements() { + return (LinkedHashMap)subMappingDef.get(REQUIREMENTS); + } + + public NodeType getNodeDefinition() { + return new NodeType(getNodeType(), customDefs); + } + + private void _validate() { + // Basic validation + _validateKeys(); + _validateType(); + + // SubstitutionMapping class syntax validation + _validateInputs(); + _validateCapabilities(); + _validateRequirements(); + _validateOutputs(); + } + + private void _validateKeys() { + // validate the keys of substitution mappings + for(String key: subMappingDef.keySet()) { + boolean bFound = false; + for(String s: SECTIONS) { + if(s.equals(key)) { + bFound = true; + break; + } + } + if(!bFound) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE232", String.format( + "UnknownFieldError: SubstitutionMappings contain unknown field \"%s\"", + key))); + } + } + } + + private void _validateType() { + // validate the node_type of substitution mappings + String nodeType = (String)subMappingDef.get(NODE_TYPE); 
+ if(nodeType == null) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE233", String.format( + "MissingRequiredFieldError: SubstitutionMappings used in topology_template is missing required field \"%s\"", + NODE_TYPE))); + } + Object nodeTypeDef = customDefs.get(nodeType); + if(nodeTypeDef == null) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE234", String.format( + "InvalidNodeTypeError: \"%s\" is invalid",nodeType))); + } + } + + private void _validateInputs() { + // validate the inputs of substitution mappings. + + // The inputs defined by the topology template have to match the + // properties of the node type or the substituted node. If there are + // more inputs than the substituted node has properties, default values + //must be defined for those inputs. + + HashSet allInputs = new HashSet<>(); + for(Input inp: inputs) { + allInputs.add(inp.getName()); + } + HashSet requiredProperties = new HashSet<>(); + for(PropertyDef pd: getNodeDefinition().getPropertiesDefObjects()) { + if(pd.isRequired() && pd.getDefault() == null) { + requiredProperties.add(pd.getName()); + } + } + // Must provide inputs for required properties of node type. + for(String property: requiredProperties) { + // Check property which is 'required' and has no 'default' value + if(!allInputs.contains(property)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE235", String.format( + "MissingRequiredInputError: SubstitutionMappings with node_type \"%s\" is missing required input \"%s\"", + getNodeType(),property))); + } + } + // If the optional properties of node type need to be customized by + // substituted node, it also is necessary to define inputs for them, + // otherwise they are not mandatory to be defined. 
+ HashSet customizedParameters = new HashSet<>(); + if(subMappedNodeTemplate != null) { + customizedParameters.addAll(subMappedNodeTemplate.getProperties().keySet()); + } + HashSet allProperties = new HashSet( + getNodeDefinition().getPropertiesDef().keySet()); + HashSet diffset = customizedParameters; + diffset.removeAll(allInputs); + for(String parameter: diffset) { + if(allProperties.contains(parameter)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE236", String.format( + "MissingRequiredInputError: SubstitutionMappings with node_type \"%s\" is missing required input \"%s\"", + getNodeType(),parameter))); + } + } + // Additional inputs are not in the properties of node type must + // provide default values. Currently the scenario may not happen + // because of parameters validation in nodetemplate, here is a + // guarantee. + for(Input inp: inputs) { + diffset = allInputs; + diffset.removeAll(allProperties); + if(diffset.contains(inp.getName()) && inp.getDefault() == null) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE237", String.format( + "MissingRequiredInputError: SubstitutionMappings with node_type \"%s\" is missing rquired input \"%s\"", + getNodeType(),inp.getName()))); + } + } + } + + private void _validateCapabilities() { + // validate the capabilities of substitution mappings + + // The capabilities must be in node template which be mapped. 
+ LinkedHashMap tplsCapabilities = + (LinkedHashMap)subMappingDef.get(CAPABILITIES); + List nodeCapabilities = null; + if(subMappedNodeTemplate != null) { + nodeCapabilities = subMappedNodeTemplate.getCapabilities().getAll(); + } + if(nodeCapabilities != null) { + for(CapabilityAssignment cap: nodeCapabilities) { + if(tplsCapabilities != null && tplsCapabilities.get(cap.getName()) == null) { + ; //pass + // ValidationIssueCollector.appendException( + // UnknownFieldError(what='SubstitutionMappings', + // field=cap)) + } + } + } + } + + private void _validateRequirements() { + // validate the requirements of substitution mappings + //***************************************************** + //TO-DO - Different from Python code!! one is a bug... + //***************************************************** + // The requirements must be in node template which be mapped. + LinkedHashMap tplsRequirements = + (LinkedHashMap)subMappingDef.get(REQUIREMENTS); + List nodeRequirements = null; + if(subMappedNodeTemplate != null) { + nodeRequirements = subMappedNodeTemplate.getRequirements().getAll(); + } + if(nodeRequirements != null) { + for(RequirementAssignment ro: nodeRequirements) { + String cap = ro.getName(); + if(tplsRequirements != null && tplsRequirements.get(cap) == null) { + ; //pass + // ValidationIssueCollector.appendException( + // UnknownFieldError(what='SubstitutionMappings', + // field=cap)) + } + } + } + } + + private void _validateOutputs() { + // validate the outputs of substitution mappings. + + // The outputs defined by the topology template have to match the + // attributes of the node type or the substituted node template, + // and the observable attributes of the substituted node template + // have to be defined as attributes of the node type or outputs in + // the topology template. 
+ + // The outputs defined by the topology template have to match the + // attributes of the node type according to the specification, but + // it's reasonable that there are more inputs than the node type + // has properties, the specification will be amended? + + for(Output output: outputs) { + Object ado = getNodeDefinition().getAttributesDef(); + if(ado != null && ((LinkedHashMap)ado).get(output.getName()) == null) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE238", String.format( + "UnknownOutputError: Unknown output \"%s\" in SubstitutionMappings with node_type \"%s\"", + output.getName(),getNodeType()))); + } + } + } + + @Override + public String toString() { + return "SubstitutionMappings{" + +// "subMappingDef=" + subMappingDef + +// ", nodetemplates=" + nodetemplates + +// ", inputs=" + inputs + +// ", outputs=" + outputs + +// ", groups=" + groups + + ", subMappedNodeTemplate=" + (subMappedNodeTemplate==null?"":subMappedNodeTemplate.getName()) + +// ", customDefs=" + customDefs + +// ", _capabilities=" + _capabilities + +// ", _requirements=" + _requirements + + '}'; + } + + @Deprecated + public String toLimitedString() { + return "SubstitutionMappings{" + + "subMappingDef=" + subMappingDef + + ", nodetemplates=" + nodetemplates + + ", inputs=" + inputs + + ", outputs=" + outputs + + ", groups=" + groups + + ", subMappedNodeTemplate=" + (subMappedNodeTemplate==null?"":subMappedNodeTemplate.getName()) + + ", customDefs=" + customDefs + + ", _capabilities=" + _capabilities + + ", _requirements=" + _requirements + + '}'; + } +} + + +/*python + +from toscaparser.common.exception import ValidationIssueCollector +from toscaparser.common.exception import InvalidNodeTypeError +from toscaparser.common.exception import MissingDefaultValueError +from toscaparser.common.exception import MissingRequiredFieldError +from toscaparser.common.exception import MissingRequiredInputError +from toscaparser.common.exception import 
UnknownFieldError +from toscaparser.common.exception import UnknownOutputError +from toscaparser.elements.nodetype import NodeType +from toscaparser.utils.gettextutils import _ + +log = logging.getLogger('tosca') + + +class SubstitutionMappings(object): + '''SubstitutionMappings class declaration + + SubstitutionMappings exports the topology template as an + implementation of a Node type. + ''' + + SECTIONS = (NODE_TYPE, REQUIREMENTS, CAPABILITIES) = \ + ('node_type', 'requirements', 'capabilities') + + OPTIONAL_OUTPUTS = ['tosca_id', 'tosca_name', 'state'] + + def __init__(self, sub_mapping_def, nodetemplates, inputs, outputs, + sub_mapped_node_template, custom_defs): + self.nodetemplates = nodetemplates + self.sub_mapping_def = sub_mapping_def + self.inputs = inputs or [] + self.outputs = outputs or [] + self.sub_mapped_node_template = sub_mapped_node_template + self.custom_defs = custom_defs or {} + self._validate() + + self._capabilities = None + self._requirements = None + + @property + def type(self): + if self.sub_mapping_def: + return self.sub_mapping_def.get(self.NODE_TYPE) + + @classmethod + def get_node_type(cls, sub_mapping_def): + if isinstance(sub_mapping_def, dict): + return sub_mapping_def.get(cls.NODE_TYPE) + + @property + def node_type(self): + return self.sub_mapping_def.get(self.NODE_TYPE) + + @property + def capabilities(self): + return self.sub_mapping_def.get(self.CAPABILITIES) + + @property + def requirements(self): + return self.sub_mapping_def.get(self.REQUIREMENTS) + + @property + def node_definition(self): + return NodeType(self.node_type, self.custom_defs) + + def _validate(self): + # Basic validation + self._validate_keys() + self._validate_type() + + # SubstitutionMapping class syntax validation + self._validate_inputs() + self._validate_capabilities() + self._validate_requirements() + self._validate_outputs() + + def _validate_keys(self): + """validate the keys of substitution mappings.""" + for key in self.sub_mapping_def.keys(): + 
if key not in self.SECTIONS: + ValidationIssueCollector.appendException( + UnknownFieldError(what=_('SubstitutionMappings'), + field=key)) + + def _validate_type(self): + """validate the node_type of substitution mappings.""" + node_type = self.sub_mapping_def.get(self.NODE_TYPE) + if not node_type: + ValidationIssueCollector.appendException( + MissingRequiredFieldError( + what=_('SubstitutionMappings used in topology_template'), + required=self.NODE_TYPE)) + + node_type_def = self.custom_defs.get(node_type) + if not node_type_def: + ValidationIssueCollector.appendException( + InvalidNodeTypeError(what=node_type)) + + def _validate_inputs(self): + """validate the inputs of substitution mappings. + + The inputs defined by the topology template have to match the + properties of the node type or the substituted node. If there are + more inputs than the substituted node has properties, default values + must be defined for those inputs. + """ + + all_inputs = set([input.name for input in self.inputs]) + required_properties = set([p.name for p in + self.node_definition. + get_properties_def_objects() + if p.required and p.default is None]) + # Must provide inputs for required properties of node type. + for property in required_properties: + # Check property which is 'required' and has no 'default' value + if property not in all_inputs: + ValidationIssueCollector.appendException( + MissingRequiredInputError( + what=_('SubstitutionMappings with node_type ') + + self.node_type, + input_name=property)) + + # If the optional properties of node type need to be customized by + # substituted node, it also is necessary to define inputs for them, + # otherwise they are not mandatory to be defined. 
# The capabilities must be in the node template which is mapped.
# The requirements must be in the node template which is mapped.
+ for output in self.outputs: + if output.name not in self.node_definition.get_attributes_def(): + ValidationIssueCollector.appendException( + UnknownOutputError( + where=_('SubstitutionMappings with node_type ') + + self.node_type, + output_name=output.name))*/ \ No newline at end of file diff --git a/src/main/java/org/onap/sdc/toscaparser/api/TopologyTemplate.java b/src/main/java/org/onap/sdc/toscaparser/api/TopologyTemplate.java new file mode 100644 index 0000000..e2c268e --- /dev/null +++ b/src/main/java/org/onap/sdc/toscaparser/api/TopologyTemplate.java @@ -0,0 +1,858 @@ +package org.onap.sdc.toscaparser.api; + +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.elements.InterfacesDef; +import org.onap.sdc.toscaparser.api.elements.NodeType; +import org.onap.sdc.toscaparser.api.elements.RelationshipType; +import org.onap.sdc.toscaparser.api.functions.Function; +import org.onap.sdc.toscaparser.api.functions.GetAttribute; +import org.onap.sdc.toscaparser.api.functions.GetInput; +import org.onap.sdc.toscaparser.api.parameters.Input; +import org.onap.sdc.toscaparser.api.parameters.Output; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +import java.util.ArrayList; +import java.util.HashSet; +import java.util.LinkedHashMap; +import java.util.Map; + +public class TopologyTemplate { + + private static final String DESCRIPTION = "description"; + private static final String INPUTS = "inputs"; + private static final String NODE_TEMPLATES = "node_templates"; + private static final String RELATIONSHIP_TEMPLATES = "relationship_templates"; + private static final String OUTPUTS = "outputs"; + private static final String GROUPS = "groups"; + private static final String SUBSTITUTION_MAPPINGS = "substitution_mappings"; + private static final String POLICIES = "policies"; + private static final String METADATA = "metadata"; + + private static String SECTIONS[] = { + DESCRIPTION, INPUTS, NODE_TEMPLATES, 
RELATIONSHIP_TEMPLATES, + OUTPUTS, GROUPS, SUBSTITUTION_MAPPINGS, POLICIES, METADATA + }; + + private LinkedHashMap tpl; + LinkedHashMap metaData; + private ArrayList inputs; + private ArrayList outputs; + private ArrayList relationshipTemplates; + private ArrayList nodeTemplates; + private LinkedHashMap customDefs; + private LinkedHashMap relTypes;//TYPE + private NodeTemplate subMappedNodeTemplate; + private ArrayList groups; + private ArrayList policies; + private LinkedHashMap parsedParams = null;//TYPE + private String description; + private ToscaGraph graph; + private SubstitutionMappings substitutionMappings; + private boolean resolveGetInput; + + public TopologyTemplate( + LinkedHashMap _template, + LinkedHashMap _customDefs, + LinkedHashMap _relTypes,//TYPE + LinkedHashMap _parsedParams, + NodeTemplate _subMappedNodeTemplate, + boolean _resolveGetInput) { + + tpl = _template; + if(tpl != null) { + subMappedNodeTemplate = _subMappedNodeTemplate; + metaData = _metaData(); + customDefs = _customDefs; + relTypes = _relTypes; + parsedParams = _parsedParams; + resolveGetInput = _resolveGetInput; + _validateField(); + description = _tplDescription(); + inputs = _inputs(); + relationshipTemplates =_relationshipTemplates(); + nodeTemplates = _nodeTemplates(); + outputs = _outputs(); + if(nodeTemplates != null) { + graph = new ToscaGraph(nodeTemplates); + } + groups = _groups(); + policies = _policies(); + _processIntrinsicFunctions(); + substitutionMappings = _substitutionMappings(); + } + } + + @SuppressWarnings("unchecked") + private ArrayList _inputs() { + //DumpUtils.dumpYaml(customDefs,0); + ArrayList alInputs = new ArrayList<>(); + for(String name: _tplInputs().keySet()) { + Object attrs = _tplInputs().get(name); + Input input = new Input(name,(LinkedHashMap)attrs,customDefs); + if(parsedParams != null && parsedParams.get(name) != null) { + input.validate(parsedParams.get(name)); + } + else { + Object _default = input.getDefault(); + if(_default != null) { + 
input.validate(_default); + } + } + if((parsedParams != null && parsedParams.get(input.getName()) == null || parsedParams == null) + && input.isRequired() && input.getDefault() == null) { + System.out.format("Log warning: The required parameter \"%s\" is not provided\n",input.getName()); + } + alInputs.add(input); + } + return alInputs; + + } + + private LinkedHashMap _metaData() { + if(tpl.get(METADATA) != null) { + return (LinkedHashMap)tpl.get(METADATA); + } + else { + return new LinkedHashMap(); + } + + } + + private ArrayList _nodeTemplates() { + ArrayList alNodeTemplates = new ArrayList<>(); + LinkedHashMap tpls = _tplNodeTemplates(); + if(tpls != null) { + for(String name: tpls.keySet()) { + NodeTemplate tpl = new NodeTemplate(name, + tpls, + customDefs, + relationshipTemplates, + relTypes); + if(tpl.getTypeDefinition() != null) { + boolean b = NodeType.TOSCA_DEF.get(tpl.getType()) != null; + if(b || (tpl.getCustomDef() != null && !tpl.getCustomDef().isEmpty())) { + tpl.validate(); + alNodeTemplates.add(tpl); + } + } + } + } + return alNodeTemplates; + } + + @SuppressWarnings("unchecked") + private ArrayList _relationshipTemplates() { + ArrayList alRelationshipTemplates = new ArrayList<>(); + LinkedHashMap tpls = _tplRelationshipTemplates(); + if(tpls != null) { + for(String name: tpls.keySet()) { + RelationshipTemplate tpl = new RelationshipTemplate( + (LinkedHashMap)tpls.get(name),name,customDefs,null,null); + + alRelationshipTemplates.add(tpl); + } + } + return alRelationshipTemplates; + } + + private ArrayList _outputs() { + ArrayList alOutputs = new ArrayList<>(); + for(Map.Entry me: _tplOutputs().entrySet()) { + String oname = me.getKey(); + LinkedHashMap oattrs = (LinkedHashMap)me.getValue(); + Output o = new Output(oname,oattrs); + o.validate(); + alOutputs.add(o); + } + return alOutputs; + } + + private SubstitutionMappings _substitutionMappings() { + LinkedHashMap tplSubstitutionMapping = (LinkedHashMap) _tplSubstitutionMappings(); + + //*** the 
commenting-out below and the weaker condition are in the Python source + // #if tpl_substitution_mapping and self.sub_mapped_node_template: + if(tplSubstitutionMapping != null && tplSubstitutionMapping.size() > 0) { + return new SubstitutionMappings(tplSubstitutionMapping, + nodeTemplates, + inputs, + outputs, + groups, + subMappedNodeTemplate, + customDefs); + } + return null; + + } + + @SuppressWarnings("unchecked") + private ArrayList _policies() { + ArrayList alPolicies = new ArrayList<>(); + for(Object po: _tplPolicies()) { + LinkedHashMap policy = (LinkedHashMap)po; + for(Map.Entry me: policy.entrySet()) { + String policyName = me.getKey(); + LinkedHashMap policyTpl = (LinkedHashMap)me.getValue(); + ArrayList targetList = (ArrayList)policyTpl.get("targets"); + //ArrayList targetObjects = new ArrayList<>(); + ArrayList targetNodes = new ArrayList<>(); + ArrayList targetObjects = new ArrayList<>(); + ArrayList targetGroups = new ArrayList<>(); + String targetsType = "groups"; + if(targetList != null && targetList.size() >= 1) { + targetGroups = _getPolicyGroups(targetList); + if(targetGroups == null) { + targetsType = "node_templates"; + targetNodes = _getGroupMembers(targetList); + for(NodeTemplate nt: targetNodes) { + targetObjects.add(nt); + } + } + else { + for(Group gr: targetGroups) { + targetObjects.add(gr); + } + } + } + Policy policyObj = new Policy(policyName, + policyTpl, + targetObjects, + targetsType, + customDefs); + alPolicies.add(policyObj); + } + } + return alPolicies; + } + + private ArrayList _groups() { + ArrayList groups = new ArrayList<>(); + ArrayList memberNodes = null; + for(Map.Entry me: _tplGroups().entrySet()) { + String groupName = me.getKey(); + LinkedHashMap groupTpl = (LinkedHashMap)me.getValue(); + ArrayList memberNames = (ArrayList)groupTpl.get("members"); + if(memberNames != null) { + DataEntity.validateDatatype("list", memberNames,null,null,null); + if(memberNames.size() < 1 || + (new HashSet(memberNames)).size() != 
memberNames.size()) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE005",String.format( + "InvalidGroupTargetException: Member nodes \"%s\" should be >= 1 and not repeated", + memberNames.toString()))); + } + else { + memberNodes = _getGroupMembers(memberNames); + } + } + Group group = new Group(groupName, + groupTpl, + memberNodes, + customDefs); + groups.add(group); + } + return groups; + } + + private ArrayList _getGroupMembers(ArrayList memberNames) { + ArrayList memberNodes = new ArrayList<>(); + _validateGroupMembers(memberNames); + for(String member: memberNames) { + for(NodeTemplate node: nodeTemplates) { + if(member.equals(node.getName())) { + memberNodes.add(node); + } + } + } + return memberNodes; + } + + private ArrayList _getPolicyGroups(ArrayList memberNames) { + ArrayList memberGroups = new ArrayList<>(); + for(String member: memberNames) { + for(Group group: groups) { + if(member.equals(group.getName())) { + memberGroups.add(group); + } + } + } + return memberGroups; + } + + private void _validateGroupMembers(ArrayList members) { + ArrayList nodeNames = new ArrayList<>(); + for(NodeTemplate node: nodeTemplates) { + nodeNames.add(node.getName()); + } + for(String member: members) { + if(!nodeNames.contains(member)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE239", String.format( + "InvalidGroupTargetException: Target member \"%s\" is not found in \"nodeTemplates\"",member))); + } + } + } + + // topology template can act like node template + // it is exposed by substitution_mappings. 
+ + public String nodetype() { + return substitutionMappings.getNodeType(); + } + + public LinkedHashMap capabilities() { + return substitutionMappings.getCapabilities(); + } + + public LinkedHashMap requirements() { + return substitutionMappings.getRequirements(); + } + + private String _tplDescription() { + return (String)tpl.get(DESCRIPTION); + //if description: + // return description.rstrip() + } + + @SuppressWarnings("unchecked") + private LinkedHashMap _tplInputs() { + if(tpl.get(INPUTS) != null) { + return (LinkedHashMap)tpl.get(INPUTS); + } + else { + return new LinkedHashMap(); + } + } + + @SuppressWarnings("unchecked") + private LinkedHashMap _tplNodeTemplates() { + return (LinkedHashMap)tpl.get(NODE_TEMPLATES); + } + + @SuppressWarnings("unchecked") + private LinkedHashMap _tplRelationshipTemplates() { + if(tpl.get(RELATIONSHIP_TEMPLATES) != null) { + return (LinkedHashMap)tpl.get(RELATIONSHIP_TEMPLATES); + } + else { + return new LinkedHashMap(); + } + } + + @SuppressWarnings("unchecked") + private LinkedHashMap _tplOutputs() { + if(tpl.get(OUTPUTS) != null) { + return (LinkedHashMap)tpl.get(OUTPUTS); + } + else { + return new LinkedHashMap(); + } + } + + @SuppressWarnings("unchecked") + private LinkedHashMap _tplSubstitutionMappings() { + if(tpl.get(SUBSTITUTION_MAPPINGS) != null) { + return (LinkedHashMap)tpl.get(SUBSTITUTION_MAPPINGS); + } + else { + return new LinkedHashMap(); + } + } + + @SuppressWarnings("unchecked") + private LinkedHashMap _tplGroups() { + if(tpl.get(GROUPS) != null) { + return (LinkedHashMap)tpl.get(GROUPS); + } + else { + return new LinkedHashMap(); + } + } + + @SuppressWarnings("unchecked") + private ArrayList _tplPolicies() { + if(tpl.get(POLICIES) != null) { + return (ArrayList)tpl.get(POLICIES); + } + else { + return new ArrayList(); + } + } + + private void _validateField() { + for(String name: tpl.keySet()) { + boolean bFound = false; + for(String section: SECTIONS) { + if(name.equals(section)) { + bFound = true; + 
break; + } + } + if(!bFound) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE240", String.format( + "UnknownFieldError: TopologyTemplate contains unknown field \"%s\"",name))); + } + } + } + + @SuppressWarnings("unchecked") + private void _processIntrinsicFunctions() { + // Process intrinsic functions + + // Current implementation processes functions within node template + // properties, requirements, interfaces inputs and template outputs. + + if(nodeTemplates != null) { + for(NodeTemplate nt: nodeTemplates) { + for(Property prop: nt.getPropertiesObjects()) { + prop.setValue(Function.getFunction(this,nt,prop.getValue(), resolveGetInput)); + } + for(InterfacesDef ifd: nt.getInterfaces()) { + LinkedHashMap ifin = ifd.getInputs(); + if(ifin != null) { + for(Map.Entry me: ifin.entrySet()) { + String name = me.getKey(); + Object value = Function.getFunction(this,nt,me.getValue(), resolveGetInput); + ifd.setInput(name,value); + } + } + } + if(nt.getRequirements() != null) { + for(RequirementAssignment req: nt.getRequirements().getAll()) { + LinkedHashMap rel; + Object t = req.getRelationship(); + // it can be a string or a LHM... 
+ if(t instanceof LinkedHashMap) { + rel = (LinkedHashMap)t; + } + else { + // we set it to null to fail the next test + // and avoid the get("proprties") + rel = null; + } + + if(rel != null && rel.get("properties") != null) { + LinkedHashMap relprops = + (LinkedHashMap)rel.get("properties"); + for(String key: relprops.keySet()) { + Object value = relprops.get(key); + Object func = Function.getFunction(this,req,value, resolveGetInput); + relprops.put(key,func); + } + } + } + } + if(nt.getCapabilitiesObjects() != null) { + for(CapabilityAssignment cap: nt.getCapabilitiesObjects()) { + if(cap.getPropertiesObjects() != null) { + for(Property prop: cap.getPropertiesObjects()) { + Object propvalue = Function.getFunction(this,nt,prop.getValue(), resolveGetInput); + if(propvalue instanceof GetInput) { + propvalue = ((GetInput)propvalue).result(); + for(String p: cap.getProperties().keySet()) { + //Object v = cap.getProperties().get(p); + if(p.equals(prop.getName())) { + cap.setProperty(p,propvalue); + } + } + } + } + } + } + } + for(RelationshipType rel: nt.getRelationships().keySet()) { + NodeTemplate node = nt.getRelationships().get(rel); + ArrayList relTpls = node.getRelationshipTemplate(); + if(relTpls != null) { + for(RelationshipTemplate relTpl: relTpls) { + // TT 5 + for(InterfacesDef iface: relTpl.getInterfaces()) { + if(iface.getInputs() != null) { + for(String name: iface.getInputs().keySet()) { + Object value = iface.getInputs().get(name); + Object func = Function.getFunction( + this, + relTpl, + value, + resolveGetInput); + iface.setInput(name,func); + } + } + } + } + } + } + } + } + for(Output output: outputs) { + Object func = Function.getFunction(this,outputs,output.getValue(), resolveGetInput); + if(func instanceof GetAttribute) { + output.setAttr(Output.VALUE,func); + } + } + } + + public static String getSubMappingNodeType(LinkedHashMap topologyTpl) { + if(topologyTpl != null && topologyTpl instanceof LinkedHashMap) { + Object submapTpl = 
topologyTpl.get(SUBSTITUTION_MAPPINGS); + return SubstitutionMappings.stGetNodeType((LinkedHashMap)submapTpl); + } + return null; + } + + // getters + + public LinkedHashMap getTpl() { + return tpl; + } + + public LinkedHashMap getMetadata() { + return metaData; + } + + public ArrayList getInputs() { + return inputs; + } + + public ArrayList getOutputs() { + return outputs; + } + + public ArrayList getPolicies() { + return policies; + } + + public ArrayList getRelationshipTemplates() { + return relationshipTemplates; + } + + public ArrayList getNodeTemplates() { + return nodeTemplates; + } + + public ArrayList getGroups() { + return groups; + } + + public SubstitutionMappings getSubstitutionMappings() { + return substitutionMappings; + } + + public LinkedHashMap getParsedParams() { + return parsedParams; + } + + public boolean getResolveGetInput() { + return resolveGetInput; + } +} + +/*python + +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ + +import logging + +from toscaparser.common import exception +from toscaparser.dataentity import DataEntity +from toscaparser import functions +from toscaparser.groups import Group +from toscaparser.nodetemplate import NodeTemplate +from toscaparser.parameters import Input +from toscaparser.parameters import Output +from toscaparser.policy import Policy +from toscaparser.relationship_template import RelationshipTemplate +from toscaparser.substitution_mappings import SubstitutionMappings +from toscaparser.tpl_relationship_graph import ToscaGraph +from toscaparser.utils.gettextutils import _ + + +# Topology template key names +SECTIONS = (DESCRIPTION, INPUTS, NODE_TEMPLATES, + RELATIONSHIP_TEMPLATES, OUTPUTS, GROUPS, + SUBSTITUION_MAPPINGS, POLICIES) = \ + ('description', 'inputs', 'node_templates', + 'relationship_templates', 'outputs', 'groups', + 'substitution_mappings', 'policies') + +log = logging.getLogger("tosca.model") + + +class TopologyTemplate(object): + + '''Load the template data.''' + def __init__(self, template, custom_defs, + rel_types=None, parsed_params=None, + sub_mapped_node_template=None): + self.tpl = template + self.sub_mapped_node_template = sub_mapped_node_template + if self.tpl: + self.custom_defs = custom_defs + self.rel_types = rel_types + self.parsed_params = parsed_params + self._validate_field() + self.description = self._tpl_description() + self.inputs = self._inputs() + self.relationship_templates = self._relationship_templates() + self.nodetemplates = self._nodetemplates() + self.outputs = self._outputs() + if hasattr(self, 'nodetemplates'): + self.graph = ToscaGraph(self.nodetemplates) + self.groups = self._groups() + self.policies = self._policies() + self._process_intrinsic_functions() + self.substitution_mappings = self._substitution_mappings() + + def _inputs(self): + inputs = [] + for name, attrs in self._tpl_inputs().items(): + input = Input(name, attrs) + if self.parsed_params and name in self.parsed_params: + 
input.validate(self.parsed_params[name]) + else: + default = input.default + if default: + input.validate(default) + if (self.parsed_params and input.name not in self.parsed_params + or self.parsed_params is None) and input.required \ + and input.default is None: + log.warning(_('The required parameter %s ' + 'is not provided') % input.name) + + inputs.append(input) + return inputs + + def _nodetemplates(self): + nodetemplates = [] + tpls = self._tpl_nodetemplates() + if tpls: + for name in tpls: + tpl = NodeTemplate(name, tpls, self.custom_defs, + self.relationship_templates, + self.rel_types) + if (tpl.type_definition and + (tpl.type in tpl.type_definition.TOSCA_DEF or + (tpl.type not in tpl.type_definition.TOSCA_DEF and + bool(tpl.custom_def)))): + tpl.validate(self) + nodetemplates.append(tpl) + return nodetemplates + + def _relationship_templates(self): + rel_templates = [] + tpls = self._tpl_relationship_templates() + for name in tpls: + tpl = RelationshipTemplate(tpls[name], name, self.custom_defs) + rel_templates.append(tpl) + return rel_templates + + def _outputs(self): + outputs = [] + for name, attrs in self._tpl_outputs().items(): + output = Output(name, attrs) + output.validate() + outputs.append(output) + return outputs + + def _substitution_mappings(self): + tpl_substitution_mapping = self._tpl_substitution_mappings() + # if tpl_substitution_mapping and self.sub_mapped_node_template: + if tpl_substitution_mapping: + return SubstitutionMappings(tpl_substitution_mapping, + self.nodetemplates, + self.inputs, + self.outputs, + self.sub_mapped_node_template, + self.custom_defs) + + def _policies(self): + policies = [] + for policy in self._tpl_policies(): + for policy_name, policy_tpl in policy.items(): + target_list = policy_tpl.get('targets') + if target_list and len(target_list) >= 1: + target_objects = [] + targets_type = "groups" + target_objects = self._get_policy_groups(target_list) + if not target_objects: + targets_type = "node_templates" + 
target_objects = self._get_group_members(target_list) + policyObj = Policy(policy_name, policy_tpl, + target_objects, targets_type, + self.custom_defs) + policies.append(policyObj) + return policies + + def _groups(self): + groups = [] + member_nodes = None + for group_name, group_tpl in self._tpl_groups().items(): + member_names = group_tpl.get('members') + if member_names is not None: + DataEntity.validate_datatype('list', member_names) + if len(member_names) < 1 or \ + len(member_names) != len(set(member_names)): + exception.ValidationIssueCollector.appendException( + exception.InvalidGroupTargetException( + message=_('Member nodes "%s" should be >= 1 ' + 'and not repeated') % member_names)) + else: + member_nodes = self._get_group_members(member_names) + group = Group(group_name, group_tpl, + member_nodes, + self.custom_defs) + groups.append(group) + return groups + + def _get_group_members(self, member_names): + member_nodes = [] + self._validate_group_members(member_names) + for member in member_names: + for node in self.nodetemplates: + if node.name == member: + member_nodes.append(node) + return member_nodes + + def _get_policy_groups(self, member_names): + member_groups = [] + for member in member_names: + for group in self.groups: + if group.name == member: + member_groups.append(group) + return member_groups + + def _validate_group_members(self, members): + node_names = [] + for node in self.nodetemplates: + node_names.append(node.name) + for member in members: + if member not in node_names: + exception.ValidationIssueCollector.appendException( + exception.InvalidGroupTargetException( + message=_('Target member "%s" is not found in ' + 'node_templates') % member)) + + # topology template can act like node template + # it is exposed by substitution_mappings. 
+ def nodetype(self): + return self.substitution_mappings.node_type \ + if self.substitution_mappings else None + + def capabilities(self): + return self.substitution_mappings.capabilities \ + if self.substitution_mappings else None + + def requirements(self): + return self.substitution_mappings.requirements \ + if self.substitution_mappings else None + + def _tpl_description(self): + description = self.tpl.get(DESCRIPTION) + if description: + return description.rstrip() + + def _tpl_inputs(self): + return self.tpl.get(INPUTS) or {} + + def _tpl_nodetemplates(self): + return self.tpl.get(NODE_TEMPLATES) + + def _tpl_relationship_templates(self): + return self.tpl.get(RELATIONSHIP_TEMPLATES) or {} + + def _tpl_outputs(self): + return self.tpl.get(OUTPUTS) or {} + + def _tpl_substitution_mappings(self): + return self.tpl.get(SUBSTITUION_MAPPINGS) or {} + + def _tpl_groups(self): + return self.tpl.get(GROUPS) or {} + + def _tpl_policies(self): + return self.tpl.get(POLICIES) or {} + + def _validate_field(self): + for name in self.tpl: + if name not in SECTIONS: + exception.ValidationIssueCollector.appendException( + exception.UnknownFieldError(what='Template', field=name)) + + def _process_intrinsic_functions(self): + """Process intrinsic functions + + Current implementation processes functions within node template + properties, requirements, interfaces inputs and template outputs. 
+ """ + if hasattr(self, 'nodetemplates'): + for node_template in self.nodetemplates: + for prop in node_template.get_properties_objects(): + prop.value = functions.get_function(self, + node_template, + prop.value) + for interface in node_template.interfaces: + if interface.inputs: + for name, value in interface.inputs.items(): + interface.inputs[name] = functions.get_function( + self, + node_template, + value) + if node_template.requirements and \ + isinstance(node_template.requirements, list): + for req in node_template.requirements: + rel = req + for req_name, req_item in req.items(): + if isinstance(req_item, dict): + rel = req_item.get('relationship') + break + if rel and 'properties' in rel: + for key, value in rel['properties'].items(): + rel['properties'][key] = \ + functions.get_function(self, + req, + value) + if node_template.get_capabilities_objects(): + for cap in node_template.get_capabilities_objects(): + if cap.get_properties_objects(): + for prop in cap.get_properties_objects(): + propvalue = functions.get_function( + self, + node_template, + prop.value) + if isinstance(propvalue, functions.GetInput): + propvalue = propvalue.result() + for p, v in cap._properties.items(): + if p == prop.name: + cap._properties[p] = propvalue + for rel, node in node_template.relationships.items(): + rel_tpls = node.relationship_tpl + if rel_tpls: + for rel_tpl in rel_tpls: + for interface in rel_tpl.interfaces: + if interface.inputs: + for name, value in \ + interface.inputs.items(): + interface.inputs[name] = \ + functions.get_function(self, + rel_tpl, + value) + for output in self.outputs: + func = functions.get_function(self, self.outputs, output.value) + if isinstance(func, functions.GetAttribute): + output.attrs[output.VALUE] = func + + @classmethod + def get_sub_mapping_node_type(cls, topology_tpl): + if topology_tpl and isinstance(topology_tpl, dict): + submap_tpl = topology_tpl.get(SUBSTITUION_MAPPINGS) + return SubstitutionMappings.get_node_type(submap_tpl) 
+*/ \ No newline at end of file diff --git a/src/main/java/org/onap/sdc/toscaparser/api/ToscaGraph.java b/src/main/java/org/onap/sdc/toscaparser/api/ToscaGraph.java new file mode 100644 index 0000000..fa371c3 --- /dev/null +++ b/src/main/java/org/onap/sdc/toscaparser/api/ToscaGraph.java @@ -0,0 +1,109 @@ +package org.onap.sdc.toscaparser.api; + +import java.util.ArrayList; +import java.util.LinkedHashMap; + +import org.onap.sdc.toscaparser.api.elements.RelationshipType; + +//import java.util.Iterator; + +public class ToscaGraph { + // Graph of Tosca Node Templates + + private ArrayList nodeTemplates; + private LinkedHashMap vertices; + + public ToscaGraph(ArrayList inodeTemplates) { + nodeTemplates = inodeTemplates; + vertices = new LinkedHashMap(); + _create(); + } + + private void _createVertex(NodeTemplate node) { + if(vertices.get(node.getName()) == null) { + vertices.put(node.getName(),node); + } + } + + private void _createEdge(NodeTemplate node1, + NodeTemplate node2, + RelationshipType relation) { + if(vertices.get(node1.getName()) == null) { + _createVertex(node1); + vertices.get(node1.name)._addNext(node2,relation); + } + } + + public NodeTemplate vertex(String name) { + if(vertices.get(name) != null) { + return vertices.get(name); + } + return null; + } + +// public Iterator getIter() { +// return vertices.values().iterator(); +// } + + private void _create() { + for(NodeTemplate node: nodeTemplates) { + LinkedHashMap relation = node.getRelationships(); + if(relation != null) { + for(RelationshipType rel: relation.keySet()) { + NodeTemplate nodeTpls = relation.get(rel); + for(NodeTemplate tpl: nodeTemplates) { + if(tpl.getName().equals(nodeTpls.getName())) { + _createEdge(node,tpl,rel); + } + } + } + } + _createVertex(node); + } + } + + @Override + public String toString() { + return "ToscaGraph{" + + "nodeTemplates=" + nodeTemplates + + ", vertices=" + vertices + + '}'; + } +} + +/*python + +class ToscaGraph(object): + '''Graph of Tosca Node 
Templates.''' + def __init__(self, nodetemplates): + self.nodetemplates = nodetemplates + self.vertices = {} + self._create() + + def _create_vertex(self, node): + if node not in self.vertices: + self.vertices[node.name] = node + + def _create_edge(self, node1, node2, relationship): + if node1 not in self.vertices: + self._create_vertex(node1) + self.vertices[node1.name]._add_next(node2, + relationship) + + def vertex(self, node): + if node in self.vertices: + return self.vertices[node] + + def __iter__(self): + return iter(self.vertices.values()) + + def _create(self): + for node in self.nodetemplates: + relation = node.relationships + if relation: + for rel, nodetpls in relation.items(): + for tpl in self.nodetemplates: + if tpl.name == nodetpls.name: + self._create_edge(node, tpl, rel) + self._create_vertex(node) +*/ \ No newline at end of file diff --git a/src/main/java/org/onap/sdc/toscaparser/api/ToscaTemplate.java b/src/main/java/org/onap/sdc/toscaparser/api/ToscaTemplate.java new file mode 100644 index 0000000..3d94d6e --- /dev/null +++ b/src/main/java/org/onap/sdc/toscaparser/api/ToscaTemplate.java @@ -0,0 +1,1200 @@ +package org.onap.sdc.toscaparser.api; + +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.common.ValidationIssueCollector; +import org.onap.sdc.toscaparser.api.parameters.Output; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.io.InputStream; +import java.util.*; +import java.util.concurrent.ConcurrentHashMap; +import java.nio.file.Files; +import java.util.function.Predicate; +import java.nio.file.Paths; + +import org.onap.sdc.toscaparser.api.common.JToscaException; +import org.onap.sdc.toscaparser.api.elements.EntityType; +import org.onap.sdc.toscaparser.api.elements.Metadata; +import org.onap.sdc.toscaparser.api.extensions.ExtTools; 
+import org.onap.sdc.toscaparser.api.parameters.Input; +import org.onap.sdc.toscaparser.api.prereq.CSAR; +import org.onap.sdc.toscaparser.api.utils.JToscaErrorCodes; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.yaml.snakeyaml.Yaml; + +public class ToscaTemplate extends Object { + + private static Logger log = LoggerFactory.getLogger(ToscaTemplate.class.getName()); + + // TOSCA template key names + private static final String DEFINITION_VERSION = "tosca_definitions_version"; + private static final String DEFAULT_NAMESPACE = "tosca_default_namespace"; + private static final String TEMPLATE_NAME = "template_name"; + private static final String TOPOLOGY_TEMPLATE = "topology_template"; + private static final String TEMPLATE_AUTHOR = "template_author"; + private static final String TEMPLATE_VERSION = "template_version"; + private static final String DESCRIPTION = "description"; + private static final String IMPORTS = "imports"; + private static final String DSL_DEFINITIONS = "dsl_definitions"; + private static final String NODE_TYPES = "node_types"; + private static final String RELATIONSHIP_TYPES = "relationship_types"; + private static final String RELATIONSHIP_TEMPLATES = "relationship_templates"; + private static final String CAPABILITY_TYPES = "capability_types"; + private static final String ARTIFACT_TYPES = "artifact_types"; + private static final String DATA_TYPES = "data_types"; + private static final String INTERFACE_TYPES = "interface_types"; + private static final String POLICY_TYPES = "policy_types"; + private static final String GROUP_TYPES = "group_types"; + private static final String REPOSITORIES = "repositories"; + + private static String SECTIONS[] = { + DEFINITION_VERSION, DEFAULT_NAMESPACE, TEMPLATE_NAME, + TOPOLOGY_TEMPLATE, TEMPLATE_AUTHOR, TEMPLATE_VERSION, + DESCRIPTION, IMPORTS, DSL_DEFINITIONS, NODE_TYPES, + RELATIONSHIP_TYPES, RELATIONSHIP_TEMPLATES, + CAPABILITY_TYPES, ARTIFACT_TYPES, DATA_TYPES, + INTERFACE_TYPES, 
POLICY_TYPES, GROUP_TYPES, REPOSITORIES + }; + + // Sections that are specific to individual template definitions + private static final String METADATA = "metadata"; + private static ArrayList SPECIAL_SECTIONS; + + private ExtTools exttools = new ExtTools(); + + private ArrayList VALID_TEMPLATE_VERSIONS; + private LinkedHashMap> ADDITIONAL_SECTIONS; + + private boolean isFile; + private String path; + private String inputPath; + private String rootPath; + private LinkedHashMap parsedParams; + private boolean resolveGetInput; + private LinkedHashMap tpl; + private String version; + private ArrayList imports; + private LinkedHashMap relationshipTypes; + private Metadata metaData; + private String description; + private TopologyTemplate topologyTemplate; + private ArrayList repositories; + private ArrayList inputs; + private ArrayList relationshipTemplates; + private ArrayList nodeTemplates; + private ArrayList outputs; + private ArrayList policies; + private ConcurrentHashMap nestedToscaTplsWithTopology; + private ArrayList nestedToscaTemplatesWithTopology; + private ToscaGraph graph; + private String csarTempDir; + private int nestingLoopCounter; + private LinkedHashMap> metaProperties; + private Set processedImports; + + public ToscaTemplate(String _path, + LinkedHashMap _parsedParams, + boolean aFile, + LinkedHashMap yamlDictTpl) throws JToscaException { + init(_path, _parsedParams, aFile, yamlDictTpl, true); + } + + public ToscaTemplate(String _path, + LinkedHashMap _parsedParams, + boolean aFile, + LinkedHashMap yamlDictTpl, boolean resolveGetInput) throws JToscaException { + init(_path, _parsedParams, aFile, yamlDictTpl, resolveGetInput); + } + + @SuppressWarnings("unchecked") + private void init(String _path, + LinkedHashMap _parsedParams, + boolean aFile, + LinkedHashMap yamlDictTpl, boolean _resolveGetInput) throws JToscaException { + + ThreadLocalsHolder.setCollector(new ValidationIssueCollector()); + + VALID_TEMPLATE_VERSIONS = new ArrayList<>(); + 
VALID_TEMPLATE_VERSIONS.add("tosca_simple_yaml_1_0"); + VALID_TEMPLATE_VERSIONS.add("tosca_simple_yaml_1_1"); + VALID_TEMPLATE_VERSIONS.addAll(exttools.getVersions()); + ADDITIONAL_SECTIONS = new LinkedHashMap<>(); + SPECIAL_SECTIONS = new ArrayList<>(); + SPECIAL_SECTIONS.add(METADATA); + ADDITIONAL_SECTIONS.put("tosca_simple_yaml_1_0",SPECIAL_SECTIONS); + ADDITIONAL_SECTIONS.put("tosca_simple_yaml_1_1",SPECIAL_SECTIONS); + ADDITIONAL_SECTIONS.putAll(exttools.getSections()); + + //long startTime = System.nanoTime(); + + + isFile = aFile; + inputPath = null; + path = null; + tpl = null; + csarTempDir = null; + nestedToscaTplsWithTopology = new ConcurrentHashMap<>(); + nestedToscaTemplatesWithTopology = new ArrayList(); + resolveGetInput = _resolveGetInput; + metaProperties = new LinkedHashMap<>(); + + if(_path != null && !_path.isEmpty()) { + // save the original input path + inputPath = _path; + // get the actual path (will change with CSAR) + path = _getPath(_path); + // load the YAML template + if (path != null && !path.isEmpty()) { + try (InputStream input = new FileInputStream(new File(path));){ + //System.out.println("Loading YAML file " + path); + log.debug("ToscaTemplate Loading YAMEL file {}", path); + Yaml yaml = new Yaml(); + Object data = yaml.load(input); + this.tpl = (LinkedHashMap) data; + } + catch (FileNotFoundException e) { + log.error("ToscaTemplate - Exception loading yaml: {}", e.getMessage()); + log.error("Exception", e); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE275", + "ToscaTemplate - Exception loading yaml: -> " + e.getMessage())); + return; + } + catch(Exception e) { + log.error("ToscaTemplate - Error loading yaml, aborting -> ", e.getMessage()); + log.error("Exception", e); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE275", + "ToscaTemplate - Error loading yaml, aborting -> " + e.getMessage())); + return; + } + + if(yamlDictTpl != null) { + //msg = 
(_('Both path and yaml_dict_tpl arguments were ' + // 'provided. Using path and ignoring yaml_dict_tpl.')) + //log.info(msg) + log.debug("ToscaTemplate - Both path and yaml_dict_tpl arguments were provided. Using path and ignoring yaml_dict_tpl"); + } + } else { + // no input to process... + _abort(); + } + } + else { + if(yamlDictTpl != null) { + tpl = yamlDictTpl; + } + else { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE244", + "ValueError: No path or yaml_dict_tpl was provided. There is nothing to parse")); + log.debug("ToscaTemplate ValueError: No path or yaml_dict_tpl was provided. There is nothing to parse"); + + } + } + + if(tpl != null) { + parsedParams = _parsedParams; + _validateField(); + this.rootPath = path; + this.processedImports = new HashSet(); + this.imports = _tplImports(); + this.version = _tplVersion(); + this.metaData = _tplMetaData(); + this.relationshipTypes = _tplRelationshipTypes(); + this.description = _tplDescription(); + this.topologyTemplate = _topologyTemplate(); + this.repositories = _tplRepositories(); + if(topologyTemplate.getTpl() != null) { + this.inputs = _inputs(); + this.relationshipTemplates = _relationshipTemplates(); + this.nodeTemplates = _nodeTemplates(); + this.outputs = _outputs(); + this.policies = _policies(); +// _handleNestedToscaTemplatesWithTopology(); + _handleNestedToscaTemplatesWithTopology(topologyTemplate); + graph = new ToscaGraph(nodeTemplates); + } + } + + if(csarTempDir != null) { + CSAR.deleteDir(new File(csarTempDir)); + csarTempDir = null; + } + + verifyTemplate(); + + } + + private void _abort() throws JToscaException { + // print out all exceptions caught + verifyTemplate(); + throw new JToscaException("jtosca aborting", JToscaErrorCodes.PATH_NOT_VALID.getValue()); + } + + private TopologyTemplate _topologyTemplate() { + return new TopologyTemplate( + _tplTopologyTemplate(), + _getAllCustomDefs(imports), + relationshipTypes, + parsedParams, + null, + 
resolveGetInput); + } + + private ArrayList _inputs() { + return topologyTemplate.getInputs(); + } + + private ArrayList _nodeTemplates() { + return topologyTemplate.getNodeTemplates(); + } + + private ArrayList _relationshipTemplates() { + return topologyTemplate.getRelationshipTemplates(); + } + + private ArrayList _outputs() { + return topologyTemplate.getOutputs(); + } + + private String _tplVersion() { + return (String)tpl.get(DEFINITION_VERSION); + } + + @SuppressWarnings("unchecked") + private Metadata _tplMetaData() { + Object mdo = tpl.get(METADATA); + if(mdo instanceof LinkedHashMap) { + return new Metadata((Map)mdo); + } + else { + return null; + } + } + + private String _tplDescription() { + return (String)tpl.get(DESCRIPTION); + } + + private ArrayList _tplImports() { + return (ArrayList)tpl.get(IMPORTS); + } + + private ArrayList _tplRepositories() { + LinkedHashMap repositories = + (LinkedHashMap)tpl.get(REPOSITORIES); + ArrayList reposit = new ArrayList<>(); + if(repositories != null) { + for(Map.Entry me: repositories.entrySet()) { + Repository reposits = new Repository(me.getKey(),me.getValue()); + reposit.add(reposits); + } + } + return reposit; + } + + private LinkedHashMap _tplRelationshipTypes() { + return (LinkedHashMap)_getCustomTypes(RELATIONSHIP_TYPES,null); + } + + @SuppressWarnings("unchecked") + private LinkedHashMap _tplRelationshipTemplates() { + return (LinkedHashMap)_tplTopologyTemplate().get(RELATIONSHIP_TEMPLATES); + } + + @SuppressWarnings("unchecked") + private LinkedHashMap _tplTopologyTemplate() { + return (LinkedHashMap)tpl.get(TOPOLOGY_TEMPLATE); + } + + private ArrayList _policies() { + return topologyTemplate.getPolicies(); + } + + /** + * This method is used to get consolidated custom definitions from all imports + * It is logically divided in two parts to handle imports; map and list formats. 
+ * Before processing the imports; it sorts them to make sure the current directory imports are + * being processed first and then others. Once sorted; it processes each import one by one in + * recursive manner. + * To avoid cyclic dependency among imports; this method uses a set to keep track of all + * imports which are already processed and filters the imports which occurs more than once. + * + * @param alImports all imports which needs to be processed + * @return the linked hash map containing all import definitions + */ + private LinkedHashMap _getAllCustomDefs(Object alImports) { + + String types[] = { + IMPORTS, NODE_TYPES, CAPABILITY_TYPES, RELATIONSHIP_TYPES, + DATA_TYPES, INTERFACE_TYPES, POLICY_TYPES, GROUP_TYPES + }; + LinkedHashMap customDefsFinal = new LinkedHashMap<>(); + + List> imports = (List>) alImports; + if (imports != null && !imports.isEmpty()) { + if (imports.get(0) instanceof LinkedHashMap) { + imports = sortImports(imports); + + for (Map map : imports) { + List> singleImportList = new ArrayList(); + singleImportList.add(map); + + Map importNameDetails = getValidFileNameForImportReference(singleImportList); + singleImportList = filterImportsForRecursion(singleImportList, importNameDetails); + + if(!singleImportList.get(0).isEmpty()){ + LinkedHashMap customDefs = _getCustomTypes(types, new ArrayList<>(singleImportList)); + processedImports.add(importNameDetails.get("importFileName")); + + if (customDefs != null) { + customDefsFinal.putAll(customDefs); + + if (customDefs.get(IMPORTS) != null) { + resetPathForRecursiveImports(importNameDetails.get("importRelativeName")); + LinkedHashMap importDefs = _getAllCustomDefs(customDefs.get(IMPORTS)); + customDefsFinal.putAll(importDefs); + } + } + } + } + } else { + LinkedHashMap customDefs = _getCustomTypes(types, new ArrayList<>(imports)); + if (customDefs != null) { + customDefsFinal.putAll(customDefs); + + if (customDefs.get(IMPORTS) != null) { + LinkedHashMap importDefs = 
_getAllCustomDefs(customDefs.get(IMPORTS)); + customDefsFinal.putAll(importDefs); + } + } + } + } + + // As imports are not custom_types, remove from the dict + customDefsFinal.remove(IMPORTS); + + return customDefsFinal; + } + + /** + * This method is used to sort the imports in order so that same directory + * imports will be processed first + * + * @param customImports the custom imports + * @return the sorted list of imports + */ + private List> sortImports(List> customImports){ + List> finalList1 = new ArrayList<>(); + List> finalList2 = new ArrayList<>(); + Iterator> itr = customImports.iterator(); + while(itr.hasNext()) { + Map innerMap = itr.next(); + if (innerMap.toString().contains("../")) { + finalList2.add(innerMap); + itr.remove(); + } + else if (innerMap.toString().contains("/")) { + finalList1.add(innerMap); + itr.remove(); + } + } + + customImports.addAll(finalList1); + customImports.addAll(finalList2); + return customImports; + } + + /** + * This method is used to reset PATH variable after processing of current import file is done + * This is required because of relative path nature of imports present in files. + * + * @param currImportRelativeName the current import relative name + */ + private void resetPathForRecursiveImports(String currImportRelativeName){ + path = getPath(path, currImportRelativeName); + } + + /** + * This is a recursive method which starts from current import and then recursively finds a + * valid path relative to current import file name. 
+ * By doing this it handles all nested hierarchy of imports defined in CSARs + * + * @param path the path + * @param importFileName the import file name + * @return the string containing updated path value + */ + private String getPath(String path, String importFileName){ + String tempFullPath = (Paths.get(path).toAbsolutePath().getParent() + .toString() + File.separator + importFileName.replace("../", "")).replace('\\', '/'); + String tempPartialPath = (Paths.get(path).toAbsolutePath().getParent().toString()).replace('\\', '/'); + if(Files.exists(Paths.get(tempFullPath))) + return tempFullPath; + else + return getPath(tempPartialPath, importFileName); + } + + /** + * This method is used to get full path name for the file which needs to be processed. It helps + * in situation where files are present in different directory and are references as relative + * paths. + * + * @param customImports the custom imports + * @return the map containing import file full and relative paths + */ + private Map getValidFileNameForImportReference(List> + customImports){ + String importFileName; + Map retMap = new HashMap<>(); + for (Map map1 : customImports) { + for (Map.Entry entry : map1.entrySet()) { + Map innerMostMap = (Map) entry.getValue(); + Iterator> it = innerMostMap.entrySet().iterator(); + while (it.hasNext()) { + Map.Entry val = it.next(); + if(val.getValue().contains("/")){ + importFileName = (Paths.get(rootPath).toAbsolutePath().getParent().toString() + File + .separator + val.getValue().replace("../", "")).replace('\\', '/'); + } + else { + importFileName = (Paths.get(path).toAbsolutePath().getParent().toString() + File + .separator + val.getValue().replace("../", "")).replace('\\', '/'); + } + retMap.put("importFileName", importFileName); + retMap.put("importRelativeName", val.getValue()); + } + } + } + return retMap; + } + + /** + * This method is used to filter the imports which already gets processed in previous step. 
+ * It handles the use case of cyclic dependency in imports which may cause Stack Overflow + * exception + * + * @param customImports the custom imports + * @param importNameDetails the import name details + * @return the list containing filtered imports + */ + private List> filterImportsForRecursion(List> + customImports, Map importNameDetails){ + for (Map map1 : customImports) { + for (Map.Entry entry : map1.entrySet()) { + Map innerMostMap = (Map) entry.getValue(); + Iterator> it = innerMostMap.entrySet().iterator(); + while (it.hasNext()) { + it.next(); + if (processedImports.contains(importNameDetails.get("importFileName"))) { + it.remove(); + } + } + } + } + + // Remove Empty elements + Iterator> itr = customImports.iterator(); + while(itr.hasNext()) { + Map innerMap = itr.next(); + Predicate predicate = p-> p.values().isEmpty(); + innerMap.values().removeIf(predicate); + } + + return customImports; + } + + @SuppressWarnings("unchecked") + private LinkedHashMap _getCustomTypes(Object typeDefinitions,ArrayList alImports) { + + // Handle custom types defined in imported template files + // This method loads the custom type definitions referenced in "imports" + // section of the TOSCA YAML template. 
+ + LinkedHashMap customDefs = new LinkedHashMap(); + ArrayList typeDefs = new ArrayList(); + if(typeDefinitions instanceof String[]) { + for(String s: (String[])typeDefinitions) { + typeDefs.add(s); + } + } + else { + typeDefs.add((String)typeDefinitions); + } + + if(alImports == null) { + alImports = _tplImports(); + } + + if(alImports != null) { + ImportsLoader customService = new ImportsLoader(alImports,path,typeDefs,tpl); + ArrayList> nestedToscaTpls = customService.getNestedToscaTpls(); + _updateNestedToscaTplsWithTopology(nestedToscaTpls); + + customDefs = customService.getCustomDefs(); + if(customDefs == null) { + return null; + } + } + + //Handle custom types defined in current template file + for(String td: typeDefs) { + if(!td.equals(IMPORTS)) { + LinkedHashMap innerCustomTypes = (LinkedHashMap )tpl.get(td); + if(innerCustomTypes != null) { + customDefs.putAll(innerCustomTypes); + } + } + } + return customDefs; + } + + private void _updateNestedToscaTplsWithTopology(ArrayList> nestedToscaTpls) { + for(LinkedHashMap ntpl: nestedToscaTpls) { + // there is just one key:value pair in ntpl + for(Map.Entry me: ntpl.entrySet()) { + String fileName = me.getKey(); + @SuppressWarnings("unchecked") + LinkedHashMap toscaTpl = (LinkedHashMap)me.getValue(); + if(toscaTpl.get(TOPOLOGY_TEMPLATE) != null) { + if(nestedToscaTplsWithTopology.get(fileName) == null) { + nestedToscaTplsWithTopology.putAll(ntpl); + } + } + } + } + } + + // multi level nesting - RECURSIVE + private void _handleNestedToscaTemplatesWithTopology(TopologyTemplate tt) { + if(++nestingLoopCounter > 10) { + log.error("ToscaTemplate - _handleNestedToscaTemplatesWithTopology - Nested Topologies Loop: too many levels, aborting"); + return; + } + // Reset Processed Imports for nested templates + this.processedImports = new HashSet<>(); + for(Map.Entry me: nestedToscaTplsWithTopology.entrySet()) { + String fname = me.getKey(); + LinkedHashMap toscaTpl = + (LinkedHashMap)me.getValue(); + for(NodeTemplate 
nt: tt.getNodeTemplates()) { + if(_isSubMappedNode(nt,toscaTpl)) { + parsedParams = _getParamsForNestedTemplate(nt); + ArrayList alim = (ArrayList)toscaTpl.get(IMPORTS); + LinkedHashMap topologyTpl = + (LinkedHashMap)toscaTpl.get(TOPOLOGY_TEMPLATE); + TopologyTemplate topologyWithSubMapping = + new TopologyTemplate(topologyTpl, + _getAllCustomDefs(alim), + relationshipTypes, + parsedParams, + nt, + resolveGetInput); + if(topologyWithSubMapping.getSubstitutionMappings() != null) { + // Record nested topology templates in top level template + //nestedToscaTemplatesWithTopology.add(topologyWithSubMapping); + // Set substitution mapping object for mapped node + nt.setSubMappingToscaTemplate( + topologyWithSubMapping.getSubstitutionMappings()); + _handleNestedToscaTemplatesWithTopology(topologyWithSubMapping); + } + } + } + } + } + +// private void _handleNestedToscaTemplatesWithTopology() { +// for(Map.Entry me: nestedToscaTplsWithTopology.entrySet()) { +// String fname = me.getKey(); +// LinkedHashMap toscaTpl = +// (LinkedHashMap)me.getValue(); +// for(NodeTemplate nt: nodeTemplates) { +// if(_isSubMappedNode(nt,toscaTpl)) { +// parsedParams = _getParamsForNestedTemplate(nt); +// ArrayList alim = (ArrayList)toscaTpl.get(IMPORTS); +// LinkedHashMap topologyTpl = +// (LinkedHashMap)toscaTpl.get(TOPOLOGY_TEMPLATE); +// TopologyTemplate topologyWithSubMapping = +// new TopologyTemplate(topologyTpl, +// //_getAllCustomDefs(null), +// _getAllCustomDefs(alim), +// relationshipTypes, +// parsedParams, +// nt); +// if(topologyWithSubMapping.getSubstitutionMappings() != null) { +// // Record nested topology templates in top level template +// nestedToscaTemplatesWithTopology.add(topologyWithSubMapping); +// // Set substitution mapping object for mapped node +// nt.setSubMappingToscaTemplate( +// topologyWithSubMapping.getSubstitutionMappings()); +// } +// } +// } +// } +// } + + private void _validateField() { + String sVersion = _tplVersion(); + if(sVersion == null) { + 
ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE245", String.format( + "MissingRequiredField: Template is missing required field \"%s\"",DEFINITION_VERSION))); + } + else { + _validateVersion(sVersion); + this.version = sVersion; + } + + for (String sKey : tpl.keySet()) { + boolean bFound = false; + for (String sSection: SECTIONS) { + if(sKey.equals(sSection)) { + bFound = true; + break; + } + } + // check ADDITIONAL_SECTIONS + if(!bFound) { + if(ADDITIONAL_SECTIONS.get(version) != null && + ADDITIONAL_SECTIONS.get(version).contains(sKey)) { + bFound = true; + } + } + if(!bFound) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE246", String.format( + "UnknownFieldError: Template contains unknown field \"%s\"", + sKey))); + } + } + } + + private void _validateVersion(String sVersion) { + boolean bFound = false; + for(String vtv: VALID_TEMPLATE_VERSIONS) { + if(sVersion.equals(vtv)) { + bFound = true; + break; + } + } + if(!bFound) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE247", String.format( + "InvalidTemplateVersion: \"%s\" is invalid. 
Valid versions are %s", + sVersion,VALID_TEMPLATE_VERSIONS.toString()))); + } + else if ((!sVersion.equals("tosca_simple_yaml_1_0") && !sVersion.equals("tosca_simple_yaml_1_1"))) { + EntityType.updateDefinitions(sVersion); + + } + } + + private String _getPath(String _path) throws JToscaException { + if (_path.toLowerCase().endsWith(".yaml") || _path.toLowerCase().endsWith(".yml")) { + return _path; + } + else if (_path.toLowerCase().endsWith(".zip") || _path.toLowerCase().endsWith(".csar")) { + // a CSAR archive + CSAR csar = new CSAR(_path, isFile); + if (csar.validate()) { + try { + csar.decompress(); + metaProperties = csar.getMetaProperties(); + } + catch (IOException e) { + log.error("ToscaTemplate - _getPath - IOException trying to decompress {}", _path); + return null; + } + isFile = true; // the file has been decompressed locally + csar.cleanup(); + csarTempDir = csar.getTempDir(); + return csar.getTempDir() + File.separator + csar.getMainTemplate(); + } + } + else { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE248", "ValueError: " + _path + " is not a valid file")); + return null; + } + return null; + } + + private void verifyTemplate() throws JToscaException { + //Criticals + int validationIssuesCaught = ThreadLocalsHolder.getCollector().validationIssuesCaught(); + if (validationIssuesCaught > 0) { + List validationIssueStrings = ThreadLocalsHolder.getCollector().getValidationIssueReport(); + log.trace("####################################################################################################"); + log.trace("ToscaTemplate - verifyTemplate - {} Parsing Critical{} occurred...", validationIssuesCaught, (validationIssuesCaught > 1 ? "s" : "")); + for (String s : validationIssueStrings) { + log.trace("{}. 
CSAR name - {}", s, inputPath); + } + log.trace("####################################################################################################"); + } + + } + + public String getPath() { + return path; + } + + public String getVersion() { + return version; + } + + public String getDescription() { + return description; + } + + public TopologyTemplate getTopologyTemplate() { + return topologyTemplate; + } + + public Metadata getMetaData() { + return metaData; + } + + public ArrayList getInputs() { + return inputs; + } + + public ArrayList getOutputs() { + return outputs; + } + + public ArrayList getPolicies() { + return policies; + } + + public ArrayList getNodeTemplates() { + return nodeTemplates; + } + + public LinkedHashMap getMetaProperties(String propertiesFile) { + return metaProperties.get(propertiesFile); + } + +// private boolean _isSubMappedNode(NodeTemplate nt,LinkedHashMap toscaTpl) { +// // Return True if the nodetemple is substituted +// if(nt != null && nt.getSubMappingToscaTemplate() == null && +// getSubMappingNodeType(toscaTpl).equals(nt.getType()) && +// nt.getInterfaces().size() < 1) { +// return true; +// } +// return false; +// } + + private boolean _isSubMappedNode(NodeTemplate nt, LinkedHashMap toscaTpl) { + // Return True if the nodetemple is substituted + if(nt != null && nt.getSubMappingToscaTemplate() == null && + getSubMappingNodeType(toscaTpl).equals(nt.getType()) && + nt.getInterfaces().size() < 1) { + return true; + } + return false; + } + + private LinkedHashMap _getParamsForNestedTemplate(NodeTemplate nt) { + // Return total params for nested_template + LinkedHashMap pparams; + if(parsedParams != null) { + pparams = parsedParams; + } + else { + pparams = new LinkedHashMap(); + } + if(nt != null) { + for(String pname: nt.getProperties().keySet()) { + pparams.put(pname,nt.getPropertyValue(pname)); + } + } + return pparams; + } + + private String getSubMappingNodeType(LinkedHashMap toscaTpl) { + // Return substitution mappings 
node type + if(toscaTpl != null) { + return TopologyTemplate.getSubMappingNodeType( + (LinkedHashMap)toscaTpl.get(TOPOLOGY_TEMPLATE)); + } + return null; + } + + private boolean _hasSubstitutionMapping() { + // Return True if the template has valid substitution mappings + return topologyTemplate != null && + topologyTemplate.getSubstitutionMappings() != null; + } + + public boolean hasNestedTemplates() { + // Return True if the tosca template has nested templates + return nestedToscaTemplatesWithTopology != null && + nestedToscaTemplatesWithTopology.size() >= 1; + + } + + public ArrayList getNestedTemplates() { + return nestedToscaTemplatesWithTopology; + } + + @Override + public String toString() { + return "ToscaTemplate{" + + "exttools=" + exttools + + ", VALID_TEMPLATE_VERSIONS=" + VALID_TEMPLATE_VERSIONS + + ", ADDITIONAL_SECTIONS=" + ADDITIONAL_SECTIONS + + ", isFile=" + isFile + + ", path='" + path + '\'' + + ", inputPath='" + inputPath + '\'' + + ", parsedParams=" + parsedParams + + ", tpl=" + tpl + + ", version='" + version + '\'' + + ", imports=" + imports + + ", relationshipTypes=" + relationshipTypes + + ", metaData=" + metaData + + ", description='" + description + '\'' + + ", topologyTemplate=" + topologyTemplate + + ", repositories=" + repositories + + ", inputs=" + inputs + + ", relationshipTemplates=" + relationshipTemplates + + ", nodeTemplates=" + nodeTemplates + + ", outputs=" + outputs + + ", policies=" + policies + + ", nestedToscaTplsWithTopology=" + nestedToscaTplsWithTopology + + ", nestedToscaTemplatesWithTopology=" + nestedToscaTemplatesWithTopology + + ", graph=" + graph + + ", csarTempDir='" + csarTempDir + '\'' + + ", nestingLoopCounter=" + nestingLoopCounter + + '}'; + } +} + +/*python + +import logging +import os + +from copy import deepcopy +from toscaparser.common.exception import ValidationIssueCollector.collector +from toscaparser.common.exception import InvalidTemplateVersion +from toscaparser.common.exception import 
MissingRequiredFieldError +from toscaparser.common.exception import UnknownFieldError +from toscaparser.common.exception import ValidationError +from toscaparser.elements.entity_type import update_definitions +from toscaparser.extensions.exttools import ExtTools +import org.openecomp.sdc.toscaparser.api.imports +from toscaparser.prereq.csar import CSAR +from toscaparser.repositories import Repository +from toscaparser.topology_template import TopologyTemplate +from toscaparser.tpl_relationship_graph import ToscaGraph +from toscaparser.utils.gettextutils import _ +import org.openecomp.sdc.toscaparser.api.utils.yamlparser + + +# TOSCA template key names +SECTIONS = (DEFINITION_VERSION, DEFAULT_NAMESPACE, TEMPLATE_NAME, + TOPOLOGY_TEMPLATE, TEMPLATE_AUTHOR, TEMPLATE_VERSION, + DESCRIPTION, IMPORTS, DSL_DEFINITIONS, NODE_TYPES, + RELATIONSHIP_TYPES, RELATIONSHIP_TEMPLATES, + CAPABILITY_TYPES, ARTIFACT_TYPES, DATA_TYPES, INTERFACE_TYPES, + POLICY_TYPES, GROUP_TYPES, REPOSITORIES) = \ + ('tosca_definitions_version', 'tosca_default_namespace', + 'template_name', 'topology_template', 'template_author', + 'template_version', 'description', 'imports', 'dsl_definitions', + 'node_types', 'relationship_types', 'relationship_templates', + 'capability_types', 'artifact_types', 'data_types', + 'interface_types', 'policy_types', 'group_types', 'repositories') +# Sections that are specific to individual template definitions +SPECIAL_SECTIONS = (METADATA) = ('metadata') + +log = logging.getLogger("tosca.model") + +YAML_LOADER = toscaparser.utils.yamlparser.load_yaml + + +class ToscaTemplate(object): + exttools = ExtTools() + + VALID_TEMPLATE_VERSIONS = ['tosca_simple_yaml_1_0'] + + VALID_TEMPLATE_VERSIONS.extend(exttools.get_versions()) + + ADDITIONAL_SECTIONS = {'tosca_simple_yaml_1_0': SPECIAL_SECTIONS} + + ADDITIONAL_SECTIONS.update(exttools.get_sections()) + + '''Load the template data.''' + def __init__(self, path=None, parsed_params=None, a_file=True, + yaml_dict_tpl=None): + + 
ValidationIssueCollector.collector.start() + self.a_file = a_file + self.input_path = None + self.path = None + self.tpl = None + self.nested_tosca_tpls_with_topology = {} + self.nested_tosca_templates_with_topology = [] + if path: + self.input_path = path + self.path = self._get_path(path) + if self.path: + self.tpl = YAML_LOADER(self.path, self.a_file) + if yaml_dict_tpl: + msg = (_('Both path and yaml_dict_tpl arguments were ' + 'provided. Using path and ignoring yaml_dict_tpl.')) + log.info(msg) + print(msg) + else: + if yaml_dict_tpl: + self.tpl = yaml_dict_tpl + else: + ValidationIssueCollector.collector.appendException( + ValueError(_('No path or yaml_dict_tpl was provided. ' + 'There is nothing to parse.'))) + + if self.tpl: + self.parsed_params = parsed_params + self._validate_field() + self.version = self._tpl_version() + self.relationship_types = self._tpl_relationship_types() + self.description = self._tpl_description() + self.topology_template = self._topology_template() + self.repositories = self._tpl_repositories() + if self.topology_template.tpl: + self.inputs = self._inputs() + self.relationship_templates = self._relationship_templates() + self.nodetemplates = self._nodetemplates() + self.outputs = self._outputs() + self._handle_nested_tosca_templates_with_topology() + self.graph = ToscaGraph(self.nodetemplates) + + ValidationIssueCollector.collector.stop() + self.verify_template() + + def _topology_template(self): + return TopologyTemplate(self._tpl_topology_template(), + self._get_all_custom_defs(), + self.relationship_types, + self.parsed_params, + None) + + def _inputs(self): + return self.topology_template.inputs + + def _nodetemplates(self): + return self.topology_template.nodetemplates + + def _relationship_templates(self): + return self.topology_template.relationship_templates + + def _outputs(self): + return self.topology_template.outputs + + def _tpl_version(self): + return self.tpl.get(DEFINITION_VERSION) + + def _tpl_description(self): 
+ desc = self.tpl.get(DESCRIPTION) + if desc: + return desc.rstrip() + + def _tpl_imports(self): + return self.tpl.get(IMPORTS) + + def _tpl_repositories(self): + repositories = self.tpl.get(REPOSITORIES) + reposit = [] + if repositories: + for name, val in repositories.items(): + reposits = Repository(name, val) + reposit.append(reposits) + return reposit + + def _tpl_relationship_types(self): + return self._get_custom_types(RELATIONSHIP_TYPES) + + def _tpl_relationship_templates(self): + topology_template = self._tpl_topology_template() + return topology_template.get(RELATIONSHIP_TEMPLATES) + + def _tpl_topology_template(self): + return self.tpl.get(TOPOLOGY_TEMPLATE) + + def _get_all_custom_defs(self, imports=None): + types = [IMPORTS, NODE_TYPES, CAPABILITY_TYPES, RELATIONSHIP_TYPES, + DATA_TYPES, INTERFACE_TYPES, POLICY_TYPES, GROUP_TYPES] + custom_defs_final = {} + custom_defs = self._get_custom_types(types, imports) + if custom_defs: + custom_defs_final.update(custom_defs) + if custom_defs.get(IMPORTS): + import_defs = self._get_all_custom_defs( + custom_defs.get(IMPORTS)) + custom_defs_final.update(import_defs) + + # As imports are not custom_types, removing from the dict + custom_defs_final.pop(IMPORTS, None) + return custom_defs_final + + def _get_custom_types(self, type_definitions, imports=None): + """Handle custom types defined in imported template files + + This method loads the custom type definitions referenced in "imports" + section of the TOSCA YAML template. 
+ """ + custom_defs = {} + type_defs = [] + if not isinstance(type_definitions, list): + type_defs.append(type_definitions) + else: + type_defs = type_definitions + + if not imports: + imports = self._tpl_imports() + + if imports: + custom_service = toscaparser.imports.\ + ImportsLoader(imports, self.path, + type_defs, self.tpl) + + nested_tosca_tpls = custom_service.get_nested_tosca_tpls() + self._update_nested_tosca_tpls_with_topology(nested_tosca_tpls) + + custom_defs = custom_service.get_custom_defs() + if not custom_defs: + return + + # Handle custom types defined in current template file + for type_def in type_defs: + if type_def != IMPORTS: + inner_custom_types = self.tpl.get(type_def) or {} + if inner_custom_types: + custom_defs.update(inner_custom_types) + return custom_defs + + def _update_nested_tosca_tpls_with_topology(self, nested_tosca_tpls): + for tpl in nested_tosca_tpls: + filename, tosca_tpl = list(tpl.items())[0] + if (tosca_tpl.get(TOPOLOGY_TEMPLATE) and + filename not in list( + self.nested_tosca_tpls_with_topology.keys())): + self.nested_tosca_tpls_with_topology.update(tpl) + + def _handle_nested_tosca_templates_with_topology(self): + for fname, tosca_tpl in self.nested_tosca_tpls_with_topology.items(): + for nodetemplate in self.nodetemplates: + if self._is_sub_mapped_node(nodetemplate, tosca_tpl): + parsed_params = self._get_params_for_nested_template( + nodetemplate) + topology_tpl = tosca_tpl.get(TOPOLOGY_TEMPLATE) + topology_with_sub_mapping = TopologyTemplate( + topology_tpl, + self._get_all_custom_defs(), + self.relationship_types, + parsed_params, + nodetemplate) + if topology_with_sub_mapping.substitution_mappings: + # Record nested topo templates in top level template + self.nested_tosca_templates_with_topology.\ + append(topology_with_sub_mapping) + # Set substitution mapping object for mapped node + nodetemplate.sub_mapping_tosca_template = \ + topology_with_sub_mapping.substitution_mappings + + def _validate_field(self): + version 
= self._tpl_version() + if not version: + ValidationIssueCollector.collector.appendException( + MissingRequiredFieldError(what='Template', + required=DEFINITION_VERSION)) + else: + self._validate_version(version) + self.version = version + + for name in self.tpl: + if (name not in SECTIONS and + name not in self.ADDITIONAL_SECTIONS.get(version, ())): + ValidationIssueCollector.collector.appendException( + UnknownFieldError(what='Template', field=name)) + + def _validate_version(self, version): + if version not in self.VALID_TEMPLATE_VERSIONS: + ValidationIssueCollector.collector.appendException( + InvalidTemplateVersion( + what=version, + valid_versions=', '. join(self.VALID_TEMPLATE_VERSIONS))) + else: + if version != 'tosca_simple_yaml_1_0': + update_definitions(version) + + def _get_path(self, path): + if path.lower().endswith(('.yaml','.yml')): + return path + elif path.lower().endswith(('.zip', '.csar')): + # a CSAR archive + csar = CSAR(path, self.a_file) + if csar.validate(): + csar.decompress() + self.a_file = True # the file has been decompressed locally + return os.path.join(csar.temp_dir, csar.get_main_template()) + else: + ValidationIssueCollector.collector.appendException( + ValueError(_('"%(path)s" is not a valid file.') + % {'path': path})) + + def verify_template(self): + if ValidationIssueCollector.collector.exceptionsCaught(): + if self.input_path: + raise ValidationError( + message=(_('\nThe input "%(path)s" failed validation with ' + 'the following error(s): \n\n\t') + % {'path': self.input_path}) + + '\n\t'.join(ValidationIssueCollector.collector.getExceptionsReport())) + else: + raise ValidationError( + message=_('\nThe pre-parsed input failed validation with ' + 'the following error(s): \n\n\t') + + '\n\t'.join(ValidationIssueCollector.collector.getExceptionsReport())) + else: + if self.input_path: + msg = (_('The input "%(path)s" successfully passed ' + 'validation.') % {'path': self.input_path}) + else: + msg = _('The pre-parsed input 
successfully passed validation.') + + log.info(msg) + + def _is_sub_mapped_node(self, nodetemplate, tosca_tpl): + """Return True if the nodetemple is substituted.""" + if (nodetemplate and not nodetemplate.sub_mapping_tosca_template and + self.get_sub_mapping_node_type(tosca_tpl) == nodetemplate.type + and len(nodetemplate.interfaces) < 1): + return True + else: + return False + + def _get_params_for_nested_template(self, nodetemplate): + """Return total params for nested_template.""" + parsed_params = deepcopy(self.parsed_params) \ + if self.parsed_params else {} + if nodetemplate: + for pname in nodetemplate.get_properties(): + parsed_params.update({pname: + nodetemplate.get_property_value(pname)}) + return parsed_params + + def get_sub_mapping_node_type(self, tosca_tpl): + """Return substitution mappings node type.""" + if tosca_tpl: + return TopologyTemplate.get_sub_mapping_node_type( + tosca_tpl.get(TOPOLOGY_TEMPLATE)) + + def _has_substitution_mappings(self): + """Return True if the template has valid substitution mappings.""" + return self.topology_template is not None and \ + self.topology_template.substitution_mappings is not None + + def has_nested_templates(self): + """Return True if the tosca template has nested templates.""" + return self.nested_tosca_templates_with_topology is not None and \ + len(self.nested_tosca_templates_with_topology) >= 1 +*/ \ No newline at end of file diff --git a/src/main/java/org/onap/sdc/toscaparser/api/Triggers.java b/src/main/java/org/onap/sdc/toscaparser/api/Triggers.java new file mode 100644 index 0000000..cfe0138 --- /dev/null +++ b/src/main/java/org/onap/sdc/toscaparser/api/Triggers.java @@ -0,0 +1,183 @@ +package org.onap.sdc.toscaparser.api; + +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; +import org.onap.sdc.toscaparser.api.utils.ValidateUtils; + +import java.util.LinkedHashMap; + +public class Triggers extends EntityTemplate { + + 
private static final String DESCRIPTION = "description"; + private static final String EVENT = "event_type"; + private static final String SCHEDULE = "schedule"; + private static final String TARGET_FILTER = "target_filter"; + private static final String CONDITION = "condition"; + private static final String ACTION = "action"; + + private static final String SECTIONS[] = { + DESCRIPTION, EVENT, SCHEDULE, TARGET_FILTER, CONDITION, ACTION + }; + + private static final String METER_NAME = "meter_name"; + private static final String CONSTRAINT = "constraint"; + private static final String PERIOD = "period"; + private static final String EVALUATIONS = "evaluations"; + private static final String METHOD = "method"; + private static final String THRESHOLD = "threshold"; + private static final String COMPARISON_OPERATOR = "comparison_operator"; + + private static final String CONDITION_KEYNAMES[] = { + METER_NAME, CONSTRAINT, PERIOD, EVALUATIONS, METHOD, THRESHOLD, COMPARISON_OPERATOR + }; + + private String name; + private LinkedHashMap triggerTpl; + + public Triggers(String _name,LinkedHashMap _triggerTpl) { + super(); // dummy. 
don't want super + name = _name; + triggerTpl = _triggerTpl; + _validateKeys(); + _validateCondition(); + _validateInput(); + } + + public String getDescription() { + return (String)triggerTpl.get("description"); + } + + public String getEvent() { + return (String)triggerTpl.get("event_type"); + } + + public LinkedHashMap getSchedule() { + return (LinkedHashMap)triggerTpl.get("schedule"); + } + + public LinkedHashMap getTargetFilter() { + return (LinkedHashMap)triggerTpl.get("target_filter"); + } + + public LinkedHashMap getCondition() { + return (LinkedHashMap)triggerTpl.get("condition"); + } + + public LinkedHashMap getAction() { + return (LinkedHashMap)triggerTpl.get("action"); + } + + private void _validateKeys() { + for(String key: triggerTpl.keySet()) { + boolean bFound = false; + for(int i=0; i validationIssues = new HashMap(); + public void appendValidationIssue(JToscaValidationIssue issue) { + + validationIssues.put(issue.getMessage(),issue); + + } + + public List getValidationIssueReport() { + List report = new ArrayList<>(); + if (!validationIssues.isEmpty()) { + for (JToscaValidationIssue exception : validationIssues.values()) { + report.add("["+exception.getCode()+"]: "+ exception.getMessage()); + } + } + + return report; + } + public Map getValidationIssues() { + return validationIssues; + } + + + public int validationIssuesCaught() { + return validationIssues.size(); + } + +} diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/ArtifactTypeDef.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/ArtifactTypeDef.java new file mode 100644 index 0000000..e5cbf90 --- /dev/null +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/ArtifactTypeDef.java @@ -0,0 +1,105 @@ +package org.onap.sdc.toscaparser.api.elements; + +import java.util.LinkedHashMap; + +public class ArtifactTypeDef extends StatefulEntityType { + + private String type; + private LinkedHashMap customDef; + private LinkedHashMap properties; + private LinkedHashMap 
parentArtifacts; + + + + public ArtifactTypeDef(String atype,LinkedHashMap _customDef) { + super(atype,ARTIFACT_PREFIX,_customDef); + + type = atype; + customDef = _customDef; + properties = null; + if(defs != null) { + properties = (LinkedHashMap)defs.get(PROPERTIES); + } + parentArtifacts = _getParentArtifacts(); + } + + private LinkedHashMap _getParentArtifacts() { + LinkedHashMap artifacts = new LinkedHashMap<>(); + String parentArtif = null; + if(getParentType() != null) { + parentArtif = getParentType().getType(); + } + if(parentArtif != null && !parentArtif.isEmpty()) { + while(!parentArtif.equals("tosca.artifacts.Root")) { + Object ob = TOSCA_DEF.get(parentArtif); + artifacts.put(parentArtif,ob); + parentArtif = + (String)((LinkedHashMap)ob).get("derived_from"); + } + } + return artifacts; + } + + public ArtifactTypeDef getParentType() { + // Return a artifact entity from which this entity is derived + if(defs == null) { + return null; + } + String partifactEntity = derivedFrom(defs); + if(partifactEntity != null) { + return new ArtifactTypeDef(partifactEntity,customDef); + } + return null; + } + + public Object getArtifact(String name) { + // Return the definition of an artifact field by name + if(defs != null) { + return defs.get(name); + } + return null; + } + + public String getType() { + return type; + } + +} + +/*python +class ArtifactTypeDef(StatefulEntityType): + '''TOSCA built-in artifacts type.''' + + def __init__(self, atype, custom_def=None): + super(ArtifactTypeDef, self).__init__(atype, self.ARTIFACT_PREFIX, + custom_def) + self.type = atype + self.custom_def = custom_def + self.properties = None + if self.PROPERTIES in self.defs: + self.properties = self.defs[self.PROPERTIES] + self.parent_artifacts = self._get_parent_artifacts() + + def _get_parent_artifacts(self): + artifacts = {} + parent_artif = self.parent_type.type if self.parent_type else None + if parent_artif: + while parent_artif != 'tosca.artifacts.Root': + artifacts[parent_artif] 
= self.TOSCA_DEF[parent_artif] + parent_artif = artifacts[parent_artif]['derived_from'] + return artifacts + + @property + def parent_type(self): + '''Return a artifact entity from which this entity is derived.''' + if not hasattr(self, 'defs'): + return None + partifact_entity = self.derived_from(self.defs) + if partifact_entity: + return ArtifactTypeDef(partifact_entity, self.custom_def) + + def get_artifact(self, name): + '''Return the definition of an artifact field by name.''' + if name in self.defs: + return self.defs[name] +*/ \ No newline at end of file diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/AttributeDef.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/AttributeDef.java new file mode 100644 index 0000000..702094f --- /dev/null +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/AttributeDef.java @@ -0,0 +1,40 @@ +package org.onap.sdc.toscaparser.api.elements; + +import java.util.LinkedHashMap; + +public class AttributeDef { + // TOSCA built-in Attribute type + + private String name; + private Object value; + private LinkedHashMap schema; + + public AttributeDef(String adName, Object adValue, LinkedHashMap adSchema) { + name = adName; + value = adValue; + schema = adSchema; + } + + public String getName() { + return name; + } + + public Object getValue() { + return value; + } + + public LinkedHashMap getSchema() { + return schema; + } +} + +/*python + +class AttributeDef(object): + '''TOSCA built-in Attribute type.''' + + def __init__(self, name, value=None, schema=None): + self.name = name + self.value = value + self.schema = schema +*/ \ No newline at end of file diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/CapabilityTypeDef.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/CapabilityTypeDef.java new file mode 100644 index 0000000..e64f1b8 --- /dev/null +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/CapabilityTypeDef.java @@ -0,0 +1,222 @@ +package 
org.onap.sdc.toscaparser.api.elements; + +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.Map; + +public class CapabilityTypeDef extends StatefulEntityType { + // TOSCA built-in capabilities type + + private static final String TOSCA_TYPEURI_CAPABILITY_ROOT = "tosca.capabilities.Root"; + + private String name; + private String nodetype; + private LinkedHashMap customDef; + private LinkedHashMap properties; + private LinkedHashMap parentCapabilities; + + @SuppressWarnings("unchecked") + public CapabilityTypeDef(String cname,String ctype,String ntype,LinkedHashMap ccustomDef) { + super(ctype,CAPABILITY_PREFIX,ccustomDef); + + name = cname; + nodetype = ntype; + properties = null; + customDef = ccustomDef; + if(defs != null) { + properties = (LinkedHashMap)defs.get(PROPERTIES); + } + parentCapabilities = _getParentCapabilities(customDef); + } + + @SuppressWarnings("unchecked") + public ArrayList getPropertiesDefObjects () { + // Return a list of property definition objects + ArrayList propsdefs = new ArrayList<>(); + LinkedHashMap parentProperties = new LinkedHashMap<>(); + if(parentCapabilities != null) { + for(Map.Entry me: parentCapabilities.entrySet()) { + parentProperties.put(me.getKey(),((LinkedHashMap)me.getValue()).get("properties")); + } + } + if(properties != null) { + for(Map.Entry me: properties.entrySet()) { + propsdefs.add(new PropertyDef(me.getKey(),null,(LinkedHashMap)me.getValue())); + } + } + if(parentProperties != null) { + for(Map.Entry me: parentProperties.entrySet()) { + LinkedHashMap props = (LinkedHashMap)me.getValue(); + if (props != null) { + for(Map.Entry pe: props.entrySet()) { + String prop = pe.getKey(); + LinkedHashMap schema = (LinkedHashMap)pe.getValue(); + // add parent property if not overridden by children type + if(properties == null || properties.get(prop) == null) { + propsdefs.add(new PropertyDef(prop, null, schema)); + } + } + } + } + } + return propsdefs; + } + + public LinkedHashMap 
getPropertiesDef() { + LinkedHashMap pds = new LinkedHashMap<>(); + for(PropertyDef pd: getPropertiesDefObjects()) { + pds.put(pd.getName(),pd); + } + return pds; + } + + public PropertyDef getPropertyDefValue(String pdname) { + // Return the definition of a given property name + LinkedHashMap propsDef = getPropertiesDef(); + if(propsDef != null && propsDef.get(pdname) != null) { + return (PropertyDef)propsDef.get(pdname).getPDValue(); + } + return null; + } + + @SuppressWarnings("unchecked") + private LinkedHashMap _getParentCapabilities(LinkedHashMap customDef) { + LinkedHashMap capabilities = new LinkedHashMap<>(); + CapabilityTypeDef parentCap = getParentType(); + if(parentCap != null) { + String sParentCap = parentCap.getType(); + while(!sParentCap.equals(TOSCA_TYPEURI_CAPABILITY_ROOT)) { + if(TOSCA_DEF.get(sParentCap) != null) { + capabilities.put(sParentCap,TOSCA_DEF.get(sParentCap)); + } + else if(customDef != null && customDef.get(sParentCap) != null) { + capabilities.put(sParentCap,customDef.get(sParentCap)); + } + sParentCap = (String)((LinkedHashMap)capabilities.get(sParentCap)).get("derived_from"); + } + } + return capabilities; + } + + public CapabilityTypeDef getParentType() { + // Return a capability this capability is derived from + if(defs == null) { + return null; + } + String pnode = derivedFrom(defs); + if(pnode != null && !pnode.isEmpty()) { + return new CapabilityTypeDef(name, pnode, nodetype, customDef); + } + return null; + } + + public boolean inheritsFrom(ArrayList typeNames) { + // Check this capability is in type_names + + // Check if this capability or some of its parent types + // are in the list of types: type_names + if(typeNames.contains(getType())) { + return true; + } + else if(getParentType() != null) { + return getParentType().inheritsFrom(typeNames); + } + return false; + } + + // getters/setters + + public LinkedHashMap getProperties() { + return properties; + } + + public String getName() { + return name; + } +} + +/*python 
+from toscaparser.elements.property_definition import PropertyDef +from toscaparser.elements.statefulentitytype import StatefulEntityType + + +class CapabilityTypeDef(StatefulEntityType): + '''TOSCA built-in capabilities type.''' + TOSCA_TYPEURI_CAPABILITY_ROOT = 'tosca.capabilities.Root' + + def __init__(self, name, ctype, ntype, custom_def=None): + self.name = name + super(CapabilityTypeDef, self).__init__(ctype, self.CAPABILITY_PREFIX, + custom_def) + self.nodetype = ntype + self.properties = None + self.custom_def = custom_def + if self.PROPERTIES in self.defs: + self.properties = self.defs[self.PROPERTIES] + self.parent_capabilities = self._get_parent_capabilities(custom_def) + + def get_properties_def_objects(self): + '''Return a list of property definition objects.''' + properties = [] + parent_properties = {} + if self.parent_capabilities: + for type, value in self.parent_capabilities.items(): + parent_properties[type] = value.get('properties') + if self.properties: + for prop, schema in self.properties.items(): + properties.append(PropertyDef(prop, None, schema)) + if parent_properties: + for parent, props in parent_properties.items(): + for prop, schema in props.items(): + # add parent property if not overridden by children type + if not self.properties or \ + prop not in self.properties.keys(): + properties.append(PropertyDef(prop, None, schema)) + return properties + + def get_properties_def(self): + '''Return a dictionary of property definition name-object pairs.''' + return {prop.name: prop + for prop in self.get_properties_def_objects()} + + def get_property_def_value(self, name): + '''Return the definition of a given property name.''' + props_def = self.get_properties_def() + if props_def and name in props_def: + return props_def[name].value + + def _get_parent_capabilities(self, custom_def=None): + capabilities = {} + parent_cap = self.parent_type + if parent_cap: + parent_cap = parent_cap.type + while parent_cap != 
self.TOSCA_TYPEURI_CAPABILITY_ROOT: + if parent_cap in self.TOSCA_DEF.keys(): + capabilities[parent_cap] = self.TOSCA_DEF[parent_cap] + elif custom_def and parent_cap in custom_def.keys(): + capabilities[parent_cap] = custom_def[parent_cap] + parent_cap = capabilities[parent_cap]['derived_from'] + return capabilities + + @property + def parent_type(self): + '''Return a capability this capability is derived from.''' + if not hasattr(self, 'defs'): + return None + pnode = self.derived_from(self.defs) + if pnode: + return CapabilityTypeDef(self.name, pnode, + self.nodetype, self.custom_def) + + def inherits_from(self, type_names): + '''Check this capability is in type_names + + Check if this capability or some of its parent types + are in the list of types: type_names + ''' + if self.type in type_names: + return True + elif self.parent_type: + return self.parent_type.inherits_from(type_names) + else: + return False*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/DataType.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/DataType.java new file mode 100644 index 0000000..17f1ad4 --- /dev/null +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/DataType.java @@ -0,0 +1,116 @@ +package org.onap.sdc.toscaparser.api.elements; + +import java.util.ArrayList; +import java.util.LinkedHashMap; + +public class DataType extends StatefulEntityType { + + LinkedHashMap customDef; + + public DataType(String _dataTypeName,LinkedHashMap _customDef) { + super(_dataTypeName,DATATYPE_NETWORK_PREFIX,_customDef); + + customDef = _customDef; + } + + public DataType getParentType() { + // Return a datatype this datatype is derived from + if(defs != null) { + String ptype = derivedFrom(defs); + if(ptype != null) { + return new DataType(ptype,customDef); + } + } + return null; + } + + public String getValueType() { + // Return 'type' section in the datatype schema + if(defs != null) { + return (String)entityValue(defs,"type"); + } + return null; + } + + public 
ArrayList getAllPropertiesObjects() { + //Return all properties objects defined in type and parent type + ArrayList propsDef = getPropertiesDefObjects(); + DataType ptype = getParentType(); + while(ptype != null) { + propsDef.addAll(ptype.getPropertiesDefObjects()); + ptype = ptype.getParentType(); + } + return propsDef; + } + + public LinkedHashMap getAllProperties() { + // Return a dictionary of all property definition name-object pairs + LinkedHashMap pno = new LinkedHashMap<>(); + for(PropertyDef pd: getAllPropertiesObjects()) { + pno.put(pd.getName(),pd); + } + return pno; + } + + public Object getAllPropertyValue(String name) { + // Return the value of a given property name + LinkedHashMap propsDef = getAllProperties(); + if(propsDef != null && propsDef.get(name) != null) { + return propsDef.get(name).getPDValue(); + } + return null; + } + + public LinkedHashMap getDefs() { + return defs; + } + +} + +/*python + +from toscaparser.elements.statefulentitytype import StatefulEntityType + + +class DataType(StatefulEntityType): + '''TOSCA built-in and user defined complex data type.''' + + def __init__(self, datatypename, custom_def=None): + super(DataType, self).__init__(datatypename, + self.DATATYPE_NETWORK_PREFIX, + custom_def) + self.custom_def = custom_def + + @property + def parent_type(self): + '''Return a datatype this datatype is derived from.''' + ptype = self.derived_from(self.defs) + if ptype: + return DataType(ptype, self.custom_def) + return None + + @property + def value_type(self): + '''Return 'type' section in the datatype schema.''' + return self.entity_value(self.defs, 'type') + + def get_all_properties_objects(self): + '''Return all properties objects defined in type and parent type.''' + props_def = self.get_properties_def_objects() + ptype = self.parent_type + while ptype: + props_def.extend(ptype.get_properties_def_objects()) + ptype = ptype.parent_type + return props_def + + def get_all_properties(self): + '''Return a dictionary of all 
property definition name-object pairs.''' + return {prop.name: prop + for prop in self.get_all_properties_objects()} + + def get_all_property_value(self, name): + '''Return the value of a given property name.''' + props_def = self.get_all_properties() + if props_def and name in props_def.key(): + return props_def[name].value +*/ \ No newline at end of file diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/EntityType.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/EntityType.java new file mode 100644 index 0000000..e2ad766 --- /dev/null +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/EntityType.java @@ -0,0 +1,418 @@ +package org.onap.sdc.toscaparser.api.elements; + +import java.io.IOException; +import java.io.InputStream; +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.Map; + +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.utils.CopyUtils; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; +import org.onap.sdc.toscaparser.api.extensions.ExtTools; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.yaml.snakeyaml.Yaml; + +public class EntityType { + + private static Logger log = LoggerFactory.getLogger(EntityType.class.getName()); + + private static final String TOSCA_DEFINITION_1_0_YAML = "TOSCA_definition_1_0.yaml"; + protected static final String DERIVED_FROM = "derived_from"; + protected static final String PROPERTIES = "properties"; + protected static final String ATTRIBUTES = "attributes"; + protected static final String REQUIREMENTS = "requirements"; + protected static final String INTERFACES = "interfaces"; + protected static final String CAPABILITIES = "capabilities"; + protected static final String TYPE = "type"; + protected static final String ARTIFACTS = "artifacts"; + + @SuppressWarnings("unused") + private static final String SECTIONS[] = { + DERIVED_FROM, PROPERTIES, ATTRIBUTES, REQUIREMENTS, + 
INTERFACES, CAPABILITIES, TYPE, ARTIFACTS + }; + + public static final String TOSCA_DEF_SECTIONS[] = { + "node_types", "data_types", "artifact_types", + "group_types", "relationship_types", + "capability_types", "interface_types", + "policy_types"}; + + + // TOSCA definition file + //private final static String path = EntityType.class.getProtectionDomain().getCodeSource().getLocation().getPath(); + + //private final static String path = EntityType.class.getClassLoader().getResource("TOSCA_definition_1_0.yaml").getFile(); + //private final static String TOSCA_DEF_FILE = EntityType.class.getClassLoader().getResourceAsStream("TOSCA_definition_1_0.yaml"); + + private static LinkedHashMap TOSCA_DEF_LOAD_AS_IS = loadTdf(); + + //EntityType.class.getClassLoader().getResourceAsStream("TOSCA_definition_1_0.yaml"); + + @SuppressWarnings("unchecked") + private static LinkedHashMap loadTdf() { + String toscaDefLocation = EntityType.class.getClassLoader().getResource(TOSCA_DEFINITION_1_0_YAML).getFile(); + InputStream input = EntityType.class.getClassLoader().getResourceAsStream(TOSCA_DEFINITION_1_0_YAML); + if (input == null){ + log.error("EntityType - loadTdf - Couldn't load TOSCA_DEF_FILE {}", toscaDefLocation); + } + Yaml yaml = new Yaml(); + Object loaded = yaml.load(input); + //@SuppressWarnings("unchecked") + return (LinkedHashMap) loaded; + } + + // Map of definition with pre-loaded values of TOSCA_DEF_FILE_SECTIONS + public static LinkedHashMap TOSCA_DEF; + static { + TOSCA_DEF = new LinkedHashMap(); + for(String section: TOSCA_DEF_SECTIONS) { + @SuppressWarnings("unchecked") + LinkedHashMap value = (LinkedHashMap)TOSCA_DEF_LOAD_AS_IS.get(section); + if(value != null) { + for(String key: value.keySet()) { + TOSCA_DEF.put(key, value.get(key)); + } + } + } + } + + public static final String DEPENDSON = "tosca.relationships.DependsOn"; + public static final String HOSTEDON = "tosca.relationships.HostedOn"; + public static final String CONNECTSTO = 
"tosca.relationships.ConnectsTo"; + public static final String ATTACHESTO = "tosca.relationships.AttachesTo"; + public static final String LINKSTO = "tosca.relationships.network.LinksTo"; + public static final String BINDSTO = "tosca.relationships.network.BindsTo"; + + public static final String RELATIONSHIP_TYPE[] = { + "tosca.relationships.DependsOn", + "tosca.relationships.HostedOn", + "tosca.relationships.ConnectsTo", + "tosca.relationships.AttachesTo", + "tosca.relationships.network.LinksTo", + "tosca.relationships.network.BindsTo"}; + + public static final String NODE_PREFIX = "tosca.nodes."; + public static final String RELATIONSHIP_PREFIX = "tosca.relationships."; + public static final String CAPABILITY_PREFIX = "tosca.capabilities."; + public static final String INTERFACE_PREFIX = "tosca.interfaces."; + public static final String ARTIFACT_PREFIX = "tosca.artifacts."; + public static final String POLICY_PREFIX = "tosca.policies."; + public static final String GROUP_PREFIX = "tosca.groups."; + //currently the data types are defined only for network + // but may have changes in the future. + public static final String DATATYPE_PREFIX = "tosca.datatypes."; + public static final String DATATYPE_NETWORK_PREFIX = DATATYPE_PREFIX + "network."; + public static final String TOSCA = "tosca"; + + protected String type; + protected LinkedHashMap defs = null; + public Object getParentType() { return null; } + + public String derivedFrom(LinkedHashMap defs) { + // Return a type this type is derived from + return (String)entityValue(defs, "derived_from"); + } + + public boolean isDerivedFrom(String type_str) { + // Check if object inherits from the given type + // Returns true if this object is derived from 'type_str' + // False otherwise. 
+ if(type == null || this.type.isEmpty()) { + return false; + } + else if(type == type_str) { + return true; + } + else if(getParentType() != null) { + return ((EntityType)getParentType()).isDerivedFrom(type_str); + } + else { + return false; + } + } + + public Object entityValue(LinkedHashMap defs, String key) { + if(defs != null) { + return defs.get(key); + } + return null; + } + + @SuppressWarnings("unchecked") + public Object getValue(String ndtype, LinkedHashMap _defs, boolean parent) { + Object value = null; + if(_defs == null) { + if(defs == null) { + return null; + } + _defs = this.defs; + } + Object defndt = _defs.get(ndtype); + if(defndt != null) { + // copy the value to avoid that next operations add items in the + // item definitions + //value = copy.copy(defs[ndtype]) + value = CopyUtils.copyLhmOrAl(defndt); + } + + if(parent) { + EntityType p = this; + if(p != null) { + while(p != null) { + if(p.defs != null && p.defs.get(ndtype) != null) { + // get the parent value + Object parentValue = p.defs.get(ndtype); + if(value != null) { + if(value instanceof LinkedHashMap) { + for(Map.Entry me: ((LinkedHashMap)parentValue).entrySet()) { + String k = me.getKey(); + if(((LinkedHashMap)value).get(k) == null) { + ((LinkedHashMap)value).put(k,me.getValue()); + } + } + } + if(value instanceof ArrayList) { + for(Object pValue: (ArrayList)parentValue) { + if(!((ArrayList)value).contains(pValue)) { + ((ArrayList)value).add(pValue); + } + } + } + } + else { + // value = copy.copy(parent_value) + value = CopyUtils.copyLhmOrAl(parentValue); + } + } + p = (EntityType)p.getParentType(); + } + } + } + + return value; + } + + @SuppressWarnings("unchecked") + public Object getDefinition(String ndtype) { + Object value = null; + LinkedHashMap _defs; + // no point in hasattr, because we have it, and it + // doesn't do anything except emit an exception anyway + //if not hasattr(self, 'defs'): + // defs = None + // ValidationIssueCollector.appendException( + // 
ValidationError(message="defs is " + str(defs))) + //else: + // defs = self.defs + _defs = this.defs; + + + if(_defs != null && _defs.get(ndtype) != null) { + value = _defs.get(ndtype); + } + + Object p = getParentType(); + if(p != null) { + Object inherited = ((EntityType)p).getDefinition(ndtype); + if(inherited != null) { + // inherited = dict(inherited) WTF?!? + if(value == null) { + value = inherited; + } + else { + //????? + //inherited.update(value) + //value.update(inherited) + for(Map.Entry me: ((LinkedHashMap)inherited).entrySet()) { + ((LinkedHashMap)value).put(me.getKey(),me.getValue()); + } + } + } + } + return value; + } + + public static void updateDefinitions(String version) { + ExtTools exttools = new ExtTools(); + String extensionDefsFile = exttools.getDefsFile(version); + + try (InputStream input = EntityType.class.getClassLoader().getResourceAsStream(extensionDefsFile);){ + Yaml yaml = new Yaml(); + LinkedHashMap nfvDefFile = (LinkedHashMap)yaml.load(input); + LinkedHashMap nfvDef = new LinkedHashMap<>(); + for(String section: TOSCA_DEF_SECTIONS) { + if(nfvDefFile.get(section) != null) { + LinkedHashMap value = + (LinkedHashMap)nfvDefFile.get(section); + for(String key: value.keySet()) { + nfvDef.put(key, value.get(key)); + } + } + } + TOSCA_DEF.putAll(nfvDef); + } + catch (IOException e) { + log.error("EntityType - updateDefinitions - Failed to update definitions from defs file {}",extensionDefsFile); + log.error("Exception:", e); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE280", + String.format("Failed to update definitions from defs file \"%s\" ",extensionDefsFile))); + return; + } + } +} + +/*python + +from toscaparser.common.exception import ValidationIssueCollector +from toscaparser.common.exception import ValidationError +from toscaparser.extensions.exttools import ExtTools +import org.onap.sdc.toscaparser.api.utils.yamlparser + +log = logging.getLogger('tosca') + + +class EntityType(object): + 
'''Base class for TOSCA elements.''' + + SECTIONS = (DERIVED_FROM, PROPERTIES, ATTRIBUTES, REQUIREMENTS, + INTERFACES, CAPABILITIES, TYPE, ARTIFACTS) = \ + ('derived_from', 'properties', 'attributes', 'requirements', + 'interfaces', 'capabilities', 'type', 'artifacts') + + TOSCA_DEF_SECTIONS = ['node_types', 'data_types', 'artifact_types', + 'group_types', 'relationship_types', + 'capability_types', 'interface_types', + 'policy_types'] + + '''TOSCA definition file.''' + TOSCA_DEF_FILE = os.path.join( + os.path.dirname(os.path.abspath(__file__)), + "TOSCA_definition_1_0.yaml") + + loader = toscaparser.utils.yamlparser.load_yaml + + TOSCA_DEF_LOAD_AS_IS = loader(TOSCA_DEF_FILE) + + # Map of definition with pre-loaded values of TOSCA_DEF_FILE_SECTIONS + TOSCA_DEF = {} + for section in TOSCA_DEF_SECTIONS: + if section in TOSCA_DEF_LOAD_AS_IS.keys(): + value = TOSCA_DEF_LOAD_AS_IS[section] + for key in value.keys(): + TOSCA_DEF[key] = value[key] + + RELATIONSHIP_TYPE = (DEPENDSON, HOSTEDON, CONNECTSTO, ATTACHESTO, + LINKSTO, BINDSTO) = \ + ('tosca.relationships.DependsOn', + 'tosca.relationships.HostedOn', + 'tosca.relationships.ConnectsTo', + 'tosca.relationships.AttachesTo', + 'tosca.relationships.network.LinksTo', + 'tosca.relationships.network.BindsTo') + + NODE_PREFIX = 'tosca.nodes.' + RELATIONSHIP_PREFIX = 'tosca.relationships.' + CAPABILITY_PREFIX = 'tosca.capabilities.' + INTERFACE_PREFIX = 'tosca.interfaces.' + ARTIFACT_PREFIX = 'tosca.artifacts.' + POLICY_PREFIX = 'tosca.policies.' + GROUP_PREFIX = 'tosca.groups.' + # currently the data types are defined only for network + # but may have changes in the future. + DATATYPE_PREFIX = 'tosca.datatypes.' + DATATYPE_NETWORK_PREFIX = DATATYPE_PREFIX + 'network.' + TOSCA = 'tosca' + + def derived_from(self, defs): + '''Return a type this type is derived from.''' + return self.entity_value(defs, 'derived_from') + + def is_derived_from(self, type_str): + '''Check if object inherits from the given type. 
+ + Returns true if this object is derived from 'type_str'. + False otherwise. + ''' + if not self.type: + return False + elif self.type == type_str: + return True + elif self.parent_type: + return self.parent_type.is_derived_from(type_str) + else: + return False + + def entity_value(self, defs, key): + if key in defs: + return defs[key] + + def get_value(self, ndtype, defs=None, parent=None): + value = None + if defs is None: + if not hasattr(self, 'defs'): + return None + defs = self.defs + if ndtype in defs: + # copy the value to avoid that next operations add items in the + # item definitions + value = copy.copy(defs[ndtype]) + if parent: + p = self + if p: + while p: + if ndtype in p.defs: + # get the parent value + parent_value = p.defs[ndtype] + if value: + if isinstance(value, dict): + for k, v in parent_value.items(): + if k not in value.keys(): + value[k] = v + if isinstance(value, list): + for p_value in parent_value: + if p_value not in value: + value.append(p_value) + else: + value = copy.copy(parent_value) + p = p.parent_type + return value + + def get_definition(self, ndtype): + value = None + if not hasattr(self, 'defs'): + defs = None + ValidationIssueCollector.appendException( + ValidationError(message="defs is " + str(defs))) + else: + defs = self.defs + if defs is not None and ndtype in defs: + value = defs[ndtype] + p = self.parent_type + if p: + inherited = p.get_definition(ndtype) + if inherited: + inherited = dict(inherited) + if not value: + value = inherited + else: + inherited.update(value) + value.update(inherited) + return value + + +def update_definitions(version): + exttools = ExtTools() + extension_defs_file = exttools.get_defs_file(version) + loader = toscaparser.utils.yamlparser.load_yaml + nfv_def_file = loader(extension_defs_file) + nfv_def = {} + for section in EntityType.TOSCA_DEF_SECTIONS: + if section in nfv_def_file.keys(): + value = nfv_def_file[section] + for key in value.keys(): + nfv_def[key] = value[key] + 
EntityType.TOSCA_DEF.update(nfv_def) +*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/GroupType.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/GroupType.java new file mode 100644 index 0000000..1419461 --- /dev/null +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/GroupType.java @@ -0,0 +1,215 @@ +package org.onap.sdc.toscaparser.api.elements; + +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +import java.util.LinkedHashMap; + +public class GroupType extends StatefulEntityType { + + private static final String DERIVED_FROM = "derived_from"; + private static final String VERSION = "version"; + private static final String METADATA = "metadata"; + private static final String DESCRIPTION = "description"; + private static final String PROPERTIES = "properties"; + private static final String MEMBERS = "members"; + private static final String INTERFACES = "interfaces"; + + private static final String SECTIONS[] = { + DERIVED_FROM, VERSION, METADATA, DESCRIPTION, PROPERTIES, MEMBERS, INTERFACES}; + + private String groupType; + private LinkedHashMap customDef; + private String groupDescription; + private String groupVersion; + //private LinkedHashMap groupProperties; + //private ArrayList groupMembers; + private LinkedHashMap metaData; + + @SuppressWarnings("unchecked") + public GroupType(String _grouptype,LinkedHashMap _customDef) { + super(_grouptype,GROUP_PREFIX,_customDef); + + groupType = _grouptype; + customDef = _customDef; + _validateFields(); + if(defs != null) { + groupDescription = (String)defs.get(DESCRIPTION); + groupVersion = (String)defs.get(VERSION); + //groupProperties = (LinkedHashMap)defs.get(PROPERTIES); + //groupMembers = (ArrayList)defs.get(MEMBERS); + Object mdo = defs.get(METADATA); + if(mdo instanceof LinkedHashMap) { + metaData = (LinkedHashMap)mdo; + } + else { + metaData = null; + } + + if(metaData != null) { + 
_validateMetadata(metaData); + } + } + } + + public GroupType getParentType() { + // Return a group statefulentity of this entity is derived from. + if(defs == null) { + return null; + } + String pgroupEntity = derivedFrom(defs); + if(pgroupEntity != null) { + return new GroupType(pgroupEntity,customDef); + } + return null; + } + + public String getDescription() { + return groupDescription; + } + + public String getVersion() { + return groupVersion; + } + + @SuppressWarnings("unchecked") + public LinkedHashMap getInterfaces() { + Object ifo = getValue(INTERFACES,null,false); + if(ifo instanceof LinkedHashMap) { + return (LinkedHashMap)ifo; + } + return new LinkedHashMap(); + } + + private void _validateFields() { + if(defs != null) { + for(String name: defs.keySet()) { + boolean bFound = false; + for(String sect: SECTIONS) { + if(name.equals(sect)) { + bFound = true; + break; + } + } + if(!bFound) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE120", String.format( + "UnknownFieldError: Group Type \"%s\" contains unknown field \"%s\"", + groupType,name))); + } + } + } + } + + @SuppressWarnings("unchecked") + private void _validateMetadata(LinkedHashMap metadata) { + String mtt = (String) metadata.get("type"); + if(mtt != null && !mtt.equals("map") && !mtt.equals("tosca:map")) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE121", String.format( + "InvalidTypeError: \"%s\" defined in group for metadata is invalid", + mtt))); + } + for(String entrySchema: metadata.keySet()) { + Object estob = metadata.get(entrySchema); + if(estob instanceof LinkedHashMap) { + String est = (String)((LinkedHashMap)estob).get("type"); + if(!est.equals("string")) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE122", String.format( + "InvalidTypeError: \"%s\" defined in group for metadata \"%s\" is invalid", + est,entrySchema))); + } + } + } + } + + public String 
getType() { + return groupType; + } + + +} + +/*python + +from toscaparser.common.exception import ValidationIssueCollector +from toscaparser.common.exception import InvalidTypeError +from toscaparser.common.exception import UnknownFieldError +from toscaparser.elements.statefulentitytype import StatefulEntityType + + +class GroupType(StatefulEntityType): + '''TOSCA built-in group type.''' + + SECTIONS = (DERIVED_FROM, VERSION, METADATA, DESCRIPTION, PROPERTIES, + MEMBERS, INTERFACES) = \ + ("derived_from", "version", "metadata", "description", + "properties", "members", "interfaces") + + def __init__(self, grouptype, custom_def=None): + super(GroupType, self).__init__(grouptype, self.GROUP_PREFIX, + custom_def) + self.custom_def = custom_def + self.grouptype = grouptype + self._validate_fields() + self.group_description = None + if self.DESCRIPTION in self.defs: + self.group_description = self.defs[self.DESCRIPTION] + + self.group_version = None + if self.VERSION in self.defs: + self.group_version = self.defs[self.VERSION] + + self.group_properties = None + if self.PROPERTIES in self.defs: + self.group_properties = self.defs[self.PROPERTIES] + + self.group_members = None + if self.MEMBERS in self.defs: + self.group_members = self.defs[self.MEMBERS] + + if self.METADATA in self.defs: + self.meta_data = self.defs[self.METADATA] + self._validate_metadata(self.meta_data) + + @property + def parent_type(self): + '''Return a group statefulentity of this entity is derived from.''' + if not hasattr(self, 'defs'): + return None + pgroup_entity = self.derived_from(self.defs) + if pgroup_entity: + return GroupType(pgroup_entity, self.custom_def) + + @property + def description(self): + return self.group_description + + @property + def version(self): + return self.group_version + + @property + def interfaces(self): + return self.get_value(self.INTERFACES) + + def _validate_fields(self): + if self.defs: + for name in self.defs.keys(): + if name not in self.SECTIONS: + 
ValidationIssueCollector.appendException( + UnknownFieldError(what='Group Type %s' + % self.grouptype, field=name)) + + def _validate_metadata(self, meta_data): + if not meta_data.get('type') in ['map', 'tosca:map']: + ValidationIssueCollector.appendException( + InvalidTypeError(what='"%s" defined in group for ' + 'metadata' % (meta_data.get('type')))) + for entry_schema, entry_schema_type in meta_data.items(): + if isinstance(entry_schema_type, dict) and not \ + entry_schema_type.get('type') == 'string': + ValidationIssueCollector.appendException( + InvalidTypeError(what='"%s" defined in group for ' + 'metadata "%s"' + % (entry_schema_type.get('type'), + entry_schema))) +*/ \ No newline at end of file diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/InterfacesDef.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/InterfacesDef.java new file mode 100644 index 0000000..f8669ed --- /dev/null +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/InterfacesDef.java @@ -0,0 +1,228 @@ +package org.onap.sdc.toscaparser.api.elements; + +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; +import org.onap.sdc.toscaparser.api.EntityTemplate; + +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.Map; + +public class InterfacesDef extends StatefulEntityType { + + public static final String LIFECYCLE = "tosca.interfaces.node.lifecycle.Standard"; + public static final String CONFIGURE = "tosca.interfaces.relationship.Configure"; + public static final String LIFECYCLE_SHORTNAME = "Standard"; + public static final String CONFIGURE_SHORTNAME = "Configure"; + + public static final String SECTIONS[] = { + LIFECYCLE, CONFIGURE, LIFECYCLE_SHORTNAME,CONFIGURE_SHORTNAME + }; + + public static final String IMPLEMENTATION = "implementation"; + public static final String INPUTS = "inputs"; + + public static final String INTERFACEVALUE[] = {IMPLEMENTATION, 
INPUTS}; + + public static final String INTERFACE_DEF_RESERVED_WORDS[] = { + "type", "inputs", "derived_from", "version", "description"}; + + private EntityType ntype; + private EntityTemplate nodeTemplate; + private String name; + private Object value; + private String implementation; + private LinkedHashMap inputs; + + + @SuppressWarnings("unchecked") + public InterfacesDef(EntityType inodeType, + String interfaceType, + EntityTemplate inodeTemplate, + String iname, + Object ivalue) { + // void + super(); + + ntype = inodeType; + nodeTemplate = inodeTemplate; + type = interfaceType; + name = iname; + value = ivalue; + implementation = null; + inputs = null; + defs = new LinkedHashMap(); + + if(interfaceType.equals(LIFECYCLE_SHORTNAME)) { + interfaceType = LIFECYCLE; + } + if(interfaceType.equals(CONFIGURE_SHORTNAME)) { + interfaceType = CONFIGURE; + } + + // only NodeType has getInterfaces "hasattr(ntype,interfaces)" + // while RelationshipType does not + if(ntype instanceof NodeType) { + if(((NodeType)ntype).getInterfaces() != null && + ((NodeType)ntype).getInterfaces().values().contains(interfaceType)) { + LinkedHashMap nii = (LinkedHashMap) + ((NodeType)ntype).getInterfaces().get(interfaceType); + interfaceType = (String)nii.get("type"); + } + } + if(inodeType != null) { + if(nodeTemplate != null && nodeTemplate.getCustomDef() != null && + nodeTemplate.getCustomDef().values().contains(interfaceType)) { + defs = (LinkedHashMap) + nodeTemplate.getCustomDef().get(interfaceType); + } + else { + defs = (LinkedHashMap)TOSCA_DEF.get(interfaceType); + } + } + + if(ivalue != null) { + if(ivalue instanceof LinkedHashMap) { + for(Map.Entry me: ((LinkedHashMap)ivalue).entrySet()) { + if(me.getKey().equals("implementation")) { + implementation = (String)me.getValue(); + } + else if(me.getKey().equals("inputs")) { + inputs = (LinkedHashMap)me.getValue(); + } + else { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE123", String.format( 
+ "UnknownFieldError: \"interfaces\" of template \"%s\" contain unknown field \"%s\"", + nodeTemplate.getName(),me.getKey()))); + } + } + } + else { + implementation = (String)ivalue; + } + } + } + + public ArrayList getLifecycleOps() { + if(defs != null) { + if(type.equals(LIFECYCLE)) { + return _ops(); + } + } + return null; + } + + public ArrayList getConfigureOps() { + if(defs != null) { + if(type.equals(CONFIGURE)) { + return _ops(); + } + } + return null; + } + + private ArrayList _ops() { + return new ArrayList(defs.keySet()); + } + + // getters/setters + + public LinkedHashMap getInputs() { + return inputs; + } + + public void setInput(String name,Object value) { + inputs.put(name, value); + } +} + +/*python + +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +from toscaparser.common.exception import ValidationIssueCollector +from toscaparser.common.exception import UnknownFieldError +from toscaparser.elements.statefulentitytype import StatefulEntityType + +SECTIONS = (LIFECYCLE, CONFIGURE, LIFECYCLE_SHORTNAME, + CONFIGURE_SHORTNAME) = \ + ('tosca.interfaces.node.lifecycle.Standard', + 'tosca.interfaces.relationship.Configure', + 'Standard', 'Configure') + +INTERFACEVALUE = (IMPLEMENTATION, INPUTS) = ('implementation', 'inputs') + +INTERFACE_DEF_RESERVED_WORDS = ['type', 'inputs', 'derived_from', 'version', + 'description'] + + +class InterfacesDef(StatefulEntityType): + '''TOSCA built-in interfaces type.''' + + def __init__(self, node_type, interfacetype, + node_template=None, name=None, value=None): + self.ntype = node_type + self.node_template = node_template + self.type = interfacetype + self.name = name + self.value = value + self.implementation = None + self.inputs = None + self.defs = {} + if interfacetype == LIFECYCLE_SHORTNAME: + interfacetype = LIFECYCLE + if interfacetype == CONFIGURE_SHORTNAME: + interfacetype = CONFIGURE + if hasattr(self.ntype, 'interfaces') \ + and self.ntype.interfaces \ + and interfacetype in self.ntype.interfaces: + interfacetype = self.ntype.interfaces[interfacetype]['type'] + if node_type: + if self.node_template and self.node_template.custom_def \ + and interfacetype in self.node_template.custom_def: + self.defs = self.node_template.custom_def[interfacetype] + else: + self.defs = self.TOSCA_DEF[interfacetype] + if value: + if isinstance(self.value, dict): + for i, j in self.value.items(): + if i == IMPLEMENTATION: + self.implementation = j + elif i == INPUTS: + self.inputs = j + else: + what = ('"interfaces" of template "%s"' % + self.node_template.name) + ValidationIssueCollector.appendException( + UnknownFieldError(what=what, field=i)) + else: + self.implementation = value + + @property + def lifecycle_ops(self): + if self.defs: + if self.type == LIFECYCLE: + return self._ops() 
+ + @property + def configure_ops(self): + if self.defs: + if self.type == CONFIGURE: + return self._ops() + + def _ops(self): + ops = [] + for name in list(self.defs.keys()): + ops.append(name) + return ops +*/ \ No newline at end of file diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/Metadata.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/Metadata.java new file mode 100644 index 0000000..6b818f5 --- /dev/null +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/Metadata.java @@ -0,0 +1,41 @@ +package org.onap.sdc.toscaparser.api.elements; + +import java.util.AbstractMap; +import java.util.HashMap; +import java.util.Map; +import java.util.stream.Collectors; + +public class Metadata { + + private final Map metadataMap; + + public Metadata(Map metadataMap) { + this.metadataMap = metadataMap != null ? metadataMap : new HashMap<>(); + } + + public String getValue(String key) { + + Object obj = this.metadataMap.get(key); + if (obj != null){ + return String.valueOf(obj); + } + return null; + } + + /** + * Get all properties of a Metadata object.
+ * This object represents the "metadata" section of some entity. + * @return all properties of this Metadata, as a key-value. + */ + public Map getAllProperties() { + return metadataMap.entrySet().stream().map(e-> new AbstractMap.SimpleEntry(e.getKey(), String.valueOf(e.getValue()))).collect(Collectors.toMap(Map.Entry::getKey,Map.Entry::getValue)); + } + + @Override + public String toString() { + return "Metadata{" + + "metadataMap=" + metadataMap + + '}'; + } + +} diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/NodeType.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/NodeType.java new file mode 100644 index 0000000..7dcc44d --- /dev/null +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/NodeType.java @@ -0,0 +1,525 @@ +package org.onap.sdc.toscaparser.api.elements; + +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.Map; + +public class NodeType extends StatefulEntityType { + // TOSCA built-in node type + + private static final String DERIVED_FROM = "derived_from"; + private static final String METADATA = "metadata"; + private static final String PROPERTIES = "properties"; + private static final String VERSION = "version"; + private static final String DESCRIPTION = "description"; + private static final String ATTRIBUTES = "attributes"; + private static final String REQUIREMENTS = "requirements"; + private static final String CAPABILITIES = "capabilities"; + private static final String INTERFACES = "interfaces"; + private static final String ARTIFACTS = "artifacts"; + + private static final String SECTIONS[] = { + DERIVED_FROM, METADATA, PROPERTIES, VERSION, DESCRIPTION, ATTRIBUTES, REQUIREMENTS, CAPABILITIES, INTERFACES, ARTIFACTS + }; + + private String ntype; + public LinkedHashMap customDef; + + public NodeType(String nttype,LinkedHashMap ntcustomDef) { + 
super(nttype,NODE_PREFIX, ntcustomDef); + ntype = nttype; + customDef = ntcustomDef; + _validateKeys(); + } + + public Object getParentType() { + // Return a node this node is derived from + if(defs == null) { + return null; + } + String pnode = derivedFrom(defs); + if(pnode != null && !pnode.isEmpty()) { + return new NodeType(pnode,customDef); + } + return null; + } + + @SuppressWarnings("unchecked") + public LinkedHashMap getRelationship() { + // Return a dictionary of relationships to other node types + + // This method returns a dictionary of named relationships that nodes + // of the current node type (self) can have to other nodes (of specific + // types) in a TOSCA template. + + LinkedHashMap relationship = new LinkedHashMap<>(); + ArrayList> requires; + Object treq = getAllRequirements(); + if(treq != null) { + // NOTE(sdmonov): Check if requires is a dict. + // If it is a dict convert it to a list of dicts. + // This is needed because currently the code below supports only + // lists as requirements definition. The following check will + // make sure if a map (dict) was provided it will be converted to + // a list before proceeding to the parsing. 
+ if(treq instanceof LinkedHashMap) { + requires = new ArrayList<>(); + for(Map.Entry me: ((LinkedHashMap)treq).entrySet()) { + LinkedHashMap tl = new LinkedHashMap<>(); + tl.put(me.getKey(),me.getValue()); + requires.add(tl); + } + } + else { + requires = (ArrayList>)treq; + } + + String keyword = null; + String nodeType = null; + for(LinkedHashMap require: requires) { + String relation = null; + for(Map.Entry re: require.entrySet()) { + String key = re.getKey(); + LinkedHashMap req = (LinkedHashMap)re.getValue(); + if(req.get("relationship") != null) { + Object trelation = req.get("relationship"); + // trelation is a string or a dict with "type" mapped to the string we want + if(trelation instanceof String) { + relation = (String)trelation; + } + else { + if(((LinkedHashMap)trelation).get("type") != null) { + relation = (String)((LinkedHashMap)trelation).get("type"); + } + } + nodeType = (String)req.get("node"); + //BUG meaningless?? LinkedHashMap value = req; + if(nodeType != null) { + keyword = "node"; + } + else { + // If value is a dict and has a type key + // we need to lookup the node type using + // the capability type + String captype = (String)req.get("capability"); + String value = _getNodeTypeByCap(captype); + String getRelation = _getRelation(key,value); + if (getRelation != null) { + relation = getRelation; + } + keyword = key; + nodeType = value; + } + } + + } + RelationshipType rtype = new RelationshipType(relation, keyword, customDef); + NodeType relatednode = new NodeType(nodeType, customDef); + relationship.put(rtype, relatednode); + } + } + return relationship; + + } + + @SuppressWarnings("unchecked") + private String _getNodeTypeByCap(String cap) { + // Find the node type that has the provided capability + + // This method will lookup all node types if they have the + // provided capability. 
+ // Filter the node types + ArrayList nodeTypes = new ArrayList<>(); + for(String nt: customDef.keySet()) { + if(nt.startsWith(NODE_PREFIX) || nt.startsWith("org.onap") && !nt.equals("tosca.nodes.Root")) { + nodeTypes.add(nt); + } + } + for(String nt: nodeTypes) { + LinkedHashMap nodeDef = (LinkedHashMap)customDef.get(nt); + if(nodeDef instanceof LinkedHashMap && nodeDef.get("capabilities") != null) { + LinkedHashMap nodeCaps = (LinkedHashMap)nodeDef.get("capabilities"); + if(nodeCaps != null) { + for(Object val: nodeCaps.values()) { + if(val instanceof LinkedHashMap) { + String tp = (String)((LinkedHashMap)val).get("type"); + if(tp != null && tp.equals(cap)) { + return nt; + } + } + } + } + } + } + return null; + } + + @SuppressWarnings("unchecked") + private String _getRelation(String key,String ndtype) { + String relation = null; + NodeType ntype = new NodeType(ndtype, customDef); + LinkedHashMap caps = ntype.getCapabilities(); + if(caps != null && caps.get(key) != null) { + CapabilityTypeDef c = caps.get(key); + for(int i=0; i< RELATIONSHIP_TYPE.length; i++) { + String r = RELATIONSHIP_TYPE[i]; + if(r != null) { + relation = r; + break; + } + LinkedHashMap rtypedef = (LinkedHashMap)customDef.get(r); + for(Object o: rtypedef.values()) { + LinkedHashMap properties = (LinkedHashMap)o; + if(properties.get(c.getType()) != null) { + relation = r; + break; + } + } + if(relation != null) { + break; + } + else { + for(Object o: rtypedef.values()) { + LinkedHashMap properties = (LinkedHashMap)o; + if(properties.get(c.getParentType()) != null) { + relation = r; + break; + } + } + } + } + } + return relation; + } + + @SuppressWarnings("unchecked") + public ArrayList getCapabilitiesObjects() { + // Return a list of capability objects + ArrayList typecapabilities = new ArrayList<>(); + LinkedHashMap caps = (LinkedHashMap)getValue(CAPABILITIES, null, true); + if(caps != null) { + // 'cname' is symbolic name of the capability + // 'cvalue' is a dict { 'type': } + 
for(Map.Entry me: caps.entrySet()) { + String cname = me.getKey(); + LinkedHashMap cvalue = (LinkedHashMap)me.getValue(); + String ctype = cvalue.get("type"); + CapabilityTypeDef cap = new CapabilityTypeDef(cname,ctype,type,customDef); + typecapabilities.add(cap); + } + } + return typecapabilities; + } + + public LinkedHashMap getCapabilities() { + // Return a dictionary of capability name-objects pairs + LinkedHashMap caps = new LinkedHashMap<>(); + for(CapabilityTypeDef ctd: getCapabilitiesObjects()) { + caps.put(ctd.getName(),ctd); + } + return caps; + } + + @SuppressWarnings("unchecked") + public ArrayList getRequirements() { + return (ArrayList)getValue(REQUIREMENTS,null,true); + } + + public ArrayList getAllRequirements() { + return getRequirements(); + } + + @SuppressWarnings("unchecked") + public LinkedHashMap getInterfaces() { + return (LinkedHashMap)getValue(INTERFACES,null,false); + } + + + @SuppressWarnings("unchecked") + public ArrayList getLifecycleInputs() + { + // Return inputs to life cycle operations if found + ArrayList inputs = new ArrayList<>(); + LinkedHashMap interfaces = getInterfaces(); + if(interfaces != null) { + for(Map.Entry me: interfaces.entrySet()) { + String iname = me.getKey(); + LinkedHashMap ivalue = (LinkedHashMap)me.getValue(); + if(iname.equals(InterfacesDef.LIFECYCLE)) { + for(Map.Entry ie: ivalue.entrySet()) { + if(ie.getKey().equals("input")) { + LinkedHashMap y = (LinkedHashMap)ie.getValue(); + for(String i: y.keySet()) { + inputs.add(i); + } + } + } + } + } + } + return inputs; + } + + public ArrayList getLifecycleOperations() { + // Return available life cycle operations if found + ArrayList ops = null; + LinkedHashMap interfaces = getInterfaces(); + if(interfaces != null) { + InterfacesDef i = new InterfacesDef(this,InterfacesDef.LIFECYCLE,null,null,null); + ops = i.getLifecycleOps(); + } + return ops; + } + + public CapabilityTypeDef getCapability(String name) { + //BUG?? 
the python code has to be wrong + // it refers to a bad attribute 'value'... + LinkedHashMap caps = getCapabilities(); + if(caps != null) { + return caps.get(name); + } + return null; + /* + def get_capability(self, name): + caps = self.get_capabilities() + if caps and name in caps.keys(): + return caps[name].value + */ + } + + public String getCapabilityType(String name) { + //BUG?? the python code has to be wrong + // it refers to a bad attribute 'value'... + CapabilityTypeDef captype = getCapability(name); + if(captype != null) { + return captype.getType(); + } + return null; + /* + def get_capability_type(self, name): + captype = self.get_capability(name) + if captype and name in captype.keys(): + return captype[name].value + */ + } + + private void _validateKeys() { + if(defs != null) { + for(String key: defs.keySet()) { + boolean bFound = false; + for(int i=0; i< SECTIONS.length; i++) { + if(key.equals(SECTIONS[i])) { + bFound = true; + break; + } + } + if(!bFound) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE124", String.format( + "UnknownFieldError: Nodetype \"%s\" has unknown field \"%s\"",ntype,key))); + } + } + } + } + +} + +/*python + +from toscaparser.common.exception import ValidationIssueCollector +from toscaparser.common.exception import UnknownFieldError +from toscaparser.elements.capabilitytype import CapabilityTypeDef +import org.openecomp.sdc.toscaparser.api.elements.interfaces as ifaces +from toscaparser.elements.interfaces import InterfacesDef +from toscaparser.elements.relationshiptype import RelationshipType +from toscaparser.elements.statefulentitytype import StatefulEntityType + + +class NodeType(StatefulEntityType): + '''TOSCA built-in node type.''' + SECTIONS = (DERIVED_FROM, METADATA, PROPERTIES, VERSION, DESCRIPTION, ATTRIBUTES, REQUIREMENTS, CAPABILITIES, INTERFACES, ARTIFACTS) = \ + ('derived_from', 'metadata', 'properties', 'version', + 'description', 'attributes', 'requirements', 
'capabilities', + 'interfaces', 'artifacts') + + def __init__(self, ntype, custom_def=None): + super(NodeType, self).__init__(ntype, self.NODE_PREFIX, custom_def) + self.ntype = ntype + self.custom_def = custom_def + self._validate_keys() + + @property + def parent_type(self): + '''Return a node this node is derived from.''' + if not hasattr(self, 'defs'): + return None + pnode = self.derived_from(self.defs) + if pnode: + return NodeType(pnode, self.custom_def) + + @property + def relationship(self): + '''Return a dictionary of relationships to other node types. + + This method returns a dictionary of named relationships that nodes + of the current node type (self) can have to other nodes (of specific + types) in a TOSCA template. + + ''' + relationship = {} + requires = self.get_all_requirements() + if requires: + # NOTE(sdmonov): Check if requires is a dict. + # If it is a dict convert it to a list of dicts. + # This is needed because currently the code below supports only + # lists as requirements definition. The following check will + # make sure if a map (dict) was provided it will be converted to + # a list before proceeding to the parsing. + if isinstance(requires, dict): + requires = [{key: value} for key, value in requires.items()] + + keyword = None + node_type = None + for require in requires: + for key, req in require.items(): + if 'relationship' in req: + relation = req.get('relationship') + if 'type' in relation: + relation = relation.get('type') + node_type = req.get('node') + value = req + if node_type: + keyword = 'node' + else: + # If value is a dict and has a type key + # we need to lookup the node type using + # the capability type + value = req + if isinstance(value, dict): + captype = value['capability'] + value = (self. 
+ _get_node_type_by_cap(key, captype)) + relation = self._get_relation(key, value) + keyword = key + node_type = value + rtype = RelationshipType(relation, keyword, self.custom_def) + relatednode = NodeType(node_type, self.custom_def) + relationship[rtype] = relatednode + return relationship + + def _get_node_type_by_cap(self, key, cap): + '''Find the node type that has the provided capability + + This method will lookup all node types if they have the + provided capability. + ''' + + # Filter the node types + node_types = [node_type for node_type in self.TOSCA_DEF.keys() + if node_type.startswith(self.NODE_PREFIX) and + node_type != 'tosca.nodes.Root'] + + for node_type in node_types: + node_def = self.TOSCA_DEF[node_type] + if isinstance(node_def, dict) and 'capabilities' in node_def: + node_caps = node_def['capabilities'] + for value in node_caps.values(): + if isinstance(value, dict) and \ + 'type' in value and value['type'] == cap: + return node_type + + def _get_relation(self, key, ndtype): + relation = None + ntype = NodeType(ndtype) + caps = ntype.get_capabilities() + if caps and key in caps.keys(): + c = caps[key] + for r in self.RELATIONSHIP_TYPE: + rtypedef = ntype.TOSCA_DEF[r] + for properties in rtypedef.values(): + if c.type in properties: + relation = r + break + if relation: + break + else: + for properties in rtypedef.values(): + if c.parent_type in properties: + relation = r + break + return relation + + def get_capabilities_objects(self): + '''Return a list of capability objects.''' + typecapabilities = [] + caps = self.get_value(self.CAPABILITIES, None, True) + if caps: + # 'name' is symbolic name of the capability + # 'value' is a dict { 'type': } + for name, value in caps.items(): + ctype = value.get('type') + cap = CapabilityTypeDef(name, ctype, self.type, + self.custom_def) + typecapabilities.append(cap) + return typecapabilities + + def get_capabilities(self): + '''Return a dictionary of capability name-objects pairs.''' + return {cap.name: 
cap + for cap in self.get_capabilities_objects()} + + @property + def requirements(self): + return self.get_value(self.REQUIREMENTS, None, True) + + def get_all_requirements(self): + return self.requirements + + @property + def interfaces(self): + return self.get_value(self.INTERFACES) + + @property + def lifecycle_inputs(self): + '''Return inputs to life cycle operations if found.''' + inputs = [] + interfaces = self.interfaces + if interfaces: + for name, value in interfaces.items(): + if name == ifaces.LIFECYCLE: + for x, y in value.items(): + if x == 'inputs': + for i in y.iterkeys(): + inputs.append(i) + return inputs + + @property + def lifecycle_operations(self): + '''Return available life cycle operations if found.''' + ops = None + interfaces = self.interfaces + if interfaces: + i = InterfacesDef(self.type, ifaces.LIFECYCLE) + ops = i.lifecycle_ops + return ops + + def get_capability(self, name): + caps = self.get_capabilities() + if caps and name in caps.keys(): + return caps[name].value + + def get_capability_type(self, name): + captype = self.get_capability(name) + if captype and name in captype.keys(): + return captype[name].value + + def _validate_keys(self): + if self.defs: + for key in self.defs.keys(): + if key not in self.SECTIONS: + ValidationIssueCollector.appendException( + UnknownFieldError(what='Nodetype"%s"' % self.ntype, + field=key)) +*/ \ No newline at end of file diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/PolicyType.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/PolicyType.java new file mode 100644 index 0000000..0a36a35 --- /dev/null +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/PolicyType.java @@ -0,0 +1,291 @@ +package org.onap.sdc.toscaparser.api.elements; + +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; + +import java.util.ArrayList; +import java.util.LinkedHashMap; + +import org.onap.sdc.toscaparser.api.utils.TOSCAVersionProperty; +import 
org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +public class PolicyType extends StatefulEntityType { + + private static final String DERIVED_FROM = "derived_from"; + private static final String METADATA = "metadata"; + private static final String PROPERTIES = "properties"; + private static final String VERSION = "version"; + private static final String DESCRIPTION = "description"; + private static final String TARGETS = "targets"; + private static final String TRIGGERS = "triggers"; + private static final String TYPE = "type"; + + private static final String SECTIONS[] = { + DERIVED_FROM, METADATA, PROPERTIES, VERSION, DESCRIPTION, TARGETS, TRIGGERS, TYPE + }; + + private LinkedHashMap customDef; + private String policyDescription; + private Object policyVersion; + private LinkedHashMap properties; + private LinkedHashMap parentPolicies; + private LinkedHashMap metaData; + private ArrayList targetsList; + + + public PolicyType(String _type, LinkedHashMap _customDef) { + super(_type,POLICY_PREFIX,_customDef); + + type = _type; + customDef = _customDef; + _validateKeys(); + + metaData = null; + if(defs != null && defs.get(METADATA) != null) { + metaData = (LinkedHashMap)defs.get(METADATA); + _validateMetadata(metaData); + } + + properties = null; + if(defs != null && defs.get(PROPERTIES) != null) { + properties = (LinkedHashMap)defs.get(PROPERTIES); + } + parentPolicies = _getParentPolicies(); + + policyVersion = null; + if(defs != null && defs.get(VERSION) != null) { + policyVersion = (new TOSCAVersionProperty( + defs.get(VERSION))).getVersion(); + } + + policyDescription = null; + if(defs != null && defs.get(DESCRIPTION) != null) { + policyDescription = (String)defs.get(DESCRIPTION); + } + + targetsList = null; + if(defs != null && defs.get(TARGETS) != null) { + targetsList = (ArrayList)defs.get(TARGETS); + _validateTargets(targetsList,customDef); + } + + } + + private LinkedHashMap _getParentPolicies() { + LinkedHashMap policies = new LinkedHashMap<>(); 
+ String parentPolicy; + if(getParentType() != null) { + parentPolicy = getParentType().getType(); + } + else { + parentPolicy = null; + } + if(parentPolicy != null) { + while(parentPolicy != null && !parentPolicy.equals("tosca.policies.Root")) { + policies.put(parentPolicy, TOSCA_DEF.get(parentPolicy)); + parentPolicy = (String) + ((LinkedHashMap)policies.get(parentPolicy)).get("derived_from);"); + } + } + return policies; + } + + public String getType() { + return type; + } + + public PolicyType getParentType() { + // Return a policy statefulentity of this node is derived from + if(defs == null) { + return null; + } + String ppolicyEntity = derivedFrom(defs); + if(ppolicyEntity != null) { + return new PolicyType(ppolicyEntity,customDef); + } + return null; + } + + public Object getPolicy(String name) { + // Return the definition of a policy field by name + if(defs != null && defs.get(name) != null) { + return defs.get(name); + } + return null; + } + + public ArrayList getTargets() { + // Return targets + return targetsList; + } + + public String getDescription() { + return policyDescription; + } + + public Object getVersion() { + return policyVersion; + } + + private void _validateKeys() { + for(String key: defs.keySet()) { + boolean bFound = false; + for(String sect: SECTIONS) { + if(key.equals(sect)) { + bFound = true; + break; + } + } + if(!bFound) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE125", String.format( + "UnknownFieldError: Policy \"%s\" contains unknown field \"%s\"", + type,key))); + } + } + } + + private void _validateTargets(ArrayList _targetsList, + LinkedHashMap _customDef) { + for(String nodetype: _targetsList) { + if(_customDef.get(nodetype) == null) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE126", String.format( + "InvalidTypeError: \"%s\" defined in targets for policy \"%s\"", + nodetype,type))); + + } + } + } + + private void 
_validateMetadata(LinkedHashMap _metaData) { + String mtype = (String)_metaData.get("type"); + if(mtype != null && !mtype.equals("map") && !mtype.equals("tosca:map")) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE127", String.format( + "InvalidTypeError: \"%s\" defined in policy for metadata", + mtype))); + } + for(String entrySchema: metaData.keySet()) { + Object estob = metaData.get(entrySchema); + if(estob instanceof LinkedHashMap) { + String est = (String) + ((LinkedHashMap)estob).get("type"); + if(!est.equals("string")) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE128", String.format( + "InvalidTypeError: \"%s\" defined in policy for metadata \"%s\"", + est,entrySchema))); + } + } + } + } + +} + +/*python + +from toscaparser.common.exception import ValidationIssueCollector +from toscaparser.common.exception import InvalidTypeError +from toscaparser.common.exception import UnknownFieldError +from toscaparser.elements.statefulentitytype import StatefulEntityType +from toscaparser.utils.validateutils import TOSCAVersionProperty + + +class PolicyType(StatefulEntityType): + + '''TOSCA built-in policies type.''' + SECTIONS = (DERIVED_FROM, METADATA, PROPERTIES, VERSION, DESCRIPTION, TARGETS) = \ + ('derived_from', 'metadata', 'properties', 'version', + 'description', 'targets') + + def __init__(self, ptype, custom_def=None): + super(PolicyType, self).__init__(ptype, self.POLICY_PREFIX, + custom_def) + self.type = ptype + self.custom_def = custom_def + self._validate_keys() + + self.meta_data = None + if self.METADATA in self.defs: + self.meta_data = self.defs[self.METADATA] + self._validate_metadata(self.meta_data) + + self.properties = None + if self.PROPERTIES in self.defs: + self.properties = self.defs[self.PROPERTIES] + self.parent_policies = self._get_parent_policies() + + self.policy_version = None + if self.VERSION in self.defs: + self.policy_version = TOSCAVersionProperty( 
+ self.defs[self.VERSION]).get_version() + + self.policy_description = self.defs[self.DESCRIPTION] \ + if self.DESCRIPTION in self.defs else None + + self.targets_list = None + if self.TARGETS in self.defs: + self.targets_list = self.defs[self.TARGETS] + self._validate_targets(self.targets_list, custom_def) + + def _get_parent_policies(self): + policies = {} + parent_policy = self.parent_type.type if self.parent_type else None + if parent_policy: + while parent_policy != 'tosca.policies.Root': + policies[parent_policy] = self.TOSCA_DEF[parent_policy] + parent_policy = policies[parent_policy]['derived_from'] + return policies + + @property + def parent_type(self): + '''Return a policy statefulentity of this node is derived from.''' + if not hasattr(self, 'defs'): + return None + ppolicy_entity = self.derived_from(self.defs) + if ppolicy_entity: + return PolicyType(ppolicy_entity, self.custom_def) + + def get_policy(self, name): + '''Return the definition of a policy field by name.''' + if name in self.defs: + return self.defs[name] + + @property + def targets(self): + '''Return targets.''' + return self.targets_list + + @property + def description(self): + return self.policy_description + + @property + def version(self): + return self.policy_version + + def _validate_keys(self): + for key in self.defs.keys(): + if key not in self.SECTIONS: + ValidationIssueCollector.appendException( + UnknownFieldError(what='Policy "%s"' % self.type, + field=key)) + + def _validate_targets(self, targets_list, custom_def): + for nodetype in targets_list: + if nodetype not in custom_def: + ValidationIssueCollector.appendException( + InvalidTypeError(what='"%s" defined in targets for ' + 'policy "%s"' % (nodetype, self.type))) + + def _validate_metadata(self, meta_data): + if not meta_data.get('type') in ['map', 'tosca:map']: + ValidationIssueCollector.appendException( + InvalidTypeError(what='"%s" defined in policy for ' + 'metadata' % (meta_data.get('type')))) + + for entry_schema, 
entry_schema_type in meta_data.items(): + if isinstance(entry_schema_type, dict) and not \ + entry_schema_type.get('type') == 'string': + ValidationIssueCollector.appendException( + InvalidTypeError(what='"%s" defined in policy for ' + 'metadata "%s"' + % (entry_schema_type.get('type'), + entry_schema))) +*/ \ No newline at end of file diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/PortSpec.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/PortSpec.java new file mode 100644 index 0000000..8fb65df --- /dev/null +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/PortSpec.java @@ -0,0 +1,160 @@ +package org.onap.sdc.toscaparser.api.elements; + +import org.onap.sdc.toscaparser.api.DataEntity; +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; +import org.onap.sdc.toscaparser.api.utils.ValidateUtils; + +import java.util.LinkedHashMap; + +public class PortSpec { + // Parent class for tosca.datatypes.network.PortSpec type + + private static final String SHORTNAME = "PortSpec"; + private static final String TYPE_URI = "tosca.datatypes.network." + SHORTNAME; + + private static final String PROTOCOL = "protocol"; + private static final String SOURCE = "source"; + private static final String SOURCE_RANGE = "source_range"; + private static final String TARGET = "target"; + private static final String TARGET_RANGE = "target_range"; + + private static final String PROPERTY_NAMES[] = { + PROTOCOL, SOURCE, SOURCE_RANGE, + TARGET, TARGET_RANGE + }; + + // todo(TBD) May want to make this a subclass of DataType + // and change init method to set PortSpec's properties + public PortSpec() { + + } + + // The following additional requirements MUST be tested: + // 1) A valid PortSpec MUST have at least one of the following properties: + // target, target_range, source or source_range. 
+ // 2) A valid PortSpec MUST have a value for the source property that + // is within the numeric range specified by the property source_range + // when source_range is specified. + // 3) A valid PortSpec MUST have a value for the target property that is + // within the numeric range specified by the property target_range + // when target_range is specified. + public static void validateAdditionalReq(Object _properties, + String propName, + LinkedHashMap custom_def) { + + try { + LinkedHashMap properties = (LinkedHashMap)_properties; + Object source = properties.get(PortSpec.SOURCE); + Object sourceRange = properties.get(PortSpec.SOURCE_RANGE); + Object target = properties.get(PortSpec.TARGET); + Object targetRange = properties.get(PortSpec.TARGET_RANGE); + + // verify one of the specified values is set + if(source == null && sourceRange == null && + target == null && targetRange == null) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE129", String.format( + "InvalidTypeAdditionalRequirementsError: Additional requirements for type \"%s\" not met", + TYPE_URI))); + } + // Validate source value is in specified range + if(source != null && sourceRange != null) { + ValidateUtils.validateValueInRange(source,sourceRange,SOURCE); + } + else { + DataEntity portdef = new DataEntity("PortDef", source, null, SOURCE); + portdef.validate(); + } + // Validate target value is in specified range + if(target != null && targetRange != null) { + ValidateUtils.validateValueInRange(target,targetRange,SOURCE); + } + else { + DataEntity portdef = new DataEntity("PortDef", source, null, TARGET); + portdef.validate(); + } + } + catch(Exception e) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE130", String.format( + "ValueError: \"%s\" do not meet requirements for type \"%s\"", + _properties.toString(),SHORTNAME))); + } + } + +} + +/*python + +from toscaparser.common.exception import ValidationIssueCollector 
+from toscaparser.common.exception import InvalidTypeAdditionalRequirementsError +from toscaparser.utils.gettextutils import _ +import org.openecomp.sdc.toscaparser.api.utils.validateutils as validateutils + +log = logging.getLogger('tosca') + + +class PortSpec(object): + '''Parent class for tosca.datatypes.network.PortSpec type.''' + + SHORTNAME = 'PortSpec' + TYPE_URI = 'tosca.datatypes.network.' + SHORTNAME + + PROPERTY_NAMES = ( + PROTOCOL, SOURCE, SOURCE_RANGE, + TARGET, TARGET_RANGE + ) = ( + 'protocol', 'source', 'source_range', + 'target', 'target_range' + ) + + # TODO(TBD) May want to make this a subclass of DataType + # and change init method to set PortSpec's properties + def __init__(self): + pass + + # The following additional requirements MUST be tested: + # 1) A valid PortSpec MUST have at least one of the following properties: + # target, target_range, source or source_range. + # 2) A valid PortSpec MUST have a value for the source property that + # is within the numeric range specified by the property source_range + # when source_range is specified. + # 3) A valid PortSpec MUST have a value for the target property that is + # within the numeric range specified by the property target_range + # when target_range is specified. 
+ @staticmethod + def validate_additional_req(properties, prop_name, custom_def=None, ): + try: + source = properties.get(PortSpec.SOURCE) + source_range = properties.get(PortSpec.SOURCE_RANGE) + target = properties.get(PortSpec.TARGET) + target_range = properties.get(PortSpec.TARGET_RANGE) + + # verify one of the specified values is set + if source is None and source_range is None and \ + target is None and target_range is None: + ValidationIssueCollector.appendException( + InvalidTypeAdditionalRequirementsError( + type=PortSpec.TYPE_URI)) + # Validate source value is in specified range + if source and source_range: + validateutils.validate_value_in_range(source, source_range, + PortSpec.SOURCE) + else: + from toscaparser.dataentity import DataEntity + portdef = DataEntity('PortDef', source, None, PortSpec.SOURCE) + portdef.validate() + # Validate target value is in specified range + if target and target_range: + validateutils.validate_value_in_range(target, target_range, + PortSpec.TARGET) + else: + from toscaparser.dataentity import DataEntity + portdef = DataEntity('PortDef', source, None, PortSpec.TARGET) + portdef.validate() + except Exception: + msg = _('"%(value)s" do not meet requirements ' + 'for type "%(type)s".') \ + % {'value': properties, 'type': PortSpec.SHORTNAME} + ValidationIssueCollector.appendException( + ValueError(msg)) +*/ \ No newline at end of file diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/PropertyDef.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/PropertyDef.java new file mode 100644 index 0000000..e37603d --- /dev/null +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/PropertyDef.java @@ -0,0 +1,231 @@ +package org.onap.sdc.toscaparser.api.elements; + +import java.util.LinkedHashMap; +import java.util.Map; + +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +public class PropertyDef { + + private static final String 
PROPERTY_KEYNAME_DEFAULT = "default"; + private static final String PROPERTY_KEYNAME_REQUIRED = "required"; + private static final String PROPERTY_KEYNAME_STATUS = "status"; + private static final String VALID_PROPERTY_KEYNAMES[] = { + PROPERTY_KEYNAME_DEFAULT, + PROPERTY_KEYNAME_REQUIRED, + PROPERTY_KEYNAME_STATUS}; + + private static final boolean PROPERTY_REQUIRED_DEFAULT = true; + + private static final String VALID_REQUIRED_VALUES[] = {"true", "false"}; + + private static final String PROPERTY_STATUS_SUPPORTED = "supported"; + private static final String PROPERTY_STATUS_EXPERIMENTAL = "experimental"; + private static final String VALID_STATUS_VALUES[] = { + PROPERTY_STATUS_SUPPORTED, PROPERTY_STATUS_EXPERIMENTAL}; + + private static final String PROPERTY_STATUS_DEFAULT = PROPERTY_STATUS_SUPPORTED; + + private String name; + private Object value; + private LinkedHashMap schema; + private String _status; + private boolean _required; + + public PropertyDef(String pdName, Object pdValue, + LinkedHashMap pdSchema) { + name = pdName; + value = pdValue; + schema = pdSchema; + _status = PROPERTY_STATUS_DEFAULT; + _required = PROPERTY_REQUIRED_DEFAULT; + + if(schema != null) { + // Validate required 'type' property exists + if(schema.get("type") == null) { + //msg = (_('Schema definition of "%(pname)s" must have a "type" ' + // 'attribute.') % dict(pname=self.name)) + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE131", String.format( + "InvalidSchemaError: Schema definition of \"%s\" must have a \"type\" attribute",name))); + } + _loadRequiredAttrFromSchema(); + _loadStatusAttrFromSchema(); + } + } + + public Object getDefault() { + if(schema != null) { + for(Map.Entry me: schema.entrySet()) { + if(me.getKey().equals(PROPERTY_KEYNAME_DEFAULT)) { + return me.getValue(); + } + } + } + return null; + } + + public boolean isRequired() { + return _required; + } + + private void _loadRequiredAttrFromSchema() { + // IF 'required' keyname 
exists verify it's a boolean, + // if so override default + Object val = schema.get(PROPERTY_KEYNAME_REQUIRED); + if(val != null) { + if(val instanceof Boolean) { + _required = (boolean)val; + } + else { + //valid_values = ', '.join(self.VALID_REQUIRED_VALUES) + //attr = self.PROPERTY_KEYNAME_REQUIRED + //TOSCAException.generate_inv_schema_property_error(self, + // attr, + // value, + // valid_values) + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE132", String.format( + "Schema definition of \"%s\" has \"required\" attribute with an invalid value", + name))); + } + } + } + + public String getStatus() { + return _status; + } + + private void _loadStatusAttrFromSchema() { + // IF 'status' keyname exists verify it's a boolean, + // if so override default + String sts = (String)schema.get(PROPERTY_KEYNAME_STATUS); + if(sts != null) { + boolean bFound = false; + for(String vsv: VALID_STATUS_VALUES) { + if(vsv.equals(sts)) { + bFound = true; + break; + } + } + if(bFound) { + _status = sts; + } + else { + //valid_values = ', '.join(self.VALID_STATUS_VALUES) + //attr = self.PROPERTY_KEYNAME_STATUS + //TOSCAException.generate_inv_schema_property_error(self, + // attr, + // value, + // valid_values) + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE006", String.format( + "Schema definition of \"%s\" has \"status\" attribute with an invalid value", + name))); + } + } + } + + public String getName() { + return name; + } + + public LinkedHashMap getSchema() { + return schema; + } + + public Object getPDValue() { + // there's getValue in EntityType... 
+ return value; + } + +} +/*python + +from toscaparser.common.exception import ValidationIssueCollector +from toscaparser.common.exception import InvalidSchemaError +from toscaparser.common.exception import TOSCAException +from toscaparser.utils.gettextutils import _ + + +class PropertyDef(object): + '''TOSCA built-in Property type.''' + + VALID_PROPERTY_KEYNAMES = (PROPERTY_KEYNAME_DEFAULT, + PROPERTY_KEYNAME_REQUIRED, + PROPERTY_KEYNAME_STATUS) = \ + ('default', 'required', 'status') + + PROPERTY_REQUIRED_DEFAULT = True + + VALID_REQUIRED_VALUES = ['true', 'false'] + VALID_STATUS_VALUES = (PROPERTY_STATUS_SUPPORTED, + PROPERTY_STATUS_EXPERIMENTAL) = \ + ('supported', 'experimental') + + PROPERTY_STATUS_DEFAULT = PROPERTY_STATUS_SUPPORTED + + def __init__(self, name, value=None, schema=None): + self.name = name + self.value = value + self.schema = schema + self._status = self.PROPERTY_STATUS_DEFAULT + self._required = self.PROPERTY_REQUIRED_DEFAULT + + # Validate required 'type' property exists + try: + self.schema['type'] + except KeyError: + msg = (_('Schema definition of "%(pname)s" must have a "type" ' + 'attribute.') % dict(pname=self.name)) + ValidationIssueCollector.appendException( + InvalidSchemaError(message=msg)) + + if self.schema: + self._load_required_attr_from_schema() + self._load_status_attr_from_schema() + + @property + def default(self): + if self.schema: + for prop_key, prop_value in self.schema.items(): + if prop_key == self.PROPERTY_KEYNAME_DEFAULT: + return prop_value + return None + + @property + def required(self): + return self._required + + def _load_required_attr_from_schema(self): + # IF 'required' keyname exists verify it's a boolean, + # if so override default + if self.PROPERTY_KEYNAME_REQUIRED in self.schema: + value = self.schema[self.PROPERTY_KEYNAME_REQUIRED] + if isinstance(value, bool): + self._required = value + else: + valid_values = ', '.join(self.VALID_REQUIRED_VALUES) + attr = self.PROPERTY_KEYNAME_REQUIRED + 
TOSCAException.generate_inv_schema_property_error(self, + attr, + value, + valid_values) + + @property + def status(self): + return self._status + + def _load_status_attr_from_schema(self): + # IF 'status' keyname exists verify it's a valid value, + # if so override default + if self.PROPERTY_KEYNAME_STATUS in self.schema: + value = self.schema[self.PROPERTY_KEYNAME_STATUS] + if value in self.VALID_STATUS_VALUES: + self._status = value + else: + valid_values = ', '.join(self.VALID_STATUS_VALUES) + attr = self.PROPERTY_KEYNAME_STATUS + TOSCAException.generate_inv_schema_property_error(self, + attr, + value, + valid_values) +*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/RelationshipType.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/RelationshipType.java new file mode 100644 index 0000000..0197d54 --- /dev/null +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/RelationshipType.java @@ -0,0 +1,101 @@ +package org.onap.sdc.toscaparser.api.elements; + +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +import java.util.LinkedHashMap; + +public class RelationshipType extends StatefulEntityType { + + private static final String DERIVED_FROM = "derived_from"; + private static final String VALID_TARGET_TYPES = "valid_target_types"; + private static final String INTERFACES = "interfaces"; + private static final String ATTRIBUTES = "attributes"; + private static final String PROPERTIES = "properties"; + private static final String DESCRIPTION = "description"; + private static final String VERSION = "version"; + private static final String CREDENTIAL = "credential"; + + private static final String SECTIONS[] = { + DERIVED_FROM, VALID_TARGET_TYPES, INTERFACES, + ATTRIBUTES, PROPERTIES, DESCRIPTION, VERSION, CREDENTIAL}; + + private String capabilityName; + private LinkedHashMap customDef; + + public RelationshipType(String _type, String _capabilityName, 
LinkedHashMap _customDef) { + super(_type,RELATIONSHIP_PREFIX,_customDef); + capabilityName = _capabilityName; + customDef = _customDef; // NOTE(review): the Python original also calls self._validate_keys() here, but _validateKeys() below is never invoked — confirm and wire it in + } + + public RelationshipType getParentType() { + // Return the relationship type this relationship is derived from + String prel = derivedFrom(defs); + if(prel != null) { + return new RelationshipType(prel,null,customDef); + } + return null; + } + + public Object getValidTargetTypes() { + return entityValue(defs,"valid_target_types"); + } + + private void _validateKeys() { + for(String key: defs.keySet()) { + boolean bFound = false; + for(int i=0; i< SECTIONS.length; i++) { + if(key.equals(SECTIONS[i])) { + bFound = true; + break; + } + } + if(!bFound) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE133", String.format( + "UnknownFieldError: Relationshiptype \"%s\" has unknown field \"%s\"",type,key))); + } + } + } +} + +/*python + +from toscaparser.common.exception import ValidationIssueCollector +from toscaparser.common.exception import UnknownFieldError +from toscaparser.elements.statefulentitytype import StatefulEntityType + + +class RelationshipType(StatefulEntityType): + '''TOSCA built-in relationship type.''' + SECTIONS = (DERIVED_FROM, VALID_TARGET_TYPES, INTERFACES, + ATTRIBUTES, PROPERTIES, DESCRIPTION, VERSION, + CREDENTIAL) = ('derived_from', 'valid_target_types', + 'interfaces', 'attributes', 'properties', + 'description', 'version', 'credential') + + def __init__(self, type, capability_name=None, custom_def=None): + super(RelationshipType, self).__init__(type, self.RELATIONSHIP_PREFIX, + custom_def) + self.capability_name = capability_name + self.custom_def = custom_def + self._validate_keys() + + @property + def parent_type(self): + '''Return a relationship this reletionship is derived from.''' + prel = self.derived_from(self.defs) + if prel: + return RelationshipType(prel, self.custom_def) + + @property + def valid_target_types(self): + return self.entity_value(self.defs,
'valid_target_types') + + def _validate_keys(self): + for key in self.defs.keys(): + if key not in self.SECTIONS: + ValidationIssueCollector.appendException( + UnknownFieldError(what='Relationshiptype "%s"' % self.type, + field=key)) +*/ \ No newline at end of file diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnit.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnit.java new file mode 100644 index 0000000..f7f2a8a --- /dev/null +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnit.java @@ -0,0 +1,262 @@ +package org.onap.sdc.toscaparser.api.elements; + +import java.util.HashMap; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; +import org.onap.sdc.toscaparser.api.utils.ValidateUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public abstract class ScalarUnit { + + private static Logger log = LoggerFactory.getLogger(ScalarUnit.class.getName()); + + private static final String SCALAR_UNIT_SIZE = "scalar-unit.size"; + private static final String SCALAR_UNIT_FREQUENCY = "scalar-unit.frequency"; + private static final String SCALAR_UNIT_TIME = "scalar-unit.time"; + + public static final String SCALAR_UNIT_TYPES[] = { + SCALAR_UNIT_SIZE, SCALAR_UNIT_FREQUENCY, SCALAR_UNIT_TIME + }; + + private Object value; + protected HashMap SCALAR_UNIT_DICT; + protected String SCALAR_UNIT_DEFAULT; + + public ScalarUnit(Object _value) { + value = _value; + SCALAR_UNIT_DICT = new HashMap<>(); + SCALAR_UNIT_DEFAULT = ""; + } + + + private String _checkUnitInScalarStandardUnits(String inputUnit) { + // Check whether the input unit is following specified standard + + // If unit is not following specified standard, convert it to standard + // unit after displaying a warning message. 
+ + if(SCALAR_UNIT_DICT.get(inputUnit) != null) { + return inputUnit; + } + else { + for(String key: SCALAR_UNIT_DICT.keySet()) { + if(key.equalsIgnoreCase(inputUnit)) { // locale-safe, case-insensitive fallback to the standard unit name + log.debug("ScalarUnit - _checkUnitInScalarStandardUnits - \n" + + "The unit {} does not follow scalar unit standards\n" + + "using {} instead", + inputUnit, key); + return key; + } + } + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE007", String.format( + "The unit \"%s\" is not valid. Valid units are \n%s", + inputUnit,SCALAR_UNIT_DICT.keySet().toString()))); + return inputUnit; + } + } + + public Object validateScalarUnit() { + Pattern pattern = Pattern.compile("([0-9.]+)\\s*(\\w+)"); + Matcher matcher = pattern.matcher(value.toString()); + if(matcher.find()) { + ValidateUtils.strToNum(matcher.group(1)); + String scalarUnit = _checkUnitInScalarStandardUnits(matcher.group(2)); + value = matcher.group(1) + " " + scalarUnit; + } + else { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE134", String.format( + "ValueError: \"%s\" is not a valid scalar-unit",value.toString()))); + } + return value; + } + + public double getNumFromScalarUnit(String unit) { + if(unit != null) { + unit = _checkUnitInScalarStandardUnits(unit); + } + else { + unit = SCALAR_UNIT_DEFAULT; + } + Pattern pattern = Pattern.compile("([0-9.]+)\\s*(\\w+)"); + Matcher matcher = pattern.matcher(value.toString()); + if(matcher.find()) { + ValidateUtils.strToNum(matcher.group(1)); + String scalarUnit = _checkUnitInScalarStandardUnits(matcher.group(2)); + value = matcher.group(1) + " " + scalarUnit; + Object on1 = ValidateUtils.strToNum(matcher.group(1)) != null ? ValidateUtils.strToNum(matcher.group(1)) : 0; + Object on2 = SCALAR_UNIT_DICT.get(matcher.group(2)) != null ? SCALAR_UNIT_DICT.get(matcher.group(2)) : 0; + Object on3 = SCALAR_UNIT_DICT.get(unit) != null ?
SCALAR_UNIT_DICT.get(unit) : 0; + + double n1 = Double.parseDouble(on1.toString()); + double n2 = Double.parseDouble(on2.toString()); + double n3 = Double.parseDouble(on3.toString()); + double converted = n1 * n2 / n3; + if(Math.abs(converted - Math.round(converted)) < 0.0000000000001 ) { + converted = Math.round(converted); + } + return converted; + } + return 0L; // value did not match "<number> <unit>"; fall back to 0 + } + + protected static HashMap scalarunitMapping = _getScalarunitMappings(); + + private static HashMap _getScalarunitMappings() { + HashMap map = new HashMap<>(); + map.put(SCALAR_UNIT_FREQUENCY,"ScalarUnitFrequency"); + map.put(SCALAR_UNIT_SIZE, "ScalarUnitSize"); + map.put(SCALAR_UNIT_TIME, "ScalarUnitTime"); + return map; + } + + public static ScalarUnit getScalarunitClass(String type,Object val) { + if(type.equals(SCALAR_UNIT_SIZE)) { + return new ScalarUnitSize(val); + } + else if(type.equals(SCALAR_UNIT_TIME)) { + return new ScalarUnitTime(val); + } + else if(type.equals(SCALAR_UNIT_FREQUENCY)) { + return new ScalarUnitFrequency(val); + } + return null; + } + + public static double getScalarunitValue(String type, Object value, String unit) { + if(type.equals(SCALAR_UNIT_SIZE)) { + return (new ScalarUnitSize(value)).getNumFromScalarUnit(unit); + } + if(type.equals(SCALAR_UNIT_TIME)) { + return (new ScalarUnitTime(value)).getNumFromScalarUnit(unit); + } + if(type.equals(SCALAR_UNIT_FREQUENCY)) { + return (new ScalarUnitFrequency(value)).getNumFromScalarUnit(unit); + } + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE135", String.format( + "TypeError: \"%s\" is not a valid scalar-unit type",type))); + return 0.0; + } + +} + +/*python + +from toscaparser.common.exception import ValidationIssueCollector +from toscaparser.utils.gettextutils import _ +from toscaparser.utils import validateutils + +log = logging.getLogger('tosca') + + +class ScalarUnit(object): + '''Parent class for scalar-unit type.''' + + SCALAR_UNIT_TYPES = ( + SCALAR_UNIT_SIZE, SCALAR_UNIT_FREQUENCY,
SCALAR_UNIT_TIME + ) = ( + 'scalar-unit.size', 'scalar-unit.frequency', 'scalar-unit.time' + ) + + def __init__(self, value): + self.value = value + + def _check_unit_in_scalar_standard_units(self, input_unit): + """Check whether the input unit is following specified standard + + If unit is not following specified standard, convert it to standard + unit after displaying a warning message. + """ + if input_unit in self.SCALAR_UNIT_DICT.keys(): + return input_unit + else: + for key in self.SCALAR_UNIT_DICT.keys(): + if key.upper() == input_unit.upper(): + log.warning(_('The unit "%(unit)s" does not follow ' + 'scalar unit standards; using "%(key)s" ' + 'instead.') % {'unit': input_unit, + 'key': key}) + return key + msg = (_('The unit "%(unit)s" is not valid. Valid units are ' + '"%(valid_units)s".') % + {'unit': input_unit, + 'valid_units': sorted(self.SCALAR_UNIT_DICT.keys())}) + ValidationIssueCollector.appendException(ValueError(msg)) + + def validate_scalar_unit(self): + regex = re.compile('([0-9.]+)\s*(\w+)') + try: + result = regex.match(str(self.value)).groups() + validateutils.str_to_num(result[0]) + scalar_unit = self._check_unit_in_scalar_standard_units(result[1]) + self.value = ' '.join([result[0], scalar_unit]) + return self.value + + except Exception: + ValidationIssueCollector.appendException( + ValueError(_('"%s" is not a valid scalar-unit.') + % self.value)) + + def get_num_from_scalar_unit(self, unit=None): + if unit: + unit = self._check_unit_in_scalar_standard_units(unit) + else: + unit = self.SCALAR_UNIT_DEFAULT + self.validate_scalar_unit() + + regex = re.compile('([0-9.]+)\s*(\w+)') + result = regex.match(str(self.value)).groups() + converted = (float(validateutils.str_to_num(result[0])) + * self.SCALAR_UNIT_DICT[result[1]] + / self.SCALAR_UNIT_DICT[unit]) + if converted - int(converted) < 0.0000000000001: + converted = int(converted) + return converted + + +class ScalarUnit_Size(ScalarUnit): + + SCALAR_UNIT_DEFAULT = 'B' + SCALAR_UNIT_DICT = 
{'B': 1, 'kB': 1000, 'KiB': 1024, 'MB': 1000000, + 'MiB': 1048576, 'GB': 1000000000, + 'GiB': 1073741824, 'TB': 1000000000000, + 'TiB': 1099511627776} + + +class ScalarUnit_Time(ScalarUnit): + + SCALAR_UNIT_DEFAULT = 'ms' + SCALAR_UNIT_DICT = {'d': 86400, 'h': 3600, 'm': 60, 's': 1, + 'ms': 0.001, 'us': 0.000001, 'ns': 0.000000001} + + +class ScalarUnit_Frequency(ScalarUnit): + + SCALAR_UNIT_DEFAULT = 'GHz' + SCALAR_UNIT_DICT = {'Hz': 1, 'kHz': 1000, + 'MHz': 1000000, 'GHz': 1000000000} + + +scalarunit_mapping = { + ScalarUnit.SCALAR_UNIT_FREQUENCY: ScalarUnit_Frequency, + ScalarUnit.SCALAR_UNIT_SIZE: ScalarUnit_Size, + ScalarUnit.SCALAR_UNIT_TIME: ScalarUnit_Time, + } + + +def get_scalarunit_class(type): + return scalarunit_mapping.get(type) + + +def get_scalarunit_value(type, value, unit=None): + if type in ScalarUnit.SCALAR_UNIT_TYPES: + ScalarUnit_Class = get_scalarunit_class(type) + return (ScalarUnit_Class(value). + get_num_from_scalar_unit(unit)) + else: + ValidationIssueCollector.appendException( + TypeError(_('"%s" is not a valid scalar-unit type.') % type)) +*/ \ No newline at end of file diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitFrequency.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitFrequency.java new file mode 100644 index 0000000..6c05c43 --- /dev/null +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitFrequency.java @@ -0,0 +1,14 @@ +package org.onap.sdc.toscaparser.api.elements; + +public class ScalarUnitFrequency extends ScalarUnit { + + public ScalarUnitFrequency(Object value) { + super(value); + SCALAR_UNIT_DEFAULT = "GHz"; + SCALAR_UNIT_DICT.put("Hz",1L); + SCALAR_UNIT_DICT.put("kHz",1000L); + SCALAR_UNIT_DICT.put("MHz",1000000L); + SCALAR_UNIT_DICT.put("GHz",1000000000L); + } + +} diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitSize.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitSize.java new file mode 100644 index 
0000000..c788c32 --- /dev/null +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitSize.java @@ -0,0 +1,19 @@ +package org.onap.sdc.toscaparser.api.elements; + +public class ScalarUnitSize extends ScalarUnit { + + public ScalarUnitSize(Object value) { + super(value); + + SCALAR_UNIT_DEFAULT = "B"; + SCALAR_UNIT_DICT.put("B",1L); + SCALAR_UNIT_DICT.put("kB",1000L); + SCALAR_UNIT_DICT.put("KiB",1024L); + SCALAR_UNIT_DICT.put("MB",1000000L); + SCALAR_UNIT_DICT.put("MiB",1048576L); + SCALAR_UNIT_DICT.put("GB",1000000000L); + SCALAR_UNIT_DICT.put("GiB",1073741824L); + SCALAR_UNIT_DICT.put("TB",1000000000000L); + SCALAR_UNIT_DICT.put("TiB",1099511627776L); + } +} diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitTime.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitTime.java new file mode 100644 index 0000000..274fbf0 --- /dev/null +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitTime.java @@ -0,0 +1,17 @@ +package org.onap.sdc.toscaparser.api.elements; + +public class ScalarUnitTime extends ScalarUnit { + + public ScalarUnitTime(Object value) { + super(value); + SCALAR_UNIT_DEFAULT = "ms"; + SCALAR_UNIT_DICT.put("d",86400L); + SCALAR_UNIT_DICT.put("h",3600L); + SCALAR_UNIT_DICT.put("m",60L); + SCALAR_UNIT_DICT.put("s",1L); + SCALAR_UNIT_DICT.put("ms",0.001); + SCALAR_UNIT_DICT.put("us",0.000001); + SCALAR_UNIT_DICT.put("ns",0.000000001); + } + +} diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/StatefulEntityType.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/StatefulEntityType.java new file mode 100644 index 0000000..b9ce6c8 --- /dev/null +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/StatefulEntityType.java @@ -0,0 +1,218 @@ +package org.onap.sdc.toscaparser.api.elements; + +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; + +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.Map; + +import
org.onap.sdc.toscaparser.api.UnsupportedType; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; + + +public class StatefulEntityType extends EntityType { + // Class representing TOSCA states + + public static final String interfacesNodeLifecycleOperations[] = { + "create", "configure", "start", "stop", "delete"}; + + public static final String interfacesRelationshipConfigureOperations[] = { + "post_configure_source", "post_configure_target", "add_target", "remove_target"}; + + public StatefulEntityType() { + // void constructor for subclasses that don't want super + } + + @SuppressWarnings("unchecked") + public StatefulEntityType(String entityType, String prefix, LinkedHashMap customDef) { + + String entireEntityType = entityType; + if(UnsupportedType.validateType(entireEntityType)) { + defs = null; + } + else { + if(entityType.startsWith(TOSCA + ":")) { + entityType = entityType.substring(TOSCA.length()+1); + entireEntityType = prefix + entityType; + } + if(!entityType.startsWith(TOSCA)) { + entireEntityType = prefix + entityType; + } + if(TOSCA_DEF.get(entireEntityType) != null) { + defs = (LinkedHashMap )TOSCA_DEF.get(entireEntityType); + entityType = entireEntityType; + } + else if(customDef != null && customDef.get(entityType) != null) { + defs = (LinkedHashMap )customDef.get(entityType); + } + else{ + defs = null; + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE136", String.format( + "InvalidTypeError: \"%s\" is not a valid type",entityType))); + } + } + type = entityType; + } + + @SuppressWarnings("unchecked") + public ArrayList getPropertiesDefObjects() { + // Return a list of property definition objects + ArrayList properties = new ArrayList(); + LinkedHashMap props = (LinkedHashMap)getDefinition(PROPERTIES); + if(props != null) { + for(Map.Entry me: props.entrySet()) { + String pdname = me.getKey(); + Object to = me.getValue(); + if(to == null || !(to instanceof LinkedHashMap)) { + String s = to == null 
? "null" : to.getClass().getSimpleName(); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE137", String.format( + "Unexpected type error: property \"%s\" has type \"%s\" (expected dict)",pdname,s))); + continue; + } + LinkedHashMap pdschema = (LinkedHashMap)to; + properties.add(new PropertyDef(pdname,null,pdschema)); + } + } + return properties; + } + + public LinkedHashMap getPropertiesDef() { + LinkedHashMap pds = new LinkedHashMap(); + for(PropertyDef pd: getPropertiesDefObjects()) { + pds.put(pd.getName(),pd); + } + return pds; + } + + public PropertyDef getPropertyDefValue(String name) { + // Return the property definition associated with a given name + PropertyDef pd = null; + LinkedHashMap propsDef = getPropertiesDef(); + if(propsDef != null) { + pd = propsDef.get(name); + } + return pd; + } + + public ArrayList getAttributesDefObjects() { + // Return a list of attribute definition objects + @SuppressWarnings("unchecked") + LinkedHashMap attrs = (LinkedHashMap)getValue(ATTRIBUTES,null,true); + ArrayList ads = new ArrayList<>(); + if(attrs != null) { + for(Map.Entry me: attrs.entrySet()) { + String attr = me.getKey(); + @SuppressWarnings("unchecked") + LinkedHashMap adschema = (LinkedHashMap)me.getValue(); + ads.add(new AttributeDef(attr,null,adschema)); + } + } + return ads; + } + + public LinkedHashMap getAttributesDef() { + // Return a dictionary of attribute definition name-object pairs + + LinkedHashMap ads = new LinkedHashMap<>(); + for(AttributeDef ado: getAttributesDefObjects()) { + ads.put(((AttributeDef)ado).getName(),ado); + } + return ads; + } + + public AttributeDef getAttributeDefValue(String name) { + // Return the attribute definition associated with a given name + AttributeDef ad = null; + LinkedHashMap attrsDef = getAttributesDef(); + if(attrsDef != null) { + ad = attrsDef.get(name); + } + return ad; + } + + public String getType() { + return type; + } + } + +/*python + +from toscaparser.common.exception 
import InvalidTypeError +from toscaparser.elements.attribute_definition import AttributeDef +from toscaparser.elements.entity_type import EntityType +from toscaparser.elements.property_definition import PropertyDef +from toscaparser.unsupportedtype import UnsupportedType + + +class StatefulEntityType(EntityType): + '''Class representing TOSCA states.''' + + interfaces_node_lifecycle_operations = ['create', + 'configure', 'start', + 'stop', 'delete'] + + interfaces_relationship_configure_operations = ['post_configure_source', + 'post_configure_target', + 'add_target', + 'remove_target'] + + def __init__(self, entitytype, prefix, custom_def=None): + entire_entitytype = entitytype + if UnsupportedType.validate_type(entire_entitytype): + self.defs = None + else: + if entitytype.startswith(self.TOSCA + ":"): + entitytype = entitytype[(len(self.TOSCA) + 1):] + entire_entitytype = prefix + entitytype + if not entitytype.startswith(self.TOSCA): + entire_entitytype = prefix + entitytype + if entire_entitytype in list(self.TOSCA_DEF.keys()): + self.defs = self.TOSCA_DEF[entire_entitytype] + entitytype = entire_entitytype + elif custom_def and entitytype in list(custom_def.keys()): + self.defs = custom_def[entitytype] + else: + self.defs = None + ValidationIssueCollector.appendException( + InvalidTypeError(what=entitytype)) + self.type = entitytype + + def get_properties_def_objects(self): + '''Return a list of property definition objects.''' + properties = [] + props = self.get_definition(self.PROPERTIES) + if props: + for prop, schema in props.items(): + properties.append(PropertyDef(prop, None, schema)) + return properties + + def get_properties_def(self): + '''Return a dictionary of property definition name-object pairs.''' + return {prop.name: prop + for prop in self.get_properties_def_objects()} + + def get_property_def_value(self, name): + '''Return the property definition associated with a given name.''' + props_def = self.get_properties_def() + if props_def and name 
in props_def.keys(): + return props_def[name].value + + def get_attributes_def_objects(self): + '''Return a list of attribute definition objects.''' + attrs = self.get_value(self.ATTRIBUTES, parent=True) + if attrs: + return [AttributeDef(attr, None, schema) + for attr, schema in attrs.items()] + return [] + + def get_attributes_def(self): + '''Return a dictionary of attribute definition name-object pairs.''' + return {attr.name: attr + for attr in self.get_attributes_def_objects()} + + def get_attribute_def_value(self, name): + '''Return the attribute definition associated with a given name.''' + attrs_def = self.get_attributes_def() + if attrs_def and name in attrs_def.keys(): + return attrs_def[name].value +*/ \ No newline at end of file diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/TypeValidation.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/TypeValidation.java new file mode 100644 index 0000000..3376c69 --- /dev/null +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/TypeValidation.java @@ -0,0 +1,153 @@ +package org.onap.sdc.toscaparser.api.elements; + +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +import java.util.ArrayList; +import java.util.LinkedHashMap; + +import org.onap.sdc.toscaparser.api.extensions.ExtTools; + +public class TypeValidation { + + private static final String DEFINITION_VERSION = "tosca_definitions_version"; + private static final String DESCRIPTION = "description"; + private static final String IMPORTS = "imports"; + private static final String DSL_DEFINITIONS = "dsl_definitions"; + private static final String NODE_TYPES = "node_types"; + private static final String REPOSITORIES = "repositories"; + private static final String DATA_TYPES = "data_types"; + private static final String ARTIFACT_TYPES = "artifact_types"; + private static final String GROUP_TYPES = "group_types"; + private static final String 
RELATIONSHIP_TYPES = "relationship_types"; + private static final String CAPABILITY_TYPES = "capability_types"; + private static final String INTERFACE_TYPES = "interface_types"; + private static final String POLICY_TYPES = "policy_types"; + private static final String TOPOLOGY_TEMPLATE = "topology_template"; + //Pavel + private static final String METADATA = "metadata"; + + private String ALLOWED_TYPE_SECTIONS[] = { + DEFINITION_VERSION, DESCRIPTION, IMPORTS, + DSL_DEFINITIONS, NODE_TYPES, REPOSITORIES, + DATA_TYPES, ARTIFACT_TYPES, GROUP_TYPES, + RELATIONSHIP_TYPES, CAPABILITY_TYPES, + INTERFACE_TYPES, POLICY_TYPES, + TOPOLOGY_TEMPLATE, METADATA + }; + + private static ArrayList VALID_TEMPLATE_VERSIONS = _getVTV(); + + private static ArrayList _getVTV() { + ArrayList vtv = new ArrayList<>(); + vtv.add("tosca_simple_yaml_1_0"); + vtv.add("tosca_simple_yaml_1_1"); + ExtTools exttools = new ExtTools(); + vtv.addAll(exttools.getVersions()); + return vtv; + } + + //private LinkedHashMap customTypes; + private Object importDef; + //private String version; + + public TypeValidation(LinkedHashMap _customTypes, + Object _importDef) { + importDef = _importDef; + _validateTypeKeys(_customTypes); + } + + private void _validateTypeKeys(LinkedHashMap customTypes) { + + String sVersion = (String)customTypes.get(DEFINITION_VERSION); + if(sVersion != null) { + _validateTypeVersion(sVersion); + //version = sVersion; + } + for(String name: customTypes.keySet()) { + boolean bFound = false; + for(String ats: ALLOWED_TYPE_SECTIONS) { + if(name.equals(ats)) { + bFound = true; + break; + } + } + if(!bFound) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE138", String.format( + "UnknownFieldError: Template \"%s\" contains unknown field \"%s\"", + importDef.toString(),name))); + } + } + } + + private void _validateTypeVersion(String sVersion) { + boolean bFound = false; + String allowed = ""; + for(String atv: VALID_TEMPLATE_VERSIONS) { + allowed 
+= "\"" + atv + "\" "; + if(sVersion.equals(atv)) { + bFound = true; + break; + } + } + if(!bFound) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE139", String.format( + "InvalidTemplateVersion: version \"%s\" in \"%s\" is not supported\n" + + "Allowed versions: [%s]", + sVersion,importDef.toString(),allowed))); + } + } +} + +/*python + +from toscaparser.common.exception import ValidationIssueCollector +from toscaparser.common.exception import InvalidTemplateVersion +from toscaparser.common.exception import UnknownFieldError +from toscaparser.extensions.exttools import ExtTools + + +class TypeValidation(object): + + ALLOWED_TYPE_SECTIONS = (DEFINITION_VERSION, DESCRIPTION, IMPORTS, + DSL_DEFINITIONS, NODE_TYPES, REPOSITORIES, + DATA_TYPES, ARTIFACT_TYPES, GROUP_TYPES, + RELATIONSHIP_TYPES, CAPABILITY_TYPES, + INTERFACE_TYPES, POLICY_TYPES, + TOPOLOGY_TEMPLATE) = \ + ('tosca_definitions_version', 'description', 'imports', + 'dsl_definitions', 'node_types', 'repositories', + 'data_types', 'artifact_types', 'group_types', + 'relationship_types', 'capability_types', + 'interface_types', 'policy_types', 'topology_template') + VALID_TEMPLATE_VERSIONS = ['tosca_simple_yaml_1_0'] + exttools = ExtTools() + VALID_TEMPLATE_VERSIONS.extend(exttools.get_versions()) + + def __init__(self, custom_types, import_def): + self.import_def = import_def + self._validate_type_keys(custom_types) + + def _validate_type_keys(self, custom_type): + version = custom_type[self.DEFINITION_VERSION] \ + if self.DEFINITION_VERSION in custom_type \ + else None + if version: + self._validate_type_version(version) + self.version = version + + for name in custom_type: + if name not in self.ALLOWED_TYPE_SECTIONS: + ValidationIssueCollector.appendException( +# UnknownFieldError(what='Template ' + (self.import_def), + UnknownFieldError(what= (self.import_def), + field=name)) + + def _validate_type_version(self, version): + if version not in 
self.VALID_TEMPLATE_VERSIONS: + ValidationIssueCollector.appendException( + InvalidTemplateVersion( +# what=version + ' in ' + self.import_def, + what=self.import_def, + valid_versions=', '. join(self.VALID_TEMPLATE_VERSIONS))) +*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Constraint.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Constraint.java new file mode 100644 index 0000000..fb183f8 --- /dev/null +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Constraint.java @@ -0,0 +1,243 @@ +package org.onap.sdc.toscaparser.api.elements.constraints; + +import java.util.ArrayList; +import java.util.LinkedHashMap; + +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.elements.ScalarUnit; +import org.onap.sdc.toscaparser.api.functions.Function; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +public abstract class Constraint { + + // Parent class for constraints for a Property or Input + + protected static final String EQUAL = "equal"; + protected static final String GREATER_THAN = "greater_than"; + protected static final String GREATER_OR_EQUAL = "greater_or_equal"; + protected static final String LESS_THAN = "less_than"; + protected static final String LESS_OR_EQUAL = "less_or_equal"; + protected static final String IN_RANGE = "in_range"; + protected static final String VALID_VALUES = "valid_values"; + protected static final String LENGTH = "length"; + protected static final String MIN_LENGTH = "min_length"; + protected static final String MAX_LENGTH = "max_length"; + protected static final String PATTERN = "pattern"; + + protected static final String CONSTRAINTS[] = { + EQUAL, GREATER_THAN,GREATER_OR_EQUAL, LESS_THAN, LESS_OR_EQUAL, + IN_RANGE, VALID_VALUES, LENGTH, MIN_LENGTH, MAX_LENGTH, PATTERN}; + + @SuppressWarnings("unchecked") + public static Constraint factory(String constraintClass,String propname,String 
proptype,Object constraint) { + + // a factory for the different Constraint classes + // replaces Python's __new__() usage + + if(!(constraint instanceof LinkedHashMap) || + ((LinkedHashMap)constraint).size() != 1) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE101", + "InvalidSchemaError: Invalid constraint schema " + constraint.toString())); + } + + if(constraintClass.equals(EQUAL)) { + return new Equal(propname,proptype,constraint); + } + else if(constraintClass.equals(GREATER_THAN)) { + return new GreaterThan(propname,proptype,constraint); + } + else if(constraintClass.equals(GREATER_OR_EQUAL)) { + return new GreaterOrEqual(propname,proptype,constraint); + } + else if(constraintClass.equals(LESS_THAN)) { + return new LessThan(propname,proptype,constraint); + } + else if(constraintClass.equals(LESS_OR_EQUAL)) { + return new LessOrEqual(propname,proptype,constraint); + } + else if(constraintClass.equals(IN_RANGE)) { + return new InRange(propname,proptype,constraint); + } + else if(constraintClass.equals(VALID_VALUES)) { + return new ValidValues(propname,proptype,constraint); + } + else if(constraintClass.equals(LENGTH)) { + return new Length(propname,proptype,constraint); + } + else if(constraintClass.equals(MIN_LENGTH)) { + return new MinLength(propname,proptype,constraint); + } + else if(constraintClass.equals(MAX_LENGTH)) { + return new MaxLength(propname,proptype,constraint); + } + else if(constraintClass.equals(PATTERN)) { + return new Pattern(propname,proptype,constraint); + } + else { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE102", String.format( + "InvalidSchemaError: Invalid property \"%s\"",constraintClass))); + return null; + } + } + + protected String constraintKey = "TBD"; + protected ArrayList validTypes = new ArrayList<>(); + protected ArrayList validPropTypes = new ArrayList<>(); + + protected String propertyName; + protected String propertyType; + protected 
Object constraintValue;                // normalized constraint value ('protected' modifier is on the preceding line)
protected Object constraintValueMsg;   // raw constraint value as declared, preserved for error messages
protected Object valueMsg;             // last value handed to validate(), preserved for error messages

/**
 * Shared initialization for all concrete constraint classes.
 *
 * Stores the property name/type, extracts the constraint value for this
 * class's key from the single-entry constraint map, normalizes scalar-unit
 * values to plain numbers, and reports JE103 when the constraint kind does
 * not apply to the property's data type.
 */
@SuppressWarnings("unchecked")
public Constraint(String propname, String proptype, Object constraint) {

    _setValues();

    propertyName = propname;
    propertyType = proptype;
    // NOTE(review): the factory reports (but does not reject) a malformed
    // constraint map, so a non-LinkedHashMap argument still fails here with
    // a ClassCastException -- confirm that is the intended behavior.
    constraintValue = ((LinkedHashMap) constraint).get(constraintKey);
    constraintValueMsg = constraintValue;

    // Scalar-unit typed values ("3 GB", "200 ms", ...) are normalized to
    // plain numbers before any comparison takes place.
    if (_isScalarUnitType(propertyType)) {
        constraintValue = _getScalarUnitConstraintValue();
    }
    // Check that this constraint kind is applicable to the property's type.
    if (!validPropTypes.contains(propertyType)) {
        ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE103", String.format(
            "InvalidSchemaError: Property \"%s\" is not valid for data type \"%s\"",
            constraintKey, propertyType)));
    }
}

// True when "type" is one of the TOSCA scalar-unit types (size/frequency/time).
private static boolean _isScalarUnitType(String type) {
    for (String s : ScalarUnit.SCALAR_UNIT_TYPES) {
        if (s.equals(type)) {
            return true;
        }
    }
    return false;
}

@SuppressWarnings("unchecked")
private Object _getScalarUnitConstraintValue() {
    // Differs from the Python original because of class creation: a factory
    // method dispatches on the property type instead of instantiating a
    // per-type class directly.
    if (constraintValue instanceof ArrayList) {
        ArrayList<Object> normalized = new ArrayList<>();
        for (Object v : (ArrayList) constraintValue) {
            normalized.add(ScalarUnit.getScalarunitClass(propertyType, v).getNumFromScalarUnit(null));
        }
        return normalized;
    }
    return ScalarUnit.getScalarunitClass(propertyType, constraintValue).getNumFromScalarUnit(null);
}

/**
 * Validates "value" against this constraint, reporting JE008 on failure.
 * TOSCA function expressions are skipped; they are resolved elsewhere.
 */
public void validate(Object value) {
    if (Function.isFunction(value)) {
        // skipping constraints check for functions
        return;
    }

    valueMsg = value;
    if (_isScalarUnitType(propertyType)) {
        value = ScalarUnit.getScalarunitValue(propertyType, value, null);
    }
    if (!_isValid(value)) {
        ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE008", "ValidationError: " + _errMsg(value)));
    }
}

// Subclass hooks: the value test, key/valid-type setup, and failure message.
protected abstract boolean _isValid(Object value);

protected abstract void _setValues();

protected abstract String _errMsg(Object value);

}
ValidationIssueCollector.appendException(InvalidSchemaError(message=msg)) + + def _get_scalarunit_constraint_value(self): + if self.property_type in scalarunit.ScalarUnit.SCALAR_UNIT_TYPES: + ScalarUnit_Class = (scalarunit. + get_scalarunit_class(self.property_type)) + if isinstance(self.constraint_value, list): + return [ScalarUnit_Class(v).get_num_from_scalar_unit() + for v in self.constraint_value] + else: + return (ScalarUnit_Class(self.constraint_value). + get_num_from_scalar_unit()) + + def _err_msg(self, value): + return _('Property "%s" could not be validated.') % self.property_name + + def validate(self, value): + self.value_msg = value + if self.property_type in scalarunit.ScalarUnit.SCALAR_UNIT_TYPES: + value = scalarunit.get_scalarunit_value(self.property_type, value) + if not self._is_valid(value): + err_msg = self._err_msg(value) + ValidationIssueCollector.appendException( + ValidationError(message=err_msg)) + + +*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Equal.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Equal.java new file mode 100644 index 0000000..c9a66d9 --- /dev/null +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Equal.java @@ -0,0 +1,61 @@ +package org.onap.sdc.toscaparser.api.elements.constraints; + +public class Equal extends Constraint { + + protected void _setValues() { + + constraintKey = EQUAL; + + for(String s: Schema.PROPERTY_TYPES) { + validPropTypes.add(s); + } + + } + + public Equal(String name,String type,Object c) { + super(name,type,c); + + } + + protected boolean _isValid(Object val) { + // equality of objects is tricky so we're comparing + // the toString() representation + if(val.toString().equals(constraintValue.toString())) { + return true; + } + return false; + } + + protected String _errMsg(Object value) { + return String.format("The value \"%s\" of property \"%s\" is not equal to \"%s\"", + valueMsg,propertyName,constraintValueMsg); + } + 
+} + +/*python + +class Equal(Constraint): +"""Constraint class for "equal" + +Constrains a property or parameter to a value equal to ('=') +the value declared. +""" + +constraint_key = Constraint.EQUAL + +valid_prop_types = Schema.PROPERTY_TYPES + +def _is_valid(self, value): + if value == self.constraint_value: + return True + + return False + +def _err_msg(self, value): + return (_('The value "%(pvalue)s" of property "%(pname)s" is not ' + 'equal to "%(cvalue)s".') % + dict(pname=self.property_name, + pvalue=self.value_msg, + cvalue=self.constraint_value_msg)) +*/ \ No newline at end of file diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/GreaterOrEqual.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/GreaterOrEqual.java new file mode 100644 index 0000000..f9275a5 --- /dev/null +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/GreaterOrEqual.java @@ -0,0 +1,114 @@ +package org.onap.sdc.toscaparser.api.elements.constraints; + +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; + +import java.util.Date; + +import org.onap.sdc.toscaparser.api.functions.Function; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +public class GreaterOrEqual extends Constraint { + // Constraint class for "greater_or_equal" + + // Constrains a property or parameter to a value greater than or equal + // to ('>=') the value declared. 
package org.onap.sdc.toscaparser.api.elements.constraints;

import java.util.Date;

import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue;
import org.onap.sdc.toscaparser.api.functions.Function;
import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder;

/**
 * Constraint for "greater_or_equal": constrains a property or parameter to a
 * value greater than or equal to ('>=') the value declared.
 */
public class GreaterOrEqual extends Constraint {

    @Override
    protected void _setValues() {
        constraintKey = GREATER_OR_EQUAL;

        // Comparable Java runtime types; timestamps are loaded as Date objects.
        validTypes.add("Integer");
        validTypes.add("Double");
        validTypes.add("Float");
        validTypes.add("Date");

        validPropTypes.add(Schema.INTEGER);
        validPropTypes.add(Schema.FLOAT);
        validPropTypes.add(Schema.TIMESTAMP);
        validPropTypes.add(Schema.SCALAR_UNIT_SIZE);
        validPropTypes.add(Schema.SCALAR_UNIT_FREQUENCY);
        validPropTypes.add(Schema.SCALAR_UNIT_TIME);
    }

    public GreaterOrEqual(String name, String type, Object c) {
        super(name, type, c);

        if (!validTypes.contains(constraintValue.getClass().getSimpleName())) {
            ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE104", "InvalidSchemaError: The property \"greater_or_equal\" expects comparable values"));
        }
    }

    @Override
    protected boolean _isValid(Object value) {
        // TOSCA function expressions are resolved later; accept them as-is.
        if (Function.isFunction(value)) {
            return true;
        }

        // Timestamps: ">=" means "not before".
        if (value instanceof Date) {
            if (constraintValue instanceof Date) {
                return !((Date) value).before((Date) constraintValue);
            }
            return false;
        }

        // All other types are compared numerically. Double.parseDouble
        // replaces the deprecated new Double(String) constructor; it still
        // throws NumberFormatException for non-numeric input, as before.
        double n1 = Double.parseDouble(value.toString());
        double n2 = Double.parseDouble(constraintValue.toString());
        return n1 >= n2;
    }

    @Override
    protected String _errMsg(Object value) {
        return String.format("The value \"%s\" of property \"%s\" must be greater or equal to \"%s\"",
                valueMsg, propertyName, constraintValueMsg);
    }
}
package org.onap.sdc.toscaparser.api.elements.constraints;

import java.util.Date;

import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue;
import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder;

/**
 * Constraint for "greater_than": constrains a property or parameter to a
 * value strictly greater than ('>') the value declared.
 */
public class GreaterThan extends Constraint {

    @Override
    protected void _setValues() {
        constraintKey = GREATER_THAN;

        // Comparable Java runtime types; timestamps are loaded as Date objects.
        validTypes.add("Integer");
        validTypes.add("Double");
        validTypes.add("Float");
        validTypes.add("Date");

        validPropTypes.add(Schema.INTEGER);
        validPropTypes.add(Schema.FLOAT);
        validPropTypes.add(Schema.TIMESTAMP);
        validPropTypes.add(Schema.SCALAR_UNIT_SIZE);
        validPropTypes.add(Schema.SCALAR_UNIT_FREQUENCY);
        validPropTypes.add(Schema.SCALAR_UNIT_TIME);
    }

    public GreaterThan(String name, String type, Object c) {
        super(name, type, c);

        if (!validTypes.contains(constraintValue.getClass().getSimpleName())) {
            ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE105", "InvalidSchemaError: The property \"greater_than\" expects comparable values"));
        }
    }

    @Override
    protected boolean _isValid(Object value) {

        // Timestamps: strict ">" means "after".
        if (value instanceof Date) {
            if (constraintValue instanceof Date) {
                return ((Date) value).after((Date) constraintValue);
            }
            return false;
        }

        // Numeric comparison; Double.parseDouble replaces the deprecated
        // new Double(String) constructor (same NumberFormatException behavior).
        double n1 = Double.parseDouble(value.toString());
        double n2 = Double.parseDouble(constraintValue.toString());
        return n1 > n2;
    }

    @Override
    protected String _errMsg(Object value) {
        return String.format("The value \"%s\" of property \"%s\" must be greater than \"%s\"",
                valueMsg, propertyName, constraintValueMsg);
    }
}
package org.onap.sdc.toscaparser.api.elements.constraints;

import java.util.ArrayList;
import java.util.Date;

import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue;
import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder;

/**
 * Constraint for "in_range": constrains a property or parameter to a value in
 * the inclusive range of the two values declared. The special string
 * "UNBOUNDED" may be used for either end to leave it open.
 */
public class InRange extends Constraint {

    private static final String UNBOUNDED = "UNBOUNDED";

    private Object min, max;

    @Override
    protected void _setValues() {
        constraintKey = IN_RANGE;

        // Comparable Java runtime types; "String" is allowed only for the
        // UNBOUNDED sentinel; timestamps are loaded as Date objects.
        validTypes.add("Integer");
        validTypes.add("Double");
        validTypes.add("Float");
        validTypes.add("String");
        validTypes.add("Date");

        validPropTypes.add(Schema.INTEGER);
        validPropTypes.add(Schema.FLOAT);
        validPropTypes.add(Schema.TIMESTAMP);
        validPropTypes.add(Schema.SCALAR_UNIT_SIZE);
        validPropTypes.add(Schema.SCALAR_UNIT_FREQUENCY);
        validPropTypes.add(Schema.SCALAR_UNIT_TIME);
        validPropTypes.add(Schema.RANGE);
    }

    @SuppressWarnings("unchecked")
    public InRange(String name, String type, Object c) {
        super(name, type, c);

        // NOTE(review): a malformed constraint is reported but execution
        // continues, so a non-list value still fails on the cast below with a
        // ClassCastException -- confirm whether that is intended.
        if (!(constraintValue instanceof ArrayList) || ((ArrayList) constraintValue).size() != 2) {
            ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE106", "InvalidSchemaError: The property \"in_range\" expects a list"));
        }

        ArrayList alcv = (ArrayList) constraintValue;
        String msg = "The property \"in_range\" expects comparable values";
        for (Object vo : alcv) {
            if (!validTypes.contains(vo.getClass().getSimpleName())) {
                ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE107", "InvalidSchemaError: " + msg));
            }
            // The only string we allow for range is the special value 'UNBOUNDED'
            if ((vo instanceof String) && !((String) vo).equals(UNBOUNDED)) {
                ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE108", "InvalidSchemaError: " + msg));
            }
        }
        min = alcv.get(0);
        max = alcv.get(1);
    }

    @Override
    protected boolean _isValid(Object value) {

        // Timestamps: the value must lie within [min, max] inclusive.
        if (value instanceof Date) {
            if (min instanceof Date && max instanceof Date) {
                return !((Date) value).before((Date) min)
                        && !((Date) value).after((Date) max);
            }
            return false;
        }

        // Numeric comparison against each bound unless that bound is the
        // UNBOUNDED sentinel. Double.parseDouble replaces the deprecated
        // new Double(String) constructor.
        double dvalue = Double.parseDouble(value.toString());
        if (!(min instanceof String)) {
            if (dvalue < Double.parseDouble(min.toString())) {
                return false;
            }
        }
        else if (!((String) min).equals(UNBOUNDED)) {
            return false;
        }
        if (!(max instanceof String)) {
            if (dvalue > Double.parseDouble(max.toString())) {
                return false;
            }
        }
        else if (!((String) max).equals(UNBOUNDED)) {
            return false;
        }
        return true;
    }

    @Override
    protected String _errMsg(Object value) {
        return String.format("The value \"%s\" of property \"%s\" is out of range \"(min:%s, max:%s)\"",
                valueMsg, propertyName, min.toString(), max.toString());
    }
}
InvalidSchemaError(message=msg)) + + self.min = self.constraint_value[0] + self.max = self.constraint_value[1] + + def _is_valid(self, value): + if not isinstance(self.min, str): + if value < self.min: + return False + elif self.min != self.UNBOUNDED: + return False + if not isinstance(self.max, str): + if value > self.max: + return False + elif self.max != self.UNBOUNDED: + return False + return True + + def _err_msg(self, value): + return (_('The value "%(pvalue)s" of property "%(pname)s" is out of ' + 'range "(min:%(vmin)s, max:%(vmax)s)".') % + dict(pname=self.property_name, + pvalue=self.value_msg, + vmin=self.constraint_value_msg[0], + vmax=self.constraint_value_msg[1])) + +*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Length.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Length.java new file mode 100644 index 0000000..db0eaac --- /dev/null +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Length.java @@ -0,0 +1,79 @@ +package org.onap.sdc.toscaparser.api.elements.constraints; + +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +public class Length extends Constraint { + // Constraint class for "length" + + // Constrains the property or parameter to a value of a given length. 
+ + @Override + protected void _setValues() { + + constraintKey = LENGTH; + + validTypes.add("Integer"); + + validPropTypes.add(Schema.STRING); + + } + + public Length(String name,String type,Object c) { + super(name,type,c); + + if(!validTypes.contains(constraintValue.getClass().getSimpleName())) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE109", "InvalidSchemaError: The property \"length\" expects an integer")); + } + } + + @Override + protected boolean _isValid(Object value) { + if(value instanceof String && constraintValue instanceof Integer && + ((String)value).length() == (Integer)constraintValue) { + return true; + } + return false; + } + + @Override + protected String _errMsg(Object value) { + return String.format("Length of value \"%s\" of property \"%s\" must be equal to \"%s\"", + value.toString(),propertyName,constraintValue.toString()); + } + +} + +/*python + class Length(Constraint): + """Constraint class for "length" + + Constrains the property or parameter to a value of a given length. 
package org.onap.sdc.toscaparser.api.elements.constraints;

import java.util.Date;

import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue;
import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder;

/**
 * Constraint for "less_or_equal": constrains a property or parameter to a
 * value less than or equal to ('<=') the value declared.
 */
public class LessOrEqual extends Constraint {

    @Override
    protected void _setValues() {
        constraintKey = LESS_OR_EQUAL;

        // Comparable Java runtime types; timestamps are loaded as Date objects.
        validTypes.add("Integer");
        validTypes.add("Double");
        validTypes.add("Float");
        validTypes.add("Date");

        validPropTypes.add(Schema.INTEGER);
        validPropTypes.add(Schema.FLOAT);
        validPropTypes.add(Schema.TIMESTAMP);
        validPropTypes.add(Schema.SCALAR_UNIT_SIZE);
        validPropTypes.add(Schema.SCALAR_UNIT_FREQUENCY);
        validPropTypes.add(Schema.SCALAR_UNIT_TIME);
    }

    public LessOrEqual(String name, String type, Object c) {
        super(name, type, c);

        if (!validTypes.contains(constraintValue.getClass().getSimpleName())) {
            ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE110", "InvalidSchemaError: The property \"less_or_equal\" expects comparable values"));
        }
    }

    @Override
    protected boolean _isValid(Object value) {

        // Timestamps: "<=" means "not after".
        if (value instanceof Date) {
            if (constraintValue instanceof Date) {
                return !((Date) value).after((Date) constraintValue);
            }
            return false;
        }

        // Numeric comparison; Double.parseDouble replaces the deprecated
        // new Double(String) constructor (same NumberFormatException behavior).
        double n1 = Double.parseDouble(value.toString());
        double n2 = Double.parseDouble(constraintValue.toString());
        return n1 <= n2;
    }

    @Override
    protected String _errMsg(Object value) {
        return String.format("The value \"%s\" of property \"%s\" must be less or equal to \"%s\"",
                valueMsg, propertyName, constraintValueMsg);
    }
}
package org.onap.sdc.toscaparser.api.elements.constraints;

import java.util.Date;

import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue;
import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder;

/**
 * Constraint for "less_than": constrains a property or parameter to a value
 * strictly less than ('<') the value declared.
 */
public class LessThan extends Constraint {

    @Override
    protected void _setValues() {
        constraintKey = LESS_THAN;

        // Comparable Java runtime types; timestamps are loaded as Date objects.
        validTypes.add("Integer");
        validTypes.add("Double");
        validTypes.add("Float");
        validTypes.add("Date");

        validPropTypes.add(Schema.INTEGER);
        validPropTypes.add(Schema.FLOAT);
        validPropTypes.add(Schema.TIMESTAMP);
        validPropTypes.add(Schema.SCALAR_UNIT_SIZE);
        validPropTypes.add(Schema.SCALAR_UNIT_FREQUENCY);
        validPropTypes.add(Schema.SCALAR_UNIT_TIME);
    }

    public LessThan(String name, String type, Object c) {
        super(name, type, c);

        if (!validTypes.contains(constraintValue.getClass().getSimpleName())) {
            ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE111", "InvalidSchemaError: The property \"less_than\" expects comparable values"));
        }
    }

    @Override
    protected boolean _isValid(Object value) {

        // Timestamps: strict "<" means "before".
        if (value instanceof Date) {
            if (constraintValue instanceof Date) {
                return ((Date) value).before((Date) constraintValue);
            }
            return false;
        }

        // Numeric comparison; Double.parseDouble replaces the deprecated
        // new Double(String) constructor (same NumberFormatException behavior).
        double n1 = Double.parseDouble(value.toString());
        double n2 = Double.parseDouble(constraintValue.toString());
        return n1 < n2;
    }

    @Override
    protected String _errMsg(Object value) {
        return String.format("The value \"%s\" of property \"%s\" must be less than \"%s\"",
                valueMsg, propertyName, constraintValueMsg);
    }
}
package org.onap.sdc.toscaparser.api.elements.constraints;

import java.util.LinkedHashMap;

import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue;
import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder;

/**
 * Constraint for "max_length": constrains the property or parameter to a
 * value of at most the declared length. Applies to strings (character count)
 * and maps (entry count).
 *
 * (Fixes a copy-paste header comment that previously said "min_length".)
 */
public class MaxLength extends Constraint {

    @Override
    protected void _setValues() {
        constraintKey = MAX_LENGTH;

        // The declared maximum itself must be an integer.
        validTypes.add("Integer");

        validPropTypes.add(Schema.STRING);
        validPropTypes.add(Schema.MAP);
    }

    public MaxLength(String name, String type, Object c) {
        super(name, type, c);

        if (!validTypes.contains(constraintValue.getClass().getSimpleName())) {
            ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE112", "InvalidSchemaError: The property \"max_length\" expects an integer"));
        }
    }

    @SuppressWarnings("unchecked")
    @Override
    protected boolean _isValid(Object value) {
        // Strings are measured by character count, maps by entry count; any
        // other value type (or a non-integer bound) fails validation.
        if (value instanceof String && constraintValue instanceof Integer &&
                ((String) value).length() <= (Integer) constraintValue) {
            return true;
        }
        else if (value instanceof LinkedHashMap && constraintValue instanceof Integer &&
                ((LinkedHashMap) value).size() <= (Integer) constraintValue) {
            return true;
        }
        return false;
    }

    @Override
    protected String _errMsg(Object value) {
        return String.format("Length of value \"%s\" of property \"%s\" must be no greater than \"%s\"",
                value.toString(), propertyName, constraintValue.toString());
    }
}
package org.onap.sdc.toscaparser.api.elements.constraints;

import java.util.LinkedHashMap;

import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue;
import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder;

/**
 * Constraint for "min_length": constrains the property or parameter to a
 * value of at least the declared length. Applies to strings (character
 * count) and maps (entry count).
 */
public class MinLength extends Constraint {

    public MinLength(String name, String type, Object c) {
        super(name, type, c);

        if (!validTypes.contains(constraintValue.getClass().getSimpleName())) {
            ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE113", "InvalidSchemaError: The property \"min_length\" expects an integer"));
        }
    }

    @Override
    protected void _setValues() {
        constraintKey = MIN_LENGTH;

        // The declared minimum itself must be an integer.
        validTypes.add("Integer");

        validPropTypes.add(Schema.STRING);
        validPropTypes.add(Schema.MAP);
    }

    @SuppressWarnings("unchecked")
    @Override
    protected boolean _isValid(Object value) {
        // A non-integer bound can never be satisfied.
        if (!(constraintValue instanceof Integer)) {
            return false;
        }
        int bound = (Integer) constraintValue;
        // Strings are measured by character count, maps by entry count.
        if (value instanceof String) {
            return ((String) value).length() >= bound;
        }
        if (value instanceof LinkedHashMap) {
            return ((LinkedHashMap) value).size() >= bound;
        }
        return false;
    }

    @Override
    protected String _errMsg(Object value) {
        return String.format("Length of value \"%s\" of property \"%s\" must be at least \"%s\"",
                value.toString(), propertyName, constraintValue.toString());
    }
}
package org.onap.sdc.toscaparser.api.elements.constraints;

import java.util.regex.PatternSyntaxException;

import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue;
import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder;

/**
 * Constraint for "pattern": constrains the property or parameter to a string
 * value that is entirely matched by the declared regular expression.
 */
public class Pattern extends Constraint {

    @Override
    protected void _setValues() {
        constraintKey = PATTERN;

        // The declared pattern itself must be a string...
        validTypes.add("String");
        // ...and only string-typed properties can carry this constraint.
        validPropTypes.add(Schema.STRING);
    }

    public Pattern(String name, String type, Object c) {
        super(name, type, c);

        if (!validTypes.contains(constraintValue.getClass().getSimpleName())) {
            ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE114", "InvalidSchemaError: The property \"pattern\" expects a string"));
        }
    }

    @Override
    protected boolean _isValid(Object value) {
        try {
            if (!(value instanceof String)) {
                ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE115", String.format("ValueError: Input value \"%s\" to \"pattern\" property \"%s\" must be a string",
                        value.toString(), propertyName)));
                return false;
            }
            // The reference semantics (Python re.match with end() == len)
            // require the WHOLE value to match from position 0.
            // Matcher.matches() anchors the regex at both ends; the previous
            // find() + end() == length() check wrongly accepted matches that
            // started mid-string (e.g. pattern "b" against "ab").
            java.util.regex.Pattern pattern = java.util.regex.Pattern.compile(constraintValue.toString());
            return pattern.matcher(value.toString()).matches();
        }
        catch (PatternSyntaxException pse) {
            ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE116", String.format("ValueError: Invalid regex \"%s\" in \"pattern\" property \"%s\"",
                    constraintValue.toString(), propertyName)));
            return false;
        }
    }

    @Override
    protected String _errMsg(Object value) {
        return String.format("The value \"%s\" of property \"%s\" does not match the pattern \"%s\"",
                value.toString(), propertyName, constraintValue.toString());
    }
}
+ """ + + constraint_key = Constraint.PATTERN + + valid_types = (str, ) + + valid_prop_types = (Schema.STRING, ) + + def __init__(self, property_name, property_type, constraint): + super(Pattern, self).__init__(property_name, property_type, constraint) + if not isinstance(self.constraint_value, self.valid_types): + ValidationIsshueCollector.appendException( + InvalidSchemaError(message=_('The property "pattern" ' + 'expects a string.'))) + self.match = re.compile(self.constraint_value).match + + def _is_valid(self, value): + match = self.match(value) + return match is not None and match.end() == len(value) + + def _err_msg(self, value): + return (_('The value "%(pvalue)s" of property "%(pname)s" does not ' + 'match pattern "%(cvalue)s".') % + dict(pname=self.property_name, + pvalue=value, + cvalue=self.constraint_value)) +*/ \ No newline at end of file diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Schema.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Schema.java new file mode 100644 index 0000000..73a63ef --- /dev/null +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Schema.java @@ -0,0 +1,278 @@ +package org.onap.sdc.toscaparser.api.elements.constraints; + +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.Map; + + +public class Schema { + + private static final String TYPE = "type"; + private static final String REQUIRED = "required"; + private static final String DESCRIPTION = "description"; + private static final String DEFAULT = "default"; + private static final String CONSTRAINTS = "constraints"; + private static final String STATUS = "status"; + private static final String ENTRYSCHEMA = "entry_schema"; + private static final String KEYS[] = { + TYPE, REQUIRED, DESCRIPTION,DEFAULT, CONSTRAINTS, 
ENTRYSCHEMA, STATUS}; + + public static final String INTEGER = "integer"; + public static final String STRING = "string"; + public static final String BOOLEAN = "boolean"; + public static final String FLOAT = "float"; + public static final String RANGE = "range"; + public static final String NUMBER = "number"; + public static final String TIMESTAMP = "timestamp"; + public static final String LIST = "list"; + public static final String MAP = "map"; + public static final String SCALAR_UNIT_SIZE = "scalar-unit.size"; + public static final String SCALAR_UNIT_FREQUENCY = "scalar-unit.frequency"; + public static final String SCALAR_UNIT_TIME = "scalar-unit.time"; + public static final String VERSION = "version"; + public static final String PORTDEF = "PortDef"; + public static final String PORTSPEC = "PortSpec"; //??? PortSpec.SHORTNAME + public static final String JSON = "json"; + + public static final String PROPERTY_TYPES[] = { + INTEGER, STRING, BOOLEAN, FLOAT, RANGE,NUMBER, TIMESTAMP, LIST, MAP, + SCALAR_UNIT_SIZE, SCALAR_UNIT_FREQUENCY, SCALAR_UNIT_TIME, + VERSION, PORTDEF, PORTSPEC, JSON}; + + @SuppressWarnings("unused") + private static final String SCALAR_UNIT_SIZE_DEFAULT = "B"; + + private static Map SCALAR_UNIT_SIZE_DICT = new HashMap<>(); + static { + SCALAR_UNIT_SIZE_DICT.put("B", 1L); + SCALAR_UNIT_SIZE_DICT.put("KB", 1000L); + SCALAR_UNIT_SIZE_DICT.put("KIB", 1024L); + SCALAR_UNIT_SIZE_DICT.put("MB", 1000000L); + SCALAR_UNIT_SIZE_DICT.put("MIB", 1048576L); + SCALAR_UNIT_SIZE_DICT.put("GB", 1000000000L); + SCALAR_UNIT_SIZE_DICT.put("GIB", 1073741824L); + SCALAR_UNIT_SIZE_DICT.put("TB", 1000000000000L); + SCALAR_UNIT_SIZE_DICT.put("TIB", 1099511627776L); + } + + private String name; + private LinkedHashMap schema; + private int _len; + private ArrayList constraintsList; + + + public Schema(String _name,LinkedHashMap _schemaDict) { + name = _name; + + if(!(_schemaDict instanceof LinkedHashMap)) { + //msg = (_('Schema definition of "%(pname)s" must be a 
dict.') + // % dict(pname=name)) + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE117", String.format( + "InvalidSchemaError: Schema definition of \"%s\" must be a dict",name))); + } + + if(_schemaDict.get("type") == null) { + //msg = (_('Schema definition of "%(pname)s" must have a "type" ' + // 'attribute.') % dict(pname=name)) + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE118", String.format( + "InvalidSchemaError: Schema definition of \"%s\" must have a \"type\" attribute",name))); + } + + schema = _schemaDict; + _len = 0; //??? None + constraintsList = new ArrayList<>(); + } + + public String getType() { + return (String)schema.get(TYPE); + } + + public boolean isRequired() { + return (boolean)schema.getOrDefault(REQUIRED, true); + } + + public String getDescription() { + return (String)schema.getOrDefault(DESCRIPTION,""); + } + + public Object getDefault() { + return schema.get(DEFAULT); + } + + public String getStatus() { + return (String)schema.getOrDefault(STATUS,""); + } + + @SuppressWarnings("unchecked") + public ArrayList getConstraints() { + if(constraintsList.size() == 0) { + Object cob = schema.get(CONSTRAINTS); + if(cob instanceof ArrayList) { + ArrayList constraintSchemata = (ArrayList)cob; + for(Object ob: constraintSchemata) { + if(ob instanceof LinkedHashMap) { + for(String cClass: ((LinkedHashMap)ob).keySet()) { + Constraint c = Constraint.factory(cClass,name,getType(),ob); + if(c != null) { + constraintsList.add(c); + } + else { + // error + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE119", String.format( + "UnknownFieldError: Constraint type \"%s\" for property \"%s\" is not supported", + cClass,name))); + } + break; + } + } + } + } + } + return constraintsList; + } + + @SuppressWarnings("unchecked") + public LinkedHashMap getEntrySchema() { + return (LinkedHashMap)schema.get(ENTRYSCHEMA); + } + + // Python intrinsic 
methods... + + // substitute for __getitem__ (aka self[key]) + public Object getItem(String key) { + return schema.get(key); + } + + /* + def __iter__(self): + for k in self.KEYS: + try: + self.schema[k] + except KeyError: + pass + else: + yield k + */ + + // substitute for __len__ (aka self.len()) + public int getLen() { + int len = 0; + for(String k: KEYS) { + if(schema.get(k) != null) { + len++; + } + _len = len; + } + return _len; + } + // getter + public LinkedHashMap getSchema() { + return schema; + } + +} + +/*python + +class Schema(collections.Mapping): + +KEYS = ( + TYPE, REQUIRED, DESCRIPTION, + DEFAULT, CONSTRAINTS, ENTRYSCHEMA, STATUS +) = ( + 'type', 'required', 'description', + 'default', 'constraints', 'entry_schema', 'status' +) + +PROPERTY_TYPES = ( + INTEGER, STRING, BOOLEAN, FLOAT, RANGE, + NUMBER, TIMESTAMP, LIST, MAP, + SCALAR_UNIT_SIZE, SCALAR_UNIT_FREQUENCY, SCALAR_UNIT_TIME, + VERSION, PORTDEF, PORTSPEC +) = ( + 'integer', 'string', 'boolean', 'float', 'range', + 'number', 'timestamp', 'list', 'map', + 'scalar-unit.size', 'scalar-unit.frequency', 'scalar-unit.time', + 'version', 'PortDef', PortSpec.SHORTNAME +) + +SCALAR_UNIT_SIZE_DEFAULT = 'B' +SCALAR_UNIT_SIZE_DICT = {'B': 1, 'KB': 1000, 'KIB': 1024, 'MB': 1000000, + 'MIB': 1048576, 'GB': 1000000000, + 'GIB': 1073741824, 'TB': 1000000000000, + 'TIB': 1099511627776} + +def __init__(self, name, schema_dict): + self.name = name + if not isinstance(schema_dict, collections.Mapping): + msg = (_('Schema definition of "%(pname)s" must be a dict.') + % dict(pname=name)) + ValidationIssueCollector.appendException(InvalidSchemaError(message=msg)) + + try: + schema_dict['type'] + except KeyError: + msg = (_('Schema definition of "%(pname)s" must have a "type" ' + 'attribute.') % dict(pname=name)) + ValidationIssueCollector.appendException(InvalidSchemaError(message=msg)) + + self.schema = schema_dict + self._len = None + self.constraints_list = [] + +@property +def type(self): + return 
self.schema[self.TYPE] + +@property +def required(self): + return self.schema.get(self.REQUIRED, True) + +@property +def description(self): + return self.schema.get(self.DESCRIPTION, '') + +@property +def default(self): + return self.schema.get(self.DEFAULT) + +@property +def status(self): + return self.schema.get(self.STATUS, '') + +@property +def constraints(self): + if not self.constraints_list: + constraint_schemata = self.schema.get(self.CONSTRAINTS) + if constraint_schemata: + self.constraints_list = [Constraint(self.name, + self.type, + cschema) + for cschema in constraint_schemata] + return self.constraints_list + +@property +def entry_schema(self): + return self.schema.get(self.ENTRYSCHEMA) + +def __getitem__(self, key): + return self.schema[key] + +def __iter__(self): + for k in self.KEYS: + try: + self.schema[k] + except KeyError: + pass + else: + yield k + +def __len__(self): + if self._len is None: + self._len = len(list(iter(self))) + return self._len +*/ \ No newline at end of file diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/ValidValues.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/ValidValues.java new file mode 100644 index 0000000..23f25ed --- /dev/null +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/ValidValues.java @@ -0,0 +1,84 @@ +package org.onap.sdc.toscaparser.api.elements.constraints; + +import java.util.ArrayList; + +public class ValidValues extends Constraint { + + + protected void _setValues() { + + constraintKey = VALID_VALUES; + + for(String s: Schema.PROPERTY_TYPES) { + validPropTypes.add(s); + } + + } + + + public ValidValues(String name,String type,Object c) { + super(name,type,c); + + } + + @SuppressWarnings("unchecked") + protected boolean _isValid(Object val) { + if(!(constraintValue instanceof ArrayList)) { + return false; + } + if(val instanceof ArrayList) { + boolean bAll = true; + for(Object v: (ArrayList)val) { + 
if(!((ArrayList)constraintValue).contains(v)) { + bAll = false; + break; + }; + } + return bAll; + } + return ((ArrayList)constraintValue).contains(val); + } + + protected String _errMsg(Object value) { + return String.format("The value \"%s\" of property \"%s\" is not valid. Expected a value from \"%s\"", + value.toString(),propertyName,constraintValue.toString()); + } + +} + +/*python + +class ValidValues(Constraint): +"""Constraint class for "valid_values" + +Constrains a property or parameter to a value that is in the list of +declared values. +""" +constraint_key = Constraint.VALID_VALUES + +valid_prop_types = Schema.PROPERTY_TYPES + +def __init__(self, property_name, property_type, constraint): + super(ValidValues, self).__init__(property_name, property_type, + constraint) + if not isinstance(self.constraint_value, collections.Sequence): + ValidationIsshueCollector.appendException( + InvalidSchemaError(message=_('The property "valid_values" ' + 'expects a list.'))) + +def _is_valid(self, value): + print '*** payton parser validating ',value,' in ',self.constraint_value#GGG + if isinstance(value, list): + return all(v in self.constraint_value for v in value) + return value in self.constraint_value + +def _err_msg(self, value): + allowed = '[%s]' % ', '.join(str(a) for a in self.constraint_value) + return (_('The value "%(pvalue)s" of property "%(pname)s" is not ' + 'valid. 
Expected a value from "%(cvalue)s".') % + dict(pname=self.property_name, + pvalue=value, + cvalue=allowed)) + + +*/ \ No newline at end of file diff --git a/src/main/java/org/onap/sdc/toscaparser/api/extensions/ExtTools.java b/src/main/java/org/onap/sdc/toscaparser/api/extensions/ExtTools.java new file mode 100644 index 0000000..4a8309e --- /dev/null +++ b/src/main/java/org/onap/sdc/toscaparser/api/extensions/ExtTools.java @@ -0,0 +1,192 @@ +package org.onap.sdc.toscaparser.api.extensions; + +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; +import org.reflections.Reflections; +import org.reflections.scanners.ResourcesScanner; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.BufferedReader; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.charset.Charset; +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.Set; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +public class ExtTools { + + private static Logger log = LoggerFactory.getLogger(ExtTools.class.getName()); + + private static LinkedHashMap EXTENSION_INFO = new LinkedHashMap<>(); + + public ExtTools() { + + EXTENSION_INFO = _loadExtensions(); + } + + private LinkedHashMap _loadExtensions() { + + LinkedHashMap extensions = new LinkedHashMap<>(); + + Reflections reflections = new Reflections("extensions", new ResourcesScanner()); + Set resourcePaths = reflections.getResources(Pattern.compile(".*\\.py$")); + + for(String resourcePath : resourcePaths) { + try (InputStream is = ExtTools.class.getClassLoader().getResourceAsStream(resourcePath); + InputStreamReader isr = new InputStreamReader(is, Charset.forName("UTF-8")); + BufferedReader br = new BufferedReader(isr);){ + String version = null; + ArrayList sections = null; + String defsFile = null; + String line; + + Pattern pattern = 
Pattern.compile("^([^#]\\S+)\\s*=\\s*(\\S.*)$"); + while ((line = br.readLine()) != null) { + line = line.replace("'", "\""); + Matcher matcher = pattern.matcher(line.toString()); + if (matcher.find()) { + if (matcher.group(1).equals("VERSION")) { + version = matcher.group(2); + if (version.startsWith("'") || version.startsWith("\"")) { + version = version.substring(1, version.length() - 1); + } + } + else if (matcher.group(1).equals("DEFS_FILE")) { + String fn = matcher.group(2); + if (fn.startsWith("'") || fn.startsWith("\"")) { + fn = fn.substring(1, fn.length() - 1); + } + defsFile = resourcePath.replaceFirst("\\w*.py$", fn); + } + else if (matcher.group(1).equals("SECTIONS")) { + sections = new ArrayList<>(); + Pattern secpat = Pattern.compile("\"([^\"]+)\""); + Matcher secmat = secpat.matcher(matcher.group(2)); + while (secmat.find()) { + sections.add(secmat.group(1)); + } + } + } + } + + if (version != null && defsFile != null) { + LinkedHashMap ext = new LinkedHashMap<>(); + ext.put("defs_file", defsFile); + if (sections != null) { + ext.put("sections", sections); + } + extensions.put(version, ext); + } + else { + // error + } + } + catch (Exception e) { + log.error("ExtTools - _loadExtensions - {}", e); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue + ("JE281", "Failed to load extensions" + e.getMessage())); + // ... 
+ } + } + return extensions; + } + + public ArrayList getVersions() { + return new ArrayList(EXTENSION_INFO.keySet()); + } + + public LinkedHashMap> getSections() { + LinkedHashMap> sections = new LinkedHashMap<>(); + for(String version: EXTENSION_INFO.keySet()) { + LinkedHashMap eiv = (LinkedHashMap)EXTENSION_INFO.get(version); + sections.put(version,(ArrayList)eiv.get("sections")); + } + return sections; + } + + public String getDefsFile(String version) { + LinkedHashMap eiv = (LinkedHashMap)EXTENSION_INFO.get(version); + return (String)eiv.get("defs_file"); + } + +} + +/*python + +from toscaparser.common.exception import ToscaExtAttributeError +from toscaparser.common.exception import ToscaExtImportError + +log = logging.getLogger("tosca.model") + +REQUIRED_ATTRIBUTES = ['VERSION', 'DEFS_FILE'] + + +class ExtTools(object): + def __init__(self): + self.EXTENSION_INFO = self._load_extensions() + + def _load_extensions(self): + '''Dynamically load all the extensions .''' + extensions = {} + + # Use the absolute path of the class path + abs_path = os.path.dirname(os.path.abspath(__file__)) + + extdirs = [e for e in os.listdir(abs_path) if + not e.startswith('tests') and + os.path.isdir(os.path.join(abs_path, e))] + + for e in extdirs: + log.info(e) + extpath = abs_path + '/' + e + # Grab all the extension files in the given path + ext_files = [f for f in os.listdir(extpath) if f.endswith('.py') + and not f.startswith('__init__')] + + # For each module, pick out the target translation class + for f in ext_files: + log.info(f) + ext_name = 'toscaparser/extensions/' + e + '/' + f.strip('.py') + ext_name = ext_name.replace('/', '.') + try: + extinfo = importlib.import_module(ext_name) + version = getattr(extinfo, 'VERSION') + defs_file = extpath + '/' + getattr(extinfo, 'DEFS_FILE') + + # Sections is an optional attribute + sections = getattr(extinfo, 'SECTIONS', ()) + + extensions[version] = {'sections': sections, + 'defs_file': defs_file} + except ImportError: + raise 
ToscaExtImportError(ext_name=ext_name) + except AttributeError: + attrs = ', '.join(REQUIRED_ATTRIBUTES) + raise ToscaExtAttributeError(ext_name=ext_name, + attrs=attrs) + + print 'Extensions ',extensions#GGG + return extensions + + def get_versions(self): + return self.EXTENSION_INFO.keys() + + def get_sections(self): + sections = {} + for version in self.EXTENSION_INFO.keys(): + sections[version] = self.EXTENSION_INFO[version]['sections'] + + return sections + + def get_defs_file(self, version): + versiondata = self.EXTENSION_INFO.get(version) + + if versiondata: + return versiondata.get('defs_file') + else: + return None +*/ \ No newline at end of file diff --git a/src/main/java/org/onap/sdc/toscaparser/api/functions/Concat.java b/src/main/java/org/onap/sdc/toscaparser/api/functions/Concat.java new file mode 100644 index 0000000..0b09c73 --- /dev/null +++ b/src/main/java/org/onap/sdc/toscaparser/api/functions/Concat.java @@ -0,0 +1,77 @@ +package org.onap.sdc.toscaparser.api.functions; + +import org.onap.sdc.toscaparser.api.TopologyTemplate; +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +import java.util.ArrayList; + +public class Concat extends Function { + // Validate the function and provide an instance of the function + + // Concatenation of values are supposed to be produced at runtime and + // therefore its the responsibility of the TOSCA engine to implement the + // evaluation of Concat functions. 
+ + // Arguments: + + // * List of strings that needs to be concatenated + + // Example: + + // [ 'http://', + // get_attribute: [ server, public_address ], + // ':' , + // get_attribute: [ server, port ] ] + + + public Concat(TopologyTemplate ttpl, Object context, String name, ArrayList args) { + super(ttpl,context,name,args); + } + + @Override + public Object result() { + return this; + } + + @Override + void validate() { + if(args.size() < 1) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE145", + "ValueError: Invalid arguments for function \"concat\". " + + "Expected at least one argument")); + } + } + +} + +/*python + +class Concat(Function): +"""Validate the function and provide an instance of the function + +Concatenation of values are supposed to be produced at runtime and +therefore its the responsibility of the TOSCA engine to implement the +evaluation of Concat functions. + +Arguments: + +* List of strings that needs to be concatenated + +Example: + + [ 'http://', + get_attribute: [ server, public_address ], + ':' , + get_attribute: [ server, port ] ] +""" + +def validate(self): + if len(self.args) < 1: + ValidationIsshueCollector.appendException( + ValueError(_('Invalid arguments for function "{0}". 
Expected ' + 'at least one arguments.').format(CONCAT))) + +def result(self): + return self +*/ \ No newline at end of file diff --git a/src/main/java/org/onap/sdc/toscaparser/api/functions/Function.java b/src/main/java/org/onap/sdc/toscaparser/api/functions/Function.java new file mode 100644 index 0000000..cb40c4c --- /dev/null +++ b/src/main/java/org/onap/sdc/toscaparser/api/functions/Function.java @@ -0,0 +1,235 @@ +package org.onap.sdc.toscaparser.api.functions; + +import java.util.*; + +import org.onap.sdc.toscaparser.api.TopologyTemplate; + +public abstract class Function { + + protected static final String GET_PROPERTY = "get_property"; + protected static final String GET_ATTRIBUTE = "get_attribute"; + protected static final String GET_INPUT = "get_input"; + protected static final String GET_OPERATION_OUTPUT = "get_operation_output"; + protected static final String CONCAT = "concat"; + protected static final String TOKEN = "token"; + + protected static final String SELF = "SELF"; + protected static final String HOST = "HOST"; + protected static final String TARGET = "TARGET"; + protected static final String SOURCE = "SOURCE"; + + protected static final String HOSTED_ON = "tosca.relationships.HostedOn"; + + protected static HashMap functionMappings = _getFunctionMappings(); + + private static HashMap _getFunctionMappings() { + HashMap map = new HashMap<>(); + map.put(GET_PROPERTY,"GetProperty"); + map.put(GET_INPUT, "GetInput"); + map.put(GET_ATTRIBUTE, "GetAttribute"); + map.put(GET_OPERATION_OUTPUT, "GetOperationOutput"); + map.put(CONCAT, "Concat"); + map.put(TOKEN, "Token"); + return map; + } + + protected TopologyTemplate toscaTpl; + protected Object context; + protected String name; + protected ArrayList args; + + + public Function(TopologyTemplate _toscaTpl,Object _context,String _name,ArrayList _args) { + toscaTpl = _toscaTpl; + context = _context; + name = _name; + args = _args; + validate(); + + } + + abstract Object result(); + + abstract void 
validate(); + + @SuppressWarnings("unchecked") + public static boolean isFunction(Object funcObj) { + // Returns True if the provided function is a Tosca intrinsic function. + // + //Examples: + // + //* "{ get_property: { SELF, port } }" + //* "{ get_input: db_name }" + //* Function instance + + //:param function: Function as string or a Function instance. + //:return: True if function is a Tosca intrinsic function, otherwise False. + // + + if(funcObj instanceof LinkedHashMap) { + LinkedHashMap function = (LinkedHashMap)funcObj; + if(function.size() == 1) { + String funcName = (new ArrayList(function.keySet())).get(0); + return functionMappings.keySet().contains(funcName); + } + } + return (funcObj instanceof Function); + } + + @SuppressWarnings("unchecked") + public static Object getFunction(TopologyTemplate ttpl,Object context,Object rawFunctionObj, boolean resolveGetInput) { + // Gets a Function instance representing the provided template function. + + // If the format provided raw_function format is not relevant for template + // functions or if the function name doesn't exist in function mapping the + // method returns the provided raw_function. + // + // :param tosca_tpl: The tosca template. + // :param node_template: The node template the function is specified for. + // :param raw_function: The raw function as dict. + // :return: Template function as Function instance or the raw_function if + // parsing was unsuccessful. + + + // iterate over leaves of the properties's tree and convert function leaves to function object, + // support List and Map nested, + // assuming that leaf value of function is always map type contains 1 item (e.g. my_leaf: {get_input: xxx}). 
+ + if (rawFunctionObj instanceof LinkedHashMap) { // In map type case + LinkedHashMap rawFunction = ((LinkedHashMap) rawFunctionObj); + if(rawFunction.size() == 1 && + !(rawFunction.values().iterator().next() instanceof LinkedHashMap)) { // End point + return getFunctionForObjectItem(ttpl, context, rawFunction, resolveGetInput); + } else { + return getFunctionForMap(ttpl, context, rawFunction, resolveGetInput); + } + } else if (rawFunctionObj instanceof ArrayList) { // In list type case + return getFunctionForList(ttpl, context, (ArrayList) rawFunctionObj, resolveGetInput); + } + + return rawFunctionObj; + } + + private static Object getFunctionForList(TopologyTemplate ttpl, Object context, ArrayList rawFunctionObj, boolean resolveGetInput) { + // iterate over list properties in recursion, convert leaves to function, + // and collect them in the same hierarchy as the original list. + ArrayList rawFunctionObjList = new ArrayList<>(); + for (Object rawFunctionObjItem: rawFunctionObj) { + rawFunctionObjList.add(getFunction(ttpl, context, rawFunctionObjItem, resolveGetInput)); + } + return rawFunctionObjList; + } + + private static Object getFunctionForMap(TopologyTemplate ttpl, Object context, LinkedHashMap rawFunction, boolean resolveGetInput) { + // iterate over map nested properties in recursion, convert leaves to function, + // and collect them in the same hierarchy as the original map. 
+ LinkedHashMap rawFunctionObjMap = new LinkedHashMap(); + for (Object rawFunctionObjItem: rawFunction.entrySet()) { + Object itemValue = getFunction(ttpl, context, ((Map.Entry)rawFunctionObjItem).getValue(), resolveGetInput); + rawFunctionObjMap.put(((Map.Entry)rawFunctionObjItem).getKey(), itemValue); + } + return rawFunctionObjMap; + } + + private static Object getFunctionForObjectItem(TopologyTemplate ttpl, Object context, Object rawFunctionObjItem, boolean resolveGetInput) { + if(isFunction(rawFunctionObjItem)) { + LinkedHashMap rawFunction = (LinkedHashMap) rawFunctionObjItem; + String funcName = (new ArrayList(rawFunction.keySet())).get(0); + if (functionMappings.keySet().contains(funcName)) { + String funcType = functionMappings.get(funcName); + Object oargs = (new ArrayList(rawFunction.values())).get(0); + ArrayList funcArgs; + if (oargs instanceof ArrayList) { + funcArgs = (ArrayList) oargs; + } else { + funcArgs = new ArrayList<>(); + funcArgs.add(oargs); + } + + switch (funcType) { + case "GetInput": + if (resolveGetInput) { + GetInput input = new GetInput(ttpl, context, funcName, funcArgs); + return input.result(); + } + return new GetInput(ttpl, context, funcName, funcArgs); + case "GetAttribute": + return new GetAttribute(ttpl, context, funcName, funcArgs); + case "GetProperty": + return new GetProperty(ttpl, context, funcName, funcArgs); + case "GetOperationOutput": + return new GetOperationOutput(ttpl, context, funcName, funcArgs); + case "Concat": + return new Concat(ttpl, context, funcName, funcArgs); + case "Token": + return new Token(ttpl, context, funcName, funcArgs); + } + } + } + + return rawFunctionObjItem; + } + + @Override + public String toString() { + String argsStr = args.size() > 1 ? 
args.toString() : args.get(0).toString(); + return name + ":" + argsStr; + } +} + +/*python + +from toscaparser.common.exception import ValidationIsshueCollector +from toscaparser.common.exception import UnknownInputError +from toscaparser.dataentity import DataEntity +from toscaparser.elements.constraints import Schema +from toscaparser.elements.datatype import DataType +from toscaparser.elements.entity_type import EntityType +from toscaparser.elements.relationshiptype import RelationshipType +from toscaparser.elements.statefulentitytype import StatefulEntityType +from toscaparser.utils.gettextutils import _ + + +GET_PROPERTY = 'get_property' +GET_ATTRIBUTE = 'get_attribute' +GET_INPUT = 'get_input' +GET_OPERATION_OUTPUT = 'get_operation_output' +CONCAT = 'concat' +TOKEN = 'token' + +SELF = 'SELF' +HOST = 'HOST' +TARGET = 'TARGET' +SOURCE = 'SOURCE' + +HOSTED_ON = 'tosca.relationships.HostedOn' + + +@six.add_metaclass(abc.ABCMeta) +class Function(object): + """An abstract type for representing a Tosca template function.""" + + def __init__(self, tosca_tpl, context, name, args): + self.tosca_tpl = tosca_tpl + self.context = context + self.name = name + self.args = args + self.validate() + + @abc.abstractmethod + def result(self): + """Invokes the function and returns its result + + Some methods invocation may only be relevant on runtime (for example, + getting runtime properties) and therefore its the responsibility of + the orchestrator/translator to take care of such functions invocation. + + :return: Function invocation result. 
+ """ + return {self.name: self.args} + + @abc.abstractmethod + def validate(self): + """Validates function arguments.""" + pass +*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/functions/GetAttribute.java b/src/main/java/org/onap/sdc/toscaparser/api/functions/GetAttribute.java new file mode 100644 index 0000000..8648e4e --- /dev/null +++ b/src/main/java/org/onap/sdc/toscaparser/api/functions/GetAttribute.java @@ -0,0 +1,524 @@ +package org.onap.sdc.toscaparser.api.functions; + +import org.onap.sdc.toscaparser.api.*; +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +import java.util.ArrayList; +import java.util.LinkedHashMap; + +import org.onap.sdc.toscaparser.api.*; +import org.onap.sdc.toscaparser.api.elements.AttributeDef; +import org.onap.sdc.toscaparser.api.elements.CapabilityTypeDef; +import org.onap.sdc.toscaparser.api.elements.DataType; +import org.onap.sdc.toscaparser.api.elements.EntityType; +import org.onap.sdc.toscaparser.api.elements.NodeType; +import org.onap.sdc.toscaparser.api.elements.PropertyDef; +import org.onap.sdc.toscaparser.api.elements.RelationshipType; +import org.onap.sdc.toscaparser.api.elements.StatefulEntityType; +import org.onap.sdc.toscaparser.api.elements.constraints.Schema; + +public class GetAttribute extends Function { + // Get an attribute value of an entity defined in the service template + + // Node template attributes values are set in runtime and therefore its the + // responsibility of the Tosca engine to implement the evaluation of + // get_attribute functions. + + // Arguments: + + // * Node template name | HOST. + // * Attribute name. + + // If the HOST keyword is passed as the node template name argument the + // function will search each node template along the HostedOn relationship + // chain until a node which contains the attribute is found. 
+ + // Examples: + + // * { get_attribute: [ server, private_address ] } + // * { get_attribute: [ HOST, private_address ] } + // * { get_attribute: [ HOST, private_address, 0 ] } + // * { get_attribute: [ HOST, private_address, 0, some_prop] } + + public GetAttribute(TopologyTemplate ttpl, Object context, String name, ArrayList args) { + super(ttpl, context, name, args); + } + + @Override + void validate() { + if (args.size() < 2) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE146", + "ValueError: Illegal arguments for function \"get_attribute\". Expected arguments: \"node-template-name\", \"req-or-cap\" (optional), \"property name.\"")); + return; + } else if (args.size() == 2) { + _findNodeTemplateContainingAttribute(); + } else { + NodeTemplate nodeTpl = _findNodeTemplate((String) args.get(0)); + if (nodeTpl == null) { + return; + } + int index = 2; + AttributeDef attr = nodeTpl.getTypeDefinition().getAttributeDefValue((String) args.get(1)); + if (attr != null) { + // found + } else { + index = 3; + // then check the req or caps + if (!(args.get(1) instanceof String) || !(args.get(2) instanceof String)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE146", "ValueError: Illegal arguments for function \"get_attribute\". Expected a String argument")); + } + + attr = _findReqOrCapAttribute(args.get(1).toString(), args.get(2).toString()); + if (attr == null) { + return; + } + } + + + String valueType = (String) attr.getSchema().get("type"); + if (args.size() > index) { + for (Object elem : args.subList(index, args.size())) { + if (valueType.equals("list")) { + if (!(elem instanceof Integer)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE147", String.format( + "ValueError: Illegal arguments for function \"get_attribute\" \"%s\". 
Expected positive integer argument", + elem.toString()))); + } + Object ob = attr.getSchema().get("entry_schema"); + valueType = (String) + ((LinkedHashMap) ob).get("type"); + } else if (valueType.equals("map")) { + Object ob = attr.getSchema().get("entry_schema"); + valueType = (String) + ((LinkedHashMap) ob).get("type"); + } else { + boolean bFound = false; + for (String p : Schema.PROPERTY_TYPES) { + if (p.equals(valueType)) { + bFound = true; + break; + } + } + if (bFound) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE148", String.format( + "ValueError: 'Illegal arguments for function \"get_attribute\". Unexpected attribute/index value \"%s\"", + elem))); + return; + } else { // It is a complex type + DataType dataType = new DataType(valueType, null); + LinkedHashMap props = + dataType.getAllProperties(); + PropertyDef prop = props.get((String) elem); + if (prop != null) { + valueType = (String) prop.getSchema().get("type"); + } else { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE149", String.format( + "KeyError: Illegal arguments for function \"get_attribute\". Attribute name \"%s\" not found in \"%\"", + elem, valueType))); + } + } + } + } + } + } + } + + @Override + public Object result() { + return this; + } + + private NodeTemplate getReferencedNodeTemplate() { + // Gets the NodeTemplate instance the get_attribute function refers to + + // If HOST keyword was used as the node template argument, the node + // template which contains the attribute along the HostedOn relationship + // chain will be returned. 
+ + return _findNodeTemplateContainingAttribute(); + + } + + // Attributes can be explicitly created as part of the type definition + // or a property name can be implicitly used as an attribute name + private NodeTemplate _findNodeTemplateContainingAttribute() { + NodeTemplate nodeTpl = _findNodeTemplate((String) args.get(0)); + if (nodeTpl != null && + !_attributeExistsInType(nodeTpl.getTypeDefinition()) && + !nodeTpl.getProperties().keySet().contains(getAttributeName())) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE150", String.format( + "KeyError: Attribute \"%s\" was not found in node template \"%s\"", + getAttributeName(), nodeTpl.getName()))); + } + return nodeTpl; + } + + private boolean _attributeExistsInType(StatefulEntityType typeDefinition) { + LinkedHashMap attrsDef = typeDefinition.getAttributesDef(); + return attrsDef.get(getAttributeName()) != null; + } + + private NodeTemplate _findHostContainingAttribute(String nodeTemplateName) { + NodeTemplate nodeTemplate = _findNodeTemplate(nodeTemplateName); + if (nodeTemplate != null) { + LinkedHashMap hostedOnRel = + (LinkedHashMap) EntityType.TOSCA_DEF.get(HOSTED_ON); + for (RequirementAssignment r : nodeTemplate.getRequirements().getAll()) { + String targetName = r.getNodeTemplateName(); + NodeTemplate targetNode = _findNodeTemplate(targetName); + NodeType targetType = (NodeType) targetNode.getTypeDefinition(); + for (CapabilityTypeDef capability : targetType.getCapabilitiesObjects()) { +// if(((ArrayList)hostedOnRel.get("valid_target_types")).contains(capability.getType())) { + if (capability.inheritsFrom((ArrayList) hostedOnRel.get("valid_target_types"))) { + if (_attributeExistsInType(targetType)) { + return targetNode; + } + return _findHostContainingAttribute(targetName); + } + } + } + } + return null; + } + + + private NodeTemplate _findNodeTemplate(String nodeTemplateName) { + if (nodeTemplateName.equals(HOST)) { + // Currently this is the only way to 
tell whether the function + // is used within the outputs section of the TOSCA template. + if (context instanceof ArrayList) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE151", + "ValueError: \"get_attribute: [ HOST, ... ]\" is not allowed in \"outputs\" section of the TOSCA template")); + return null; + } + NodeTemplate nodeTpl = _findHostContainingAttribute(SELF); + if (nodeTpl == null) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE152", String.format( + "ValueError: \"get_attribute: [ HOST, ... ]\" was used in " + + "node template \"%s\" but \"%s\" was not found in " + + "the relationship chain", ((NodeTemplate) context).getName(), HOSTED_ON))); + return null; + } + return nodeTpl; + } + if (nodeTemplateName.equals(TARGET)) { + if (!(((EntityTemplate) context).getTypeDefinition() instanceof RelationshipType)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE153", + "KeyError: \"TARGET\" keyword can only be used in context " + + " to \"Relationships\" target node")); + return null; + } + return ((RelationshipTemplate) context).getTarget(); + } + if (nodeTemplateName.equals(SOURCE)) { + if (!(((EntityTemplate) context).getTypeDefinition() instanceof RelationshipType)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE154", + "KeyError: \"SOURCE\" keyword can only be used in context " + + " to \"Relationships\" source node")); + return null; + } + return ((RelationshipTemplate) context).getTarget(); + } + String name; + if (nodeTemplateName.equals(SELF) && !(context instanceof ArrayList)) { + name = ((NodeTemplate) context).getName(); + } else { + name = nodeTemplateName; + } + for (NodeTemplate nt : toscaTpl.getNodeTemplates()) { + if (nt.getName().equals(name)) { + return nt; + } + } + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE155", String.format( + 
"KeyError: Node template \"%s\" was not found", nodeTemplateName))); + return null; + } + + public AttributeDef _findReqOrCapAttribute(String reqOrCap, String attrName) { + + NodeTemplate nodeTpl = _findNodeTemplate((String) args.get(0)); + // Find attribute in node template's requirements + for (RequirementAssignment r : nodeTpl.getRequirements().getAll()) { + String nodeName = r.getNodeTemplateName(); + if (r.getName().equals(reqOrCap)) { + NodeTemplate nodeTemplate = _findNodeTemplate(nodeName); + return _getCapabilityAttribute(nodeTemplate, r.getName(), attrName); + } + } + // If requirement was not found, look in node template's capabilities + return _getCapabilityAttribute(nodeTpl, reqOrCap, attrName); + } + + private AttributeDef _getCapabilityAttribute(NodeTemplate nodeTemplate, + String capabilityName, + String attrName) { + // Gets a node template capability attribute + CapabilityAssignment cap = nodeTemplate.getCapabilities().getCapabilityByName(capabilityName); + + if (cap != null) { + AttributeDef attribute = null; + LinkedHashMap attrs = + cap.getDefinition().getAttributesDef(); + if (attrs != null && attrs.keySet().contains(attrName)) { + attribute = attrs.get(attrName); + } + if (attribute == null) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE156", String.format( + "KeyError: Attribute \"%s\" was not found in capability \"%s\" of node template \"%s\" referenced from node template \"%s\"", + attrName, capabilityName, nodeTemplate.getName(), ((NodeTemplate) context).getName()))); + } + return attribute; + } + String msg = String.format( + "Requirement/CapabilityAssignment \"%s\" referenced from node template \"%s\" was not found in node template \"%s\"", + capabilityName, ((NodeTemplate) context).getName(), nodeTemplate.getName()); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE157", "KeyError: " + msg)); + return null; + } + + String getNodeTemplateName() { + return 
(String) args.get(0); + } + + String getAttributeName() { + return (String) args.get(1); + } + +} + +/*python + +class GetAttribute(Function): +"""Get an attribute value of an entity defined in the service template + +Node template attributes values are set in runtime and therefore its the +responsibility of the Tosca engine to implement the evaluation of +get_attribute functions. + +Arguments: + +* Node template name | HOST. +* Attribute name. + +If the HOST keyword is passed as the node template name argument the +function will search each node template along the HostedOn relationship +chain until a node which contains the attribute is found. + +Examples: + +* { get_attribute: [ server, private_address ] } +* { get_attribute: [ HOST, private_address ] } +* { get_attribute: [ HOST, private_address, 0 ] } +* { get_attribute: [ HOST, private_address, 0, some_prop] } +""" + +def validate(self): + if len(self.args) < 2: + ValidationIssueCollector.appendException( + ValueError(_('Illegal arguments for function "{0}". Expected ' + 'arguments: "node-template-name", "req-or-cap"' + '(optional), "property name"' + ).format(GET_ATTRIBUTE))) + return + elif len(self.args) == 2: + self._find_node_template_containing_attribute() + else: + node_tpl = self._find_node_template(self.args[0]) + if node_tpl is None: + return + index = 2 + attrs = node_tpl.type_definition.get_attributes_def() + found = [attrs[self.args[1]]] if self.args[1] in attrs else [] + if found: + attr = found[0] + else: + index = 3 + # then check the req or caps + attr = self._find_req_or_cap_attribute(self.args[1], + self.args[2]) + + value_type = attr.schema['type'] + if len(self.args) > index: + for elem in self.args[index:]: + if value_type == "list": + if not isinstance(elem, int): + ValidationIssueCollector.appendException( + ValueError(_('Illegal arguments for function' + ' "{0}". 
"{1}" Expected positive' + ' integer argument' + ).format(GET_ATTRIBUTE, elem))) + value_type = attr.schema['entry_schema']['type'] + elif value_type == "map": + value_type = attr.schema['entry_schema']['type'] + elif value_type in Schema.PROPERTY_TYPES: + ValidationIssueCollector.appendException( + ValueError(_('Illegal arguments for function' + ' "{0}". Unexpected attribute/' + 'index value "{1}"' + ).format(GET_ATTRIBUTE, elem))) + return + else: # It is a complex type + data_type = DataType(value_type) + props = data_type.get_all_properties() + found = [props[elem]] if elem in props else [] + if found: + prop = found[0] + value_type = prop.schema['type'] + else: + ValidationIssueCollector.appendException( + KeyError(_('Illegal arguments for function' + ' "{0}". Attribute name "{1}" not' + ' found in "{2}"' + ).format(GET_ATTRIBUTE, + elem, + value_type))) + +def result(self): + return self + +def get_referenced_node_template(self): + """Gets the NodeTemplate instance the get_attribute function refers to. + + If HOST keyword was used as the node template argument, the node + template which contains the attribute along the HostedOn relationship + chain will be returned. 
+ """ + return self._find_node_template_containing_attribute() + +# Attributes can be explicitly created as part of the type definition +# or a property name can be implicitly used as an attribute name +def _find_node_template_containing_attribute(self): + node_tpl = self._find_node_template(self.args[0]) + if node_tpl and \ + not self._attribute_exists_in_type(node_tpl.type_definition) \ + and self.attribute_name not in node_tpl.get_properties(): + ValidationIssueCollector.appendException( + KeyError(_('Attribute "%(att)s" was not found in node ' + 'template "%(ntpl)s".') % + {'att': self.attribute_name, + 'ntpl': node_tpl.name})) + return node_tpl + +def _attribute_exists_in_type(self, type_definition): + attrs_def = type_definition.get_attributes_def() + found = [attrs_def[self.attribute_name]] \ + if self.attribute_name in attrs_def else [] + return len(found) == 1 + +def _find_host_containing_attribute(self, node_template_name=SELF): + node_template = self._find_node_template(node_template_name) + if node_template: + hosted_on_rel = EntityType.TOSCA_DEF[HOSTED_ON] + for r in node_template.requirements: + for requirement, target_name in r.items(): + target_node = self._find_node_template(target_name) + target_type = target_node.type_definition + for capability in target_type.get_capabilities_objects(): + if capability.type in \ + hosted_on_rel['valid_target_types']: + if self._attribute_exists_in_type(target_type): + return target_node + return self._find_host_containing_attribute( + target_name) + +def _find_node_template(self, node_template_name): + if node_template_name == HOST: + # Currently this is the only way to tell whether the function + # is used within the outputs section of the TOSCA template. + if isinstance(self.context, list): + ValidationIssueCollector.appendException( + ValueError(_( + '"get_attribute: [ HOST, ... 
]" is not allowed in ' + '"outputs" section of the TOSCA template.'))) + return + node_tpl = self._find_host_containing_attribute() + if not node_tpl: + ValidationIssueCollector.appendException( + ValueError(_( + '"get_attribute: [ HOST, ... ]" was used in node ' + 'template "{0}" but "{1}" was not found in ' + 'the relationship chain.').format(self.context.name, + HOSTED_ON))) + return + return node_tpl + if node_template_name == TARGET: + if not isinstance(self.context.type_definition, RelationshipType): + ValidationIssueCollector.appendException( + KeyError(_('"TARGET" keyword can only be used in context' + ' to "Relationships" target node'))) + return + return self.context.target + if node_template_name == SOURCE: + if not isinstance(self.context.type_definition, RelationshipType): + ValidationIssueCollector.appendException( + KeyError(_('"SOURCE" keyword can only be used in context' + ' to "Relationships" source node'))) + return + return self.context.source + name = self.context.name \ + if node_template_name == SELF and \ + not isinstance(self.context, list) \ + else node_template_name + for node_template in self.tosca_tpl.nodetemplates: + if node_template.name == name: + return node_template + ValidationIssueCollector.appendException( + KeyError(_( + 'Node template "{0}" was not found.' 
+ ).format(node_template_name))) + +def _find_req_or_cap_attribute(self, req_or_cap, attr_name): + node_tpl = self._find_node_template(self.args[0]) + # Find attribute in node template's requirements + for r in node_tpl.requirements: + for req, node_name in r.items(): + if req == req_or_cap: + node_template = self._find_node_template(node_name) + return self._get_capability_attribute( + node_template, + req, + attr_name) + # If requirement was not found, look in node template's capabilities + return self._get_capability_attribute(node_tpl, + req_or_cap, + attr_name) + +def _get_capability_attribute(self, + node_template, + capability_name, + attr_name): + """Gets a node template capability attribute.""" + caps = node_template.get_capabilities() + if caps and capability_name in caps.keys(): + cap = caps[capability_name] + attribute = None + attrs = cap.definition.get_attributes_def() + if attrs and attr_name in attrs.keys(): + attribute = attrs[attr_name] + if not attribute: + ValidationIssueCollector.appendException( + KeyError(_('Attribute "%(attr)s" was not found in ' + 'capability "%(cap)s" of node template ' + '"%(ntpl1)s" referenced from node template ' + '"%(ntpl2)s".') % {'attr': attr_name, + 'cap': capability_name, + 'ntpl1': node_template.name, + 'ntpl2': self.context.name})) + return attribute + msg = _('Requirement/CapabilityAssignment "{0}" referenced from node template ' + '"{1}" was not found in node template "{2}".').format( + capability_name, + self.context.name, + node_template.name) + ValidationIssueCollector.appendException(KeyError(msg)) + +@property +def node_template_name(self): + return self.args[0] + +@property +def attribute_name(self): + return self.args[1] +*/ \ No newline at end of file diff --git a/src/main/java/org/onap/sdc/toscaparser/api/functions/GetInput.java b/src/main/java/org/onap/sdc/toscaparser/api/functions/GetInput.java new file mode 100644 index 0000000..262d99a --- /dev/null +++ 
b/src/main/java/org/onap/sdc/toscaparser/api/functions/GetInput.java @@ -0,0 +1,137 @@ +package org.onap.sdc.toscaparser.api.functions; + +import org.onap.sdc.toscaparser.api.DataEntity; +import org.onap.sdc.toscaparser.api.TopologyTemplate; +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; +import org.onap.sdc.toscaparser.api.parameters.Input; + +import java.util.ArrayList; +import java.util.LinkedHashMap; + +public class GetInput extends Function { + + public GetInput(TopologyTemplate toscaTpl, Object context, String name, ArrayList _args) { + super(toscaTpl,context,name,_args); + + } + + @Override + void validate() { +// if(args.size() != 1) { +// //PA - changed to WARNING from CRITICAL after talking to Renana, 22/05/2017 +// ThreadLocalsHolder.getCollector().appendWarning(String.format( +// "ValueError: Expected one argument for function \"get_input\" but received \"%s\"", +// args.toString())); +// } + if(args.size() > 2) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE009", String.format( + "ValueError: Expected max 2 arguments for function \"get_input\" but received \"%s\"", + args.size()))); + } + boolean bFound = false; + for(Input inp: toscaTpl.getInputs()) { + if(inp.getName().equals(args.get(0))) { + bFound = true; + break; + } + } + if(!bFound) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE158", String.format( + "UnknownInputError: Unknown input \"%s\"",args.get(0)))); + } + } + + public Object result() { + if(toscaTpl.getParsedParams() != null && + toscaTpl.getParsedParams().get(getInputName()) != null) { + LinkedHashMap ttinp = (LinkedHashMap)toscaTpl.getTpl().get("inputs"); + LinkedHashMap ttinpinp = (LinkedHashMap)ttinp.get(getInputName()); + String type = (String)ttinpinp.get("type"); + + Object value = DataEntity.validateDatatype( + type, 
toscaTpl.getParsedParams().get(getInputName()),null,null,null); + //SDC resolving Get Input + if (value instanceof ArrayList){ + if(args.size() == 2 && args.get(1) instanceof Integer && ((ArrayList) value).size()> (Integer)args.get(1)){ + return ((ArrayList) value).get((Integer) args.get(1)); + } + else{ + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE273",String.format( + "GetInputError: cannot resolve input name \"%s\", the expected structure is an argument with a name of input type list and a second argument with an index in the list", args.get(0)))); + return null; + } + } + return value; + } + + Input inputDef = null; + for(Input inpDef: toscaTpl.getInputs()) { + if(getInputName().equals(inpDef.getName())) { + inputDef = inpDef; + break; + } + } + if(inputDef != null) { + if (args.size() == 2 && inputDef.getDefault() != null && inputDef.getDefault() instanceof ArrayList){ + if ( args.get(1) instanceof Integer + && ((ArrayList) inputDef.getDefault()).size()> ((Integer)args.get(1)).intValue()) { + return ((ArrayList) inputDef.getDefault()).get(((Integer)args.get(1)).intValue()); + }else{ + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE274",(String.format( + "GetInputError: cannot resolve input Def name \"%s\", the expected structure is an argument with a name of input type list and a second argument with an index in the list", args.get(0))))); + return null; + } + } + return inputDef.getDefault(); + } + return null; + } + + public String getInputName() { + return (String)args.get(0); + } + +} + +/*python + +class GetInput(Function): +"""Get a property value declared within the input of the service template. + +Arguments: + +* Input name. 
+ +Example: + +* get_input: port +""" + +def validate(self): + if len(self.args) != 1: + ValidationIssueCollector.appendException( + ValueError(_( + 'Expected one argument for function "get_input" but ' + 'received "%s".') % self.args)) + inputs = [input.name for input in self.tosca_tpl.inputs] + if self.args[0] not in inputs: + ValidationIssueCollector.appendException( + UnknownInputError(input_name=self.args[0])) + +def result(self): + if self.tosca_tpl.parsed_params and \ + self.input_name in self.tosca_tpl.parsed_params: + return DataEntity.validate_datatype( + self.tosca_tpl.tpl['inputs'][self.input_name]['type'], + self.tosca_tpl.parsed_params[self.input_name]) + + input = [input_def for input_def in self.tosca_tpl.inputs + if self.input_name == input_def.name][0] + return input.default + +@property +def input_name(self): + return self.args[0] + +*/ \ No newline at end of file diff --git a/src/main/java/org/onap/sdc/toscaparser/api/functions/GetOperationOutput.java b/src/main/java/org/onap/sdc/toscaparser/api/functions/GetOperationOutput.java new file mode 100644 index 0000000..342e18a --- /dev/null +++ b/src/main/java/org/onap/sdc/toscaparser/api/functions/GetOperationOutput.java @@ -0,0 +1,229 @@ +package org.onap.sdc.toscaparser.api.functions; + +import org.onap.sdc.toscaparser.api.EntityTemplate; +import org.onap.sdc.toscaparser.api.NodeTemplate; +import org.onap.sdc.toscaparser.api.RelationshipTemplate; +import org.onap.sdc.toscaparser.api.TopologyTemplate; +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.elements.InterfacesDef; +import org.onap.sdc.toscaparser.api.elements.RelationshipType; +import org.onap.sdc.toscaparser.api.elements.StatefulEntityType; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +import java.util.ArrayList; + + +public class GetOperationOutput extends Function { + + public GetOperationOutput(TopologyTemplate ttpl, Object context, String name, ArrayList args) 
{ + super(ttpl,context,name,args); + } + + @Override + public void validate() { + if(args.size() == 4) { + _findNodeTemplate((String)args.get(0)); + String interfaceName = _findInterfaceName((String)args.get(1)); + _findOperationName(interfaceName,(String)args.get(2)); + } + else { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE159", + "ValueError: Illegal arguments for function \"get_operation_output\". " + + "Expected arguments: \"template_name\",\"interface_name\"," + + "\"operation_name\",\"output_variable_name\"")); + } + } + + private String _findInterfaceName(String _interfaceName) { + boolean bFound = false; + for(String sect: InterfacesDef.SECTIONS) { + if(sect.equals(_interfaceName)) { + bFound = true; + break; + } + } + if(bFound) { + return _interfaceName; + } + else { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE160", String.format( + "ValueError: invalid interface name \"%s\" in \"get_operation_output\"", + _interfaceName))); + return null; + } + } + + private String _findOperationName(String interfaceName,String operationName) { + + if(interfaceName.equals("Configure") || + interfaceName.equals("tosca.interfaces.node.relationship.Configure")) { + boolean bFound = false; + for(String sect: StatefulEntityType.interfacesRelationshipConfigureOperations) { + if(sect.equals(operationName)) { + bFound = true; + break; + } + } + if(bFound) { + return operationName; + } + else { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE161", String.format( + "ValueError: Invalid operation of Configure interface \"%s\" in \"get_operation_output\"", + operationName))); + return null; + } + } + if(interfaceName.equals("Standard") || + interfaceName.equals("tosca.interfaces.node.lifecycle.Standard")) { + boolean bFound = false; + for(String sect: StatefulEntityType.interfacesNodeLifecycleOperations) { + if(sect.equals(operationName)) { + bFound = true; + 
break; + } + } + if(bFound) { + return operationName; + } + else { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE162", String.format( + "ValueError: Invalid operation of Configure interface \"%s\" in \"get_operation_output\"", + operationName))); + return null; + } + } + else { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE163", String.format( + "ValueError: Invalid interface name \"%s\" in \"get_operation_output\"", + interfaceName))); + return null; + } + } + + private NodeTemplate _findNodeTemplate(String nodeTemplateName) { + if(nodeTemplateName.equals(TARGET)) { + if(!(((EntityTemplate)context).getTypeDefinition() instanceof RelationshipType)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE164", + "KeyError: \"TARGET\" keyword can only be used in context " + + " to \"Relationships\" target node")); + return null; + } + return ((RelationshipTemplate)context).getTarget(); + } + if(nodeTemplateName.equals(SOURCE)) { + if(!(((EntityTemplate)context).getTypeDefinition() instanceof RelationshipType)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE165", + "KeyError: \"SOURCE\" keyword can only be used in context " + + " to \"Relationships\" source node")); + return null; + } + return ((RelationshipTemplate)context).getTarget(); + } + String name; + if(nodeTemplateName.equals(SELF) && !(context instanceof ArrayList)) { + name = ((NodeTemplate)context).getName(); + } + else { + name = nodeTemplateName; + } + for(NodeTemplate nt: toscaTpl.getNodeTemplates()) { + if(nodeTemplateName.equals(name)) { + return nt; + } + } + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE166", String.format( + "KeyError: Node template \"%s\" was not found",nodeTemplateName))); + return null; + } + + @Override + public Object result() { + return this; + } + +} + +/*python + +class 
GetOperationOutput(Function): +def validate(self): + if len(self.args) == 4: + self._find_node_template(self.args[0]) + interface_name = self._find_interface_name(self.args[1]) + self._find_operation_name(interface_name, self.args[2]) + else: + ValidationIssueCollector.appendException( + ValueError(_('Illegal arguments for function "{0}". Expected ' + 'arguments: "template_name","interface_name",' + '"operation_name","output_variable_name"' + ).format(GET_OPERATION_OUTPUT))) + return + +def _find_interface_name(self, interface_name): + if interface_name in toscaparser.elements.interfaces.SECTIONS: + return interface_name + else: + ValidationIssueCollector.appendException( + ValueError(_('Enter a valid interface name' + ).format(GET_OPERATION_OUTPUT))) + return + +def _find_operation_name(self, interface_name, operation_name): + if(interface_name == 'Configure' or + interface_name == 'tosca.interfaces.node.relationship.Configure'): + if(operation_name in + StatefulEntityType. + interfaces_relationship_configure_operations): + return operation_name + else: + ValidationIssueCollector.appendException( + ValueError(_('Enter an operation of Configure interface' + ).format(GET_OPERATION_OUTPUT))) + return + elif(interface_name == 'Standard' or + interface_name == 'tosca.interfaces.node.lifecycle.Standard'): + if(operation_name in + StatefulEntityType.interfaces_node_lifecycle_operations): + return operation_name + else: + ValidationIssueCollector.appendException( + ValueError(_('Enter an operation of Standard interface' + ).format(GET_OPERATION_OUTPUT))) + return + else: + ValidationIssueCollector.appendException( + ValueError(_('Enter a valid operation name' + ).format(GET_OPERATION_OUTPUT))) + return + +def _find_node_template(self, node_template_name): + if node_template_name == TARGET: + if not isinstance(self.context.type_definition, RelationshipType): + ValidationIssueCollector.appendException( + KeyError(_('"TARGET" keyword can only be used in context' + ' to 
"Relationships" target node'))) + return + return self.context.target + if node_template_name == SOURCE: + if not isinstance(self.context.type_definition, RelationshipType): + ValidationIssueCollector.appendException( + KeyError(_('"SOURCE" keyword can only be used in context' + ' to "Relationships" source node'))) + return + return self.context.source + name = self.context.name \ + if node_template_name == SELF and \ + not isinstance(self.context, list) \ + else node_template_name + for node_template in self.tosca_tpl.nodetemplates: + if node_template.name == name: + return node_template + ValidationIssueCollector.appendException( + KeyError(_( + 'Node template "{0}" was not found.' + ).format(node_template_name))) + +def result(self): + return self +*/ \ No newline at end of file diff --git a/src/main/java/org/onap/sdc/toscaparser/api/functions/GetProperty.java b/src/main/java/org/onap/sdc/toscaparser/api/functions/GetProperty.java new file mode 100644 index 0000000..fca5f7f --- /dev/null +++ b/src/main/java/org/onap/sdc/toscaparser/api/functions/GetProperty.java @@ -0,0 +1,628 @@ +package org.onap.sdc.toscaparser.api.functions; + +import org.onap.sdc.toscaparser.api.*; +import org.onap.sdc.toscaparser.api.elements.CapabilityTypeDef; +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; + +import java.util.ArrayList; +import java.util.LinkedHashMap; + +import org.onap.sdc.toscaparser.api.*; +import org.onap.sdc.toscaparser.api.elements.EntityType; +import org.onap.sdc.toscaparser.api.elements.NodeType; +import org.onap.sdc.toscaparser.api.elements.PropertyDef; +import org.onap.sdc.toscaparser.api.elements.RelationshipType; +import org.onap.sdc.toscaparser.api.elements.StatefulEntityType; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +public class GetProperty extends Function { + // Get a property value of an entity defined in the same service template + + // Arguments: + + // * Node template name | SELF | HOST | SOURCE | TARGET. 
+ // * Requirement or capability name (optional). + // * Property name. + + // If requirement or capability name is specified, the behavior is as follows: + // The req or cap name is first looked up in the specified node template's + // requirements. + // If found, it would search for a matching capability + // of an other node template and get its property as specified in function + // arguments. + // Otherwise, the req or cap name would be looked up in the specified + // node template's capabilities and if found, it would return the property of + // the capability as specified in function arguments. + + // Examples: + + // * { get_property: [ mysql_server, port ] } + // * { get_property: [ SELF, db_port ] } + // * { get_property: [ SELF, database_endpoint, port ] } + // * { get_property: [ SELF, database_endpoint, port, 1 ] } + + + public GetProperty(TopologyTemplate ttpl, Object context, String name, ArrayList args) { + super(ttpl,context,name,args); + } + + @Override + void validate() { + if(args.size() < 2) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE167", + "ValueError: Illegal arguments for function \"get_property\". 
Expected arguments: \"node-template-name\", \"req-or-cap\" (optional), \"property name.\"")); + return; + } + if(args.size() == 2) { + Property foundProp = _findProperty((String)args.get(1)); + if(foundProp == null) { + return; + } + Object prop = foundProp.getValue(); + if(prop instanceof Function) { + getFunction(toscaTpl,context, prop, toscaTpl.getResolveGetInput()); + } + } + else if(args.size() >= 3) { + // do not use _find_property to avoid raise KeyError + // if the prop is not found + // First check if there is property with this name + NodeTemplate nodeTpl = _findNodeTemplate((String)args.get(0)); + LinkedHashMap props; + if(nodeTpl != null) { + props = nodeTpl.getProperties(); + } + else { + props = new LinkedHashMap<>(); + } + int index = 2; + Object propertyValue; + if(props.get(args.get(1)) != null) { + propertyValue = ((Property)props.get(args.get(1))).getValue(); + } + else { + index = 3; + // then check the req or caps + propertyValue = _findReqOrCapProperty((String)args.get(1),(String)args.get(2)); + } + + if(args.size() > index) { + for(Object elem: args.subList(index,args.size()-1)) { + if(propertyValue instanceof ArrayList) { + int intElem = (int)elem; + propertyValue = _getIndexValue(propertyValue,intElem); + } + else { + propertyValue = _getAttributeValue(propertyValue,(String)elem); + } + } + } + } + } + + @SuppressWarnings("unchecked") + private Object _findReqOrCapProperty(String reqOrCap,String propertyName) { + NodeTemplate nodeTpl = _findNodeTemplate((String)args.get(0)); + if(nodeTpl == null) { + return null; + } + // look for property in node template's requirements + for(RequirementAssignment req: nodeTpl.getRequirements().getAll()) { + String nodeName = req.getNodeTemplateName(); + if(req.getName().equals(reqOrCap)) { + NodeTemplate nodeTemplate = _findNodeTemplate(nodeName); + return _getCapabilityProperty(nodeTemplate,req.getName(),propertyName,true); + } + } + // If requirement was not found, look in node template's capabilities + 
return _getCapabilityProperty(nodeTpl,reqOrCap,propertyName,true); + } + + private Object _getCapabilityProperty(NodeTemplate nodeTemplate, + String capabilityName, + String propertyName, + boolean throwErrors) { + + // Gets a node template capability property + Object property = null; + CapabilityAssignment cap = nodeTemplate.getCapabilities().getCapabilityByName(capabilityName); + if(cap != null) { + LinkedHashMap props = cap.getProperties(); + if(props != null && props.get(propertyName) != null) { + property = ((Property)props.get(propertyName)).getValue(); + } + if(property == null && throwErrors) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE168", String.format( + "KeyError: Property \"%s\" was not found in capability \"%s\" of node template \"%s\" referenced from node template \"%s\"", + propertyName,capabilityName,nodeTemplate.getName(),((NodeTemplate)context).getName()))); + } + return property; + } + if(throwErrors) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE169", String.format( + "KeyError: Requirement/CapabilityAssignment \"%s\" referenced from node template \"%s\" was not found in node template \"%s\"", + capabilityName,((NodeTemplate)context).getName(),nodeTemplate.getName()))); + } + + return null; + } + + private Property _findProperty(String propertyName) { + NodeTemplate nodeTpl = _findNodeTemplate((String)args.get(0)); + if(nodeTpl == null) { + return null; + } + LinkedHashMap props = nodeTpl.getProperties(); + Property found = props.get(propertyName); + if(found == null) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE170", String.format( + "KeyError: Property \"%s\" was not found in node template \"%s\"", + propertyName,nodeTpl.getName()))); + } + return found; + } + + private NodeTemplate _findNodeTemplate(String nodeTemplateName) { + if(nodeTemplateName.equals(SELF)) { + return (NodeTemplate)context; + } + // enable 
the HOST value in the function + if(nodeTemplateName.equals(HOST)) { + NodeTemplate node = _findHostContainingProperty(null); + if(node == null) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE171", String.format( + "KeyError: Property \"%s\" was not found in capability \"%s\" of node template \"%s\"", + (String)args.get(2),(String)args.get(1),((NodeTemplate)context).getName()))); + return null; + } + return node; + } + if(nodeTemplateName.equals(TARGET)) { + if(!(((RelationshipTemplate)context).getTypeDefinition() instanceof RelationshipType)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE172", + "KeyError: \"TARGET\" keyword can only be used in context to \"Relationships\" target node")); + return null; + } + return ((RelationshipTemplate)context).getTarget(); + } + if(nodeTemplateName.equals(SOURCE)) { + if(!(((RelationshipTemplate)context).getTypeDefinition() instanceof RelationshipType)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE173", + "KeyError: \"SOURCE\" keyword can only be used in context to \"Relationships\" source node")); + return null; + } + return ((RelationshipTemplate)context).getSource(); + } + if(toscaTpl.getNodeTemplates() == null) { + return null; + } + for(NodeTemplate nodeTemplate: toscaTpl.getNodeTemplates()) { + if(nodeTemplate.getName().equals(nodeTemplateName)) { + return nodeTemplate; + } + } + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE174", String.format( + "KeyError: Node template \"%s\" was not found. 
Referenced from Node Template \"%s\"", + nodeTemplateName,((NodeTemplate)context).getName()))); + + return null; + } + + @SuppressWarnings("rawtypes") + private Object _getIndexValue(Object value,int index) { + if(value instanceof ArrayList) { + if(index < ((ArrayList)value).size()) { + return ((ArrayList)value).get(index); + } + else { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE175", String.format( + "KeyError: Property \"%s\" found in capability \"%s\" referenced from node template \"%s\" must have an element with index %d", + args.get(2),args.get(1),((NodeTemplate)context).getName(),index))); + + } + } + else { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE176", String.format( + "KeyError: Property \"%s\" found in capability \"%s\" referenced from node template \"%s\" must be a list", + args.get(2),args.get(1),((NodeTemplate)context).getName()))); + } + return null; + } + + @SuppressWarnings("unchecked") + private Object _getAttributeValue(Object value,String attribute) { + if(value instanceof LinkedHashMap) { + Object ov = ((LinkedHashMap)value).get(attribute); + if(ov != null) { + return ov; + } + else { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE177", String.format( + "KeyError: Property \"%s\" found in capability \"%s\" referenced from node template \"%s\" must have an attribute named \"%s\"", + args.get(2),args.get(1),((NodeTemplate)context).getName(),attribute))); + } + } + else { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE178", String.format( + "KeyError: Property \"%s\" found in capability \"%s\" referenced from node template \"%s\" must be a dict", + args.get(2),args.get(1),((NodeTemplate)context).getName()))); + } + return null; + } + + // Add this functions similar to get_attribute case + private NodeTemplate _findHostContainingProperty(String nodeTemplateName) { + 
if(nodeTemplateName == null) { + nodeTemplateName = SELF; + } + NodeTemplate nodeTemplate = _findNodeTemplate(nodeTemplateName); + LinkedHashMap hostedOnRel = (LinkedHashMap) + EntityType.TOSCA_DEF.get(HOSTED_ON); + for(RequirementAssignment requirement: nodeTemplate.getRequirements().getAll()) { + String targetName = requirement.getNodeTemplateName(); + NodeTemplate targetNode = _findNodeTemplate(targetName); + NodeType targetType = (NodeType)targetNode.getTypeDefinition(); + for(CapabilityTypeDef capDef: targetType.getCapabilitiesObjects()) { + if(capDef.inheritsFrom((ArrayList)hostedOnRel.get("valid_target_types"))) { + if(_propertyExistsInType(targetType)) { + return targetNode; + } + // If requirement was not found, look in node + // template's capabilities + if(args.size() > 2 && + _getCapabilityProperty(targetNode,(String)args.get(1),(String)args.get(2),false) != null) { + return targetNode; + } + + return _findHostContainingProperty(targetName); + } + } + + } + return null; + } + + private boolean _propertyExistsInType(StatefulEntityType typeDefinition) { + LinkedHashMap propsDef = typeDefinition.getPropertiesDef(); + return propsDef.keySet().contains((String)args.get(1)); + } + + @Override + public Object result() { + Object propertyValue; + if(args.size() >= 3) { + // First check if there is property with this name + NodeTemplate nodeTpl = _findNodeTemplate((String)args.get(0)); + LinkedHashMap props; + if(nodeTpl != null) { + props = nodeTpl.getProperties(); + } + else { + props = new LinkedHashMap<>(); + } + int index = 2; + if(props.get(args.get(1)) != null) { + propertyValue = ((Property)props.get(args.get(1))).getValue(); + } + else { + index = 3; + // then check the req or caps + propertyValue = _findReqOrCapProperty((String)args.get(1),(String)args.get(2)); + } + + if(args.size() > index) { + for(Object elem: args.subList(index,args.size()-1)) { + if(propertyValue instanceof ArrayList) { + int intElem = (int)elem; + propertyValue = 
_getIndexValue(propertyValue,intElem); + } + else { + propertyValue = _getAttributeValue(propertyValue,(String)elem); + } + } + } + } + else { + propertyValue = _findProperty((String)args.get(1)).getValue(); + } + if(propertyValue instanceof Function) { + return ((Function)propertyValue).result(); + } + return getFunction(toscaTpl,context,propertyValue, toscaTpl.getResolveGetInput()); + } + + public String getNodeTemplateName() { + return (String)args.get(0); + } + + public String getPropertyName() { + if(args.size() > 2) { + return (String)args.get(2); + } + return (String)args.get(1); + } + + public String getReqorCap() { + if(args.size() > 2) { + return (String)args.get(1); + } + return null; + } + +} + +/*python + +class GetProperty(Function): +"""Get a property value of an entity defined in the same service template. + +Arguments: + +* Node template name | SELF | HOST | SOURCE | TARGET. +* Requirement or capability name (optional). +* Property name. + +If requirement or capability name is specified, the behavior is as follows: +The req or cap name is first looked up in the specified node template's +requirements. +If found, it would search for a matching capability +of an other node template and get its property as specified in function +arguments. +Otherwise, the req or cap name would be looked up in the specified +node template's capabilities and if found, it would return the property of +the capability as specified in function arguments. 
+ +Examples: + +* { get_property: [ mysql_server, port ] } +* { get_property: [ SELF, db_port ] } +* { get_property: [ SELF, database_endpoint, port ] } +* { get_property: [ SELF, database_endpoint, port, 1 ] } +""" + +def validate(self): + if len(self.args) < 2: + ValidationIssueCollector.appendException( + ValueError(_( + 'Expected arguments: "node-template-name", "req-or-cap" ' + '(optional), "property name".'))) + return + if len(self.args) == 2: + found_prop = self._find_property(self.args[1]) + if not found_prop: + return + prop = found_prop.value + if not isinstance(prop, Function): + get_function(self.tosca_tpl, self.context, prop) + elif len(self.args) >= 3: + # do not use _find_property to avoid raise KeyError + # if the prop is not found + # First check if there is property with this name + node_tpl = self._find_node_template(self.args[0]) + props = node_tpl.get_properties() if node_tpl else [] + index = 2 + found = [props[self.args[1]]] if self.args[1] in props else [] + if found: + property_value = found[0].value + else: + index = 3 + # then check the req or caps + property_value = self._find_req_or_cap_property(self.args[1], + self.args[2]) + if len(self.args) > index: + for elem in self.args[index:]: + if isinstance(property_value, list): + int_elem = int(elem) + property_value = self._get_index_value(property_value, + int_elem) + else: + property_value = self._get_attribute_value( + property_value, + elem) + +def _find_req_or_cap_property(self, req_or_cap, property_name): + node_tpl = self._find_node_template(self.args[0]) + # Find property in node template's requirements + for r in node_tpl.requirements: + for req, node_name in r.items(): + if req == req_or_cap: + node_template = self._find_node_template(node_name) + return self._get_capability_property( + node_template, + req, + property_name) + # If requirement was not found, look in node template's capabilities + return self._get_capability_property(node_tpl, + req_or_cap, + property_name) + 
+def _get_capability_property(self, + node_template, + capability_name, + property_name): + """Gets a node template capability property.""" + caps = node_template.get_capabilities() + if caps and capability_name in caps.keys(): + cap = caps[capability_name] + property = None + props = cap.get_properties() + if props and property_name in props.keys(): + property = props[property_name].value + if not property: + ValidationIssueCollector.appendException( + KeyError(_('Property "%(prop)s" was not found in ' + 'capability "%(cap)s" of node template ' + '"%(ntpl1)s" referenced from node template ' + '"%(ntpl2)s".') % {'prop': property_name, + 'cap': capability_name, + 'ntpl1': node_template.name, + 'ntpl2': self.context.name})) + return property + msg = _('Requirement/CapabilityAssignment "{0}" referenced from node template ' + '"{1}" was not found in node template "{2}".').format( + capability_name, + self.context.name, + node_template.name) + ValidationIssueCollector.appendException(KeyError(msg)) + +def _find_property(self, property_name): + node_tpl = self._find_node_template(self.args[0]) + if not node_tpl: + return + props = node_tpl.get_properties() + found = [props[property_name]] if property_name in props else [] + if len(found) == 0: + ValidationIssueCollector.appendException( + KeyError(_('Property "%(prop)s" was not found in node ' + 'template "%(ntpl)s".') % + {'prop': property_name, + 'ntpl': node_tpl.name})) + return None + return found[0] + +def _find_node_template(self, node_template_name): + if node_template_name == SELF: + return self.context + # enable the HOST value in the function + if node_template_name == HOST: + return self._find_host_containing_property() + if node_template_name == TARGET: + if not isinstance(self.context.type_definition, RelationshipType): + ValidationIssueCollector.appendException( + KeyError(_('"TARGET" keyword can only be used in context' + ' to "Relationships" target node'))) + return + return self.context.target + if 
node_template_name == SOURCE: + if not isinstance(self.context.type_definition, RelationshipType): + ValidationIssueCollector.appendException( + KeyError(_('"SOURCE" keyword can only be used in context' + ' to "Relationships" source node'))) + return + return self.context.source + if not hasattr(self.tosca_tpl, 'nodetemplates'): + return + for node_template in self.tosca_tpl.nodetemplates: + if node_template.name == node_template_name: + return node_template + ValidationIssueCollector.appendException( + KeyError(_( + 'Node template "{0}" was not found.' + ).format(node_template_name))) + +def _get_index_value(self, value, index): + if isinstance(value, list): + if index < len(value): + return value[index] + else: + ValidationIssueCollector.appendException( + KeyError(_( + "Property '{0}' found in capability '{1}'" + " referenced from node template {2}" + " must have an element with index {3}."). + format(self.args[2], + self.args[1], + self.context.name, + index))) + else: + ValidationIssueCollector.appendException( + KeyError(_( + "Property '{0}' found in capability '{1}'" + " referenced from node template {2}" + " must be a list.").format(self.args[2], + self.args[1], + self.context.name))) + +def _get_attribute_value(self, value, attibute): + if isinstance(value, dict): + if attibute in value: + return value[attibute] + else: + ValidationIssueCollector.appendException( + KeyError(_( + "Property '{0}' found in capability '{1}'" + " referenced from node template {2}" + " must have an attribute named {3}."). 
+ format(self.args[2], + self.args[1], + self.context.name, + attibute))) + else: + ValidationIssueCollector.appendException( + KeyError(_( + "Property '{0}' found in capability '{1}'" + " referenced from node template {2}" + " must be a dict.").format(self.args[2], + self.args[1], + self.context.name))) + +# Add this functions similar to get_attribute case +def _find_host_containing_property(self, node_template_name=SELF): + node_template = self._find_node_template(node_template_name) + hosted_on_rel = EntityType.TOSCA_DEF[HOSTED_ON] + for r in node_template.requirements: + for requirement, target_name in r.items(): + target_node = self._find_node_template(target_name) + target_type = target_node.type_definition + for capability in target_type.get_capabilities_objects(): + if capability.type in hosted_on_rel['valid_target_types']: + if self._property_exists_in_type(target_type): + return target_node + return self._find_host_containing_property( + target_name) + return None + +def _property_exists_in_type(self, type_definition): + props_def = type_definition.get_properties_def() + found = [props_def[self.args[1]]] \ + if self.args[1] in props_def else [] + return len(found) == 1 + +def result(self): + if len(self.args) >= 3: + # First check if there is property with this name + node_tpl = self._find_node_template(self.args[0]) + props = node_tpl.get_properties() if node_tpl else [] + index = 2 + found = [props[self.args[1]]] if self.args[1] in props else [] + if found: + property_value = found[0].value + else: + index = 3 + # then check the req or caps + property_value = self._find_req_or_cap_property(self.args[1], + self.args[2]) + if len(self.args) > index: + for elem in self.args[index:]: + if isinstance(property_value, list): + int_elem = int(elem) + property_value = self._get_index_value(property_value, + int_elem) + else: + property_value = self._get_attribute_value( + property_value, + elem) + else: + property_value = self._find_property(self.args[1]).value 
+ if isinstance(property_value, Function): + return property_value.result() + return get_function(self.tosca_tpl, + self.context, + property_value) + +@property +def node_template_name(self): + return self.args[0] + +@property +def property_name(self): + if len(self.args) > 2: + return self.args[2] + return self.args[1] + +@property +def req_or_cap(self): + if len(self.args) > 2: + return self.args[1] + return None +*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/functions/Token.java b/src/main/java/org/onap/sdc/toscaparser/api/functions/Token.java new file mode 100644 index 0000000..771345b --- /dev/null +++ b/src/main/java/org/onap/sdc/toscaparser/api/functions/Token.java @@ -0,0 +1,111 @@ +package org.onap.sdc.toscaparser.api.functions; + +import org.onap.sdc.toscaparser.api.TopologyTemplate; +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; + +import java.util.ArrayList; + +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +public class Token extends Function { + // Validate the function and provide an instance of the function + + //The token function is used within a TOSCA service template on a string to + //parse out (tokenize) substrings separated by one or more token characters + //within a larger string. + + //Arguments: + + //* The composite string that contains one or more substrings separated by + // token characters. + //* The string that contains one or more token characters that separate + // substrings within the composite string. + //* The integer indicates the index of the substring to return from the + // composite string. Note that the first substring is denoted by using + // the '0' (zero) integer value. 
+ + //Example: + + // [ get_attribute: [ my_server, data_endpoint, ip_address ], ':', 1 ] + + + public Token(TopologyTemplate ttpl, Object context, String name, ArrayList args) { + super(ttpl,context,name,args); + } + + @Override + public Object result() { + return this; + } + + @Override + void validate() { + if(args.size() < 3) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE180", + "ValueError: Invalid arguments for function \"token\". " + + "Expected at least three arguments")); + } + else { + if(!(args.get(1) instanceof String) || + ((String)args.get(1)).length() != 1) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE181", + "ValueError: Invalid arguments for function \"token\". " + + "Expected single char value as second argument")); + } + if(!(args.get(2) instanceof Integer)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE182", + "ValueError: Invalid arguments for function \"token\". " + + "Expected integer value as third argument")); + } + } + } + +} + +/*python + +class Token(Function): +"""Validate the function and provide an instance of the function + +The token function is used within a TOSCA service template on a string to +parse out (tokenize) substrings separated by one or more token characters +within a larger string. + + +Arguments: + +* The composite string that contains one or more substrings separated by + token characters. +* The string that contains one or more token characters that separate + substrings within the composite string. +* The integer indicates the index of the substring to return from the + composite string. Note that the first substring is denoted by using + the '0' (zero) integer value. 
+ +Example: + + [ get_attribute: [ my_server, data_endpoint, ip_address ], ':', 1 ] + +""" + +def validate(self): + if len(self.args) < 3: + ValidationIssueCollector.appendException( + ValueError(_('Invalid arguments for function "{0}". Expected ' + 'at least three arguments.').format(TOKEN))) + else: + if not isinstance(self.args[1], str) or len(self.args[1]) != 1: + ValidationIssueCollector.appendException( + ValueError(_('Invalid arguments for function "{0}". ' + 'Expected single char value as second ' + 'argument.').format(TOKEN))) + + if not isinstance(self.args[2], int): + ValidationIssueCollector.appendException( + ValueError(_('Invalid arguments for function "{0}". ' + 'Expected integer value as third ' + 'argument.').format(TOKEN))) + +def result(self): + return self +*/ \ No newline at end of file diff --git a/src/main/java/org/onap/sdc/toscaparser/api/parameters/Input.java b/src/main/java/org/onap/sdc/toscaparser/api/parameters/Input.java new file mode 100644 index 0000000..7e83cfb --- /dev/null +++ b/src/main/java/org/onap/sdc/toscaparser/api/parameters/Input.java @@ -0,0 +1,233 @@ +package org.onap.sdc.toscaparser.api.parameters; + +import org.onap.sdc.toscaparser.api.DataEntity; +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.LinkedHashMap; + +import org.onap.sdc.toscaparser.api.elements.EntityType; +import org.onap.sdc.toscaparser.api.elements.constraints.Constraint; +import org.onap.sdc.toscaparser.api.elements.constraints.Schema; + +public class Input { + + private static final String TYPE = "type"; + private static final String DESCRIPTION = "description"; + private static final String DEFAULT = "default"; + private static final String CONSTRAINTS = "constraints"; + private static final String REQUIRED = "required"; + private static final String STATUS = "status"; + private static final String 
ENTRY_SCHEMA = "entry_schema"; + + public static final String INTEGER = "integer"; + public static final String STRING = "string"; + public static final String BOOLEAN = "boolean"; + public static final String FLOAT = "float"; + public static final String LIST = "list"; + public static final String MAP = "map"; + public static final String JSON = "json"; + + private static String INPUTFIELD[] = { + TYPE, DESCRIPTION, DEFAULT, CONSTRAINTS, REQUIRED,STATUS, ENTRY_SCHEMA + }; + + private static String PRIMITIVE_TYPES[] = { + INTEGER, STRING, BOOLEAN, FLOAT, LIST, MAP, JSON + }; + + private String name; + private Schema schema; + private LinkedHashMap customDefs; + + public Input(){ + /** + * Added to support Input serialization + */ + } + + public Input(String _name,LinkedHashMap _schemaDict,LinkedHashMap _customDefs) { + name = _name; + schema = new Schema(_name,_schemaDict); + customDefs = _customDefs; + } + + public String getName() { + return name; + } + + public String getType() { + return schema.getType(); + } + + public String getDescription() { + return schema.getDescription(); + } + + public boolean isRequired() { + return schema.isRequired(); + } + + public Object getDefault() { + return schema.getDefault(); + } + + public ArrayList getConstraints() { + return schema.getConstraints(); + } + + public void validate(Object value) { + _validateField(); + _validateType(getType()); + if(value != null) { + _validateValue(value); + } + } + + private void _validateField() { + for(String key: schema.getSchema().keySet()) { + boolean bFound = false; + for(String ifld: INPUTFIELD) { + if(key.equals(ifld)) { + bFound = true; + break; + } + } + if(!bFound) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE214", String.format( + "UnknownFieldError: Input \"%s\" contains unknown field \"%s\"", + name,key))); + } + } + } + + private void _validateType(String inputType) { + boolean bFound = false; + for(String pt: Schema.PROPERTY_TYPES) 
{ + if(pt.equals(inputType)) { + bFound = true; + break; + } + } + + if(!bFound) { + if(customDefs.get(inputType) != null) { + bFound = true; + } + } + + if(!bFound) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE215", String.format( + "ValueError: Invalid type \"%s\"",inputType))); + } + } + + private void _validateValue(Object value) { + Object datatype = null; + if(EntityType.TOSCA_DEF.get(getType()) != null) { + datatype = EntityType.TOSCA_DEF.get(getType()); + } + else if(EntityType.TOSCA_DEF.get(EntityType.DATATYPE_NETWORK_PREFIX + getType()) != null) { + datatype = EntityType.TOSCA_DEF.get(EntityType.DATATYPE_NETWORK_PREFIX + getType()); + } + + String type = getType(); + // if it's one of the basic types DON'T look in customDefs + if(Arrays.asList(PRIMITIVE_TYPES).contains(type)) { + DataEntity.validateDatatype(getType(), value, null, (LinkedHashMap)datatype, null); + return; + } + else if(customDefs.get(getType()) != null) { + datatype = customDefs.get(getType()); + DataEntity.validateDatatype(getType(), value, (LinkedHashMap)datatype, customDefs, null); + return; + } + + DataEntity.validateDatatype(getType(), value, null, (LinkedHashMap)datatype, null); + } +} + +/*python + +from toscaparser.common.exception import ValidationIssueCollector +from toscaparser.common.exception import MissingRequiredFieldError +from toscaparser.common.exception import UnknownFieldError +from toscaparser.dataentity import DataEntity +from toscaparser.elements.constraints import Schema +from toscaparser.elements.entity_type import EntityType +from toscaparser.utils.gettextutils import _ + + +log = logging.getLogger('tosca') + + +class Input(object): + + INPUTFIELD = (TYPE, DESCRIPTION, DEFAULT, CONSTRAINTS, REQUIRED, STATUS, + ENTRY_SCHEMA) = ('type', 'description', 'default', + 'constraints', 'required', 'status', + 'entry_schema') + + def __init__(self, name, schema_dict): + self.name = name + self.schema = Schema(name, schema_dict) + 
+ self._validate_field() + self.validate_type(self.type) + + @property + def type(self): + return self.schema.type + + @property + def required(self): + return self.schema.required + + @property + def description(self): + return self.schema.description + + @property + def default(self): + return self.schema.default + + @property + def constraints(self): + return self.schema.constraints + + @property + def status(self): + return self.schema.status + + def validate(self, value=None): + if value is not None: + self._validate_value(value) + + def _validate_field(self): + for name in self.schema.schema: + if name not in self.INPUTFIELD: + ValidationIssueCollector.appendException( + UnknownFieldError(what='Input "%s"' % self.name, + field=name)) + + def validate_type(self, input_type): + if input_type not in Schema.PROPERTY_TYPES: + ValidationIssueCollector.appendException( + ValueError(_('Invalid type "%s".') % type)) + + # tODO(anyone) Need to test for any built-in datatype not just network + # that is, tosca.datatypes.* and not assume tosca.datatypes.network.* + # tODO(anyone) Add support for tosca.datatypes.Credential + def _validate_value(self, value): + tosca = EntityType.TOSCA_DEF + datatype = None + if self.type in tosca: + datatype = tosca[self.type] + elif EntityType.DATATYPE_NETWORK_PREFIX + self.type in tosca: + datatype = tosca[EntityType.DATATYPE_NETWORK_PREFIX + self.type] + + DataEntity.validate_datatype(self.type, value, None, datatype) + +*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/parameters/Output.java b/src/main/java/org/onap/sdc/toscaparser/api/parameters/Output.java new file mode 100644 index 0000000..093c6cf --- /dev/null +++ b/src/main/java/org/onap/sdc/toscaparser/api/parameters/Output.java @@ -0,0 +1,110 @@ +package org.onap.sdc.toscaparser.api.parameters; + +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; + +import java.util.LinkedHashMap; + +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; + 
+public class Output { + + private static final String DESCRIPTION = "description"; + public static final String VALUE = "value"; + private static final String OUTPUTFIELD[] = {DESCRIPTION, VALUE}; + + private String name; + private LinkedHashMap attrs;//TYPE??? + + public Output(String oname,LinkedHashMap oattrs) { + name = oname; + attrs = oattrs; + } + + public String getDescription() { + return (String)attrs.get(DESCRIPTION); + } + + public Object getValue() { + return attrs.get(VALUE); + } + + public void validate() { + _validateField(); + } + + private void _validateField() { + if(!(attrs instanceof LinkedHashMap)) { + //TODO wrong error message... + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE216", String.format( + "ValidationError: Output \"%s\" has wrong type. Expecting a dict", + name))); + } + + if(getValue() == null) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE217", String.format( + "MissingRequiredFieldError: Output \"%s\" is missing required \"%s\"", + name,VALUE))); + } + for(String key: attrs.keySet()) { + boolean bFound = false; + for(String of: OUTPUTFIELD) { + if(key.equals(of)) { + bFound = true; + break; + } + } + if(!bFound) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE218", String.format( + "UnknownFieldError: Output \"%s\" contains unknown field \"%s\"", + name,key))); + } + } + } + + // getter/setter + + public String getName() { + return name; + } + + public void setAttr(String name,Object value) { + attrs.put(name, value); + } +} + +/*python + +class Output(object): + + OUTPUTFIELD = (DESCRIPTION, VALUE) = ('description', 'value') + + def __init__(self, name, attrs): + self.name = name + self.attrs = attrs + + @property + def description(self): + return self.attrs.get(self.DESCRIPTION) + + @property + def value(self): + return self.attrs.get(self.VALUE) + + def validate(self): + self._validate_field() + + def 
_validate_field(self): + if not isinstance(self.attrs, dict): + ValidationIssueCollector.appendException( + MissingRequiredFieldError(what='Output "%s"' % self.name, + required=self.VALUE)) + if self.value is None: + ValidationIssueCollector.appendException( + MissingRequiredFieldError(what='Output "%s"' % self.name, + required=self.VALUE)) + for name in self.attrs: + if name not in self.OUTPUTFIELD: + ValidationIssueCollector.appendException( + UnknownFieldError(what='Output "%s"' % self.name, + field=name)) +*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/prereq/CSAR.java b/src/main/java/org/onap/sdc/toscaparser/api/prereq/CSAR.java new file mode 100644 index 0000000..98625e0 --- /dev/null +++ b/src/main/java/org/onap/sdc/toscaparser/api/prereq/CSAR.java @@ -0,0 +1,785 @@ +package org.onap.sdc.toscaparser.api.prereq; + +import org.onap.sdc.toscaparser.api.ImportsLoader; +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; +import org.onap.sdc.toscaparser.api.utils.UrlUtils; + +import java.io.BufferedOutputStream; +import java.io.File; +import java.io.FileInputStream; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.RandomAccessFile; +import java.net.URL; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.nio.file.StandardCopyOption; +import java.util.*; +import java.util.zip.ZipEntry; +import java.util.zip.ZipFile; +import java.util.zip.ZipInputStream; + +import org.onap.sdc.toscaparser.api.common.JToscaException; +import org.onap.sdc.toscaparser.api.utils.JToscaErrorCodes; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.yaml.snakeyaml.Yaml; + +public class CSAR { + + private static Logger log = LoggerFactory.getLogger(CSAR.class.getName()); + private static final ArrayList META_PROPERTIES_FILES = new 
ArrayList<>(Arrays.asList("TOSCA-Metadata/TOSCA.meta", "csar.meta")); + + private String path; + private boolean isFile; + private boolean isValidated; + private boolean errorCaught; + private String csar; + private String tempDir; +// private Metadata metaData; + private File tempFile; + private LinkedHashMap> metaProperties; + + public CSAR(String csarPath, boolean aFile) { + path = csarPath; + isFile = aFile; + isValidated = false; + errorCaught = false; + csar = null; + tempDir = null; + tempFile = null; + metaProperties = new LinkedHashMap<>(); + } + + public boolean validate() throws JToscaException { + isValidated = true; + + //validate that the file or URL exists + + if(isFile) { + File f = new File(path); + if (!f.isFile()) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE220", String.format("\"%s\" is not a file", path))); + return false; + } + else { + this.csar = path; + } + } + else { + if(!UrlUtils.validateUrl(path)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE221", String.format("ImportError: \"%s\" does not exist",path))); + return false; + } + // get it to a local file + try { + File tempFile = File.createTempFile("csartmp",".csar"); + Path ptf = Paths.get(tempFile.getPath()); + URL webfile = new URL(path); + InputStream in = webfile.openStream(); + Files.copy(in,ptf,StandardCopyOption.REPLACE_EXISTING); + } + catch(Exception e) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE222", "ImportError: failed to load CSAR from " + path)); + return false; + } + + log.debug("CSAR - validate - currently only files are supported"); + return false; + } + + _parseAndValidateMetaProperties(); + + if(errorCaught) { + return false; + } + + // validate that external references in the main template actually exist and are accessible + _validateExternalReferences(); + + return !errorCaught; + + } + + private void 
_parseAndValidateMetaProperties() throws JToscaException { + + ZipFile zf = null; + + try { + + // validate that it is a valid zip file + RandomAccessFile raf = new RandomAccessFile(csar, "r"); + long n = raf.readInt(); + raf.close(); + // check if Zip's magic number + if (n != 0x504B0304) { + String errorString = String.format("\"%s\" is not a valid zip file", csar); + log.error(errorString); + throw new JToscaException(errorString , JToscaErrorCodes.INVALID_CSAR_FORMAT.getValue()); + } + + // validate that it contains the metadata file in the correct location + zf = new ZipFile(csar); + ZipEntry ze = zf.getEntry("TOSCA-Metadata/TOSCA.meta"); + if (ze == null) { + + String errorString = String.format( + "\"%s\" is not a valid CSAR as it does not contain the " + + "required file \"TOSCA.meta\" in the folder \"TOSCA-Metadata\"", csar); + log.error(errorString); + throw new JToscaException(errorString, JToscaErrorCodes.MISSING_META_FILE.getValue()); + } + + //Going over expected metadata files and parsing them + for (String metaFile: META_PROPERTIES_FILES) { + + byte ba[] = new byte[4096]; + ze = zf.getEntry(metaFile); + if (ze != null) { + InputStream inputStream = zf.getInputStream(ze); + n = inputStream.read(ba, 0, 4096); + String md = new String(ba); + md = md.substring(0, (int) n); + + String errorString = String.format( + "The file \"%s\" in the" + + " CSAR \"%s\" does not contain valid YAML content", ze.getName(), csar); + + try { + Yaml yaml = new Yaml(); + Object mdo = yaml.load(md); + if (!(mdo instanceof LinkedHashMap)) { + log.error(errorString); + throw new JToscaException(errorString, JToscaErrorCodes.INVALID_META_YAML_CONTENT.getValue()); + } + + String[] split = ze.getName().split("/"); + String fileName = split[split.length - 1]; + + if (!metaProperties.containsKey(fileName)) { + metaProperties.put(fileName, (LinkedHashMap) mdo); + } + } + catch(Exception e) { + log.error(errorString); + throw new JToscaException(errorString, 
JToscaErrorCodes.INVALID_META_YAML_CONTENT.getValue()); + } + } + } + + // verify it has "Entry-Definition" + String edf = _getMetadata("Entry-Definitions"); + if (edf == null) { + String errorString = String.format( + "The CSAR \"%s\" is missing the required metadata " + + "\"Entry-Definitions\" in \"TOSCA-Metadata/TOSCA.meta\"", csar); + log.error(errorString); + throw new JToscaException(errorString, JToscaErrorCodes.ENTRY_DEFINITION_NOT_DEFINED.getValue()); + } + + //validate that "Entry-Definitions' metadata value points to an existing file in the CSAR + boolean foundEDF = false; + Enumeration entries = zf.entries(); + while (entries.hasMoreElements()) { + ze = entries.nextElement(); + if (ze.getName().equals(edf)) { + foundEDF = true; + break; + } + } + if (!foundEDF) { + String errorString = String.format( + "The \"Entry-Definitions\" file defined in the CSAR \"%s\" does not exist", csar); + log.error(errorString); + throw new JToscaException(errorString, JToscaErrorCodes.MISSING_ENTRY_DEFINITION_FILE.getValue()); + } + } catch (JToscaException e) { + //ThreadLocalsHolder.getCollector().appendCriticalException(e.getMessage()); + throw e; + } catch (Exception e) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE223", "ValidationError: " + e.getMessage())); + errorCaught = true; + } + + try { + if (zf != null) { + zf.close(); + } + } catch (IOException e) { + } + } + + public void cleanup() { + try { + if(tempFile != null) { + tempFile.delete(); + } + } + catch(Exception e) { + } + } + + private String _getMetadata(String key) throws JToscaException { + if(!isValidated) { + validate(); + } + Object value = _getMetaProperty("TOSCA.meta").get(key); + return value != null ? 
value.toString() : null; + } + + public String getAuthor() throws JToscaException { + return _getMetadata("Created-By"); + } + + public String getVersion() throws JToscaException { + return _getMetadata("CSAR-Version"); + } + + public LinkedHashMap> getMetaProperties() { + return metaProperties; + } + + private LinkedHashMap _getMetaProperty(String propertiesFile) { + return metaProperties.get(propertiesFile); + } + + public String getMainTemplate() throws JToscaException { + String entryDef = _getMetadata("Entry-Definitions"); + ZipFile zf; + boolean ok = false; + try { + zf = new ZipFile(path); + ok = (zf.getEntry(entryDef) != null); + zf.close(); + } + catch(IOException e) { + if(!ok) { + log.error("CSAR - getMainTemplate - failed to open {}", path); + } + } + if(ok) { + return entryDef; + } + else { + return null; + } + } + + @SuppressWarnings("unchecked") + public LinkedHashMap getMainTemplateYaml() throws JToscaException { + String mainTemplate = tempDir + File.separator + getMainTemplate(); + if(mainTemplate != null) { + try (InputStream input = new FileInputStream(new File(mainTemplate));){ + Yaml yaml = new Yaml(); + Object data = yaml.load(input); + if(!(data instanceof LinkedHashMap)) { + throw new IOException(); + } + return (LinkedHashMap)data; + } + catch(Exception e) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE224", String.format( + "The file \"%s\" in the CSAR \"%s\" does not " + + "contain valid TOSCA YAML content", + mainTemplate,csar))); + } + } + return null; + } + + public String getDescription() throws JToscaException { + String desc = _getMetadata("Description"); + if(desc != null) { + return desc; + } + + Map metaData = metaProperties.get("TOSCA.meta"); + metaData.put("Description", getMainTemplateYaml().get("description")); + return _getMetadata("Description"); + } + + public String getTempDir() { + return tempDir; + } + + public void decompress() throws IOException, JToscaException { + 
if(!isValidated) { + validate(); + } + + if(tempDir == null || tempDir.isEmpty()) { + tempDir = Files.createTempDirectory("JTP").toString(); + unzip(path,tempDir); + } + } + + private void _validateExternalReferences() throws JToscaException { + // Extracts files referenced in the main template + // These references are currently supported: + // * imports + // * interface implementations + // * artifacts + try { + decompress(); + String mainTplFile = getMainTemplate(); + if(mainTplFile == null) { + return; + } + + LinkedHashMap mainTpl = getMainTemplateYaml(); + if(mainTpl.get("imports") != null) { + // this loads the imports + ImportsLoader il = new ImportsLoader((ArrayList)mainTpl.get("imports"), + tempDir + File.separator + mainTplFile, + (Object)null, + (LinkedHashMap)null); + } + + if(mainTpl.get("topology_template") != null) { + LinkedHashMap topologyTemplate = + (LinkedHashMap)mainTpl.get("topology_template"); + + if(topologyTemplate.get("node_templates") != null) { + LinkedHashMap nodeTemplates = + (LinkedHashMap)topologyTemplate.get("node_templates"); + for(String nodeTemplateKey: nodeTemplates.keySet()) { + LinkedHashMap nodeTemplate = + (LinkedHashMap)nodeTemplates.get(nodeTemplateKey); + if(nodeTemplate.get("artifacts") != null) { + LinkedHashMap artifacts = + (LinkedHashMap)nodeTemplate.get("artifacts"); + for(String artifactKey: artifacts.keySet()) { + Object artifact = artifacts.get(artifactKey); + if(artifact instanceof String) { + _validateExternalReference(mainTplFile,(String)artifact,true); + } + else if(artifact instanceof LinkedHashMap) { + String file = (String)((LinkedHashMap)artifact).get("file"); + if(file != null) { + _validateExternalReference(mainTplFile,file,true); + } + } + else { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE225", String.format( + "ValueError: Unexpected artifact definition for \"%s\"", + artifactKey))); + errorCaught = true; + } + } + } + if(nodeTemplate.get("interfaces") != 
null) { + LinkedHashMap interfaces = + (LinkedHashMap)nodeTemplate.get("interfaces"); + for(String interfaceKey: interfaces.keySet()) { + LinkedHashMap _interface = + (LinkedHashMap)interfaces.get(interfaceKey); + for(String operationKey: _interface.keySet()) { + Object operation = _interface.get(operationKey); + if(operation instanceof String) { + _validateExternalReference(mainTplFile,(String)operation,false); + } + else if(operation instanceof LinkedHashMap) { + String imp = (String)((LinkedHashMap)operation).get("implementation"); + if(imp != null) { + _validateExternalReference(mainTplFile,imp,true); + } + } + } + } + } + } + } + } + } + catch(IOException e) { + errorCaught = true; + } + finally { + // delete tempDir (only here?!?) + File fdir = new File(tempDir); + deleteDir(fdir); + tempDir = null; + } + } + + public static void deleteDir(File fdir) { + try { + if (fdir.isDirectory()) { + for (File c : fdir.listFiles()) + deleteDir(c); + } + fdir.delete(); + } + catch(Exception e) { + } + } + + private void _validateExternalReference(String tplFile,String resourceFile,boolean raiseExc) { + // Verify that the external resource exists + + // If resource_file is a URL verify that the URL is valid. + // If resource_file is a relative path verify that the path is valid + // considering base folder (self.temp_dir) and tpl_file. + // Note that in a CSAR resource_file cannot be an absolute path. 
+ if(UrlUtils.validateUrl(resourceFile)) { + String msg = String.format("URLException: The resource at \"%s\" cannot be accessed",resourceFile); + try { + if(UrlUtils.isUrlAccessible(resourceFile)) { + return; + } + else { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE226", msg)); + errorCaught = true; + } + } + catch (Exception e) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE227", msg)); + } + } + + String dirPath = Paths.get(tplFile).getParent().toString(); + String filePath = tempDir + File.separator + dirPath + File.separator + resourceFile; + File f = new File(filePath); + if(f.isFile()) { + return; + } + + if(raiseExc) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE228", String.format( + "ValueError: The resource \"%s\" does not exist",resourceFile))); + } + errorCaught = true; + } + + private void unzip(String zipFilePath, String destDirectory) throws IOException { + File destDir = new File(destDirectory); + if (!destDir.exists()) { + destDir.mkdir(); + } + + try (ZipInputStream zipIn = new ZipInputStream(new FileInputStream(zipFilePath));){ + ZipEntry entry = zipIn.getNextEntry(); + // iterates over entries in the zip file + while (entry != null) { + // create all directories needed for nested items + String[] parts = entry.getName().split("/"); + String s = destDirectory + File.separator ; + for(int i=0; i< parts.length-1; i++) { + s += parts[i]; + File idir = new File(s); + if(!idir.exists()) { + idir.mkdir(); + } + s += File.separator; + } + String filePath = destDirectory + File.separator + entry.getName(); + if (!entry.isDirectory()) { + // if the entry is a file, extracts it + extractFile(zipIn, filePath); + } else { + // if the entry is a directory, make the directory + File dir = new File(filePath); + dir.mkdir(); + } + zipIn.closeEntry(); + entry = zipIn.getNextEntry(); + } + } + } + + /** + * Extracts a zip entry (file 
entry) + * @param zipIn + * @param filePath + * @throws IOException + */ + private static final int BUFFER_SIZE = 4096; + + private void extractFile(ZipInputStream zipIn, String filePath) throws IOException { + //BufferedOutputStream bos = new BufferedOutputStream(new FileOutputStream(filePath)); + try (FileOutputStream fos = new FileOutputStream(filePath); + BufferedOutputStream bos = new BufferedOutputStream(fos);){ + byte[] bytesIn = new byte[BUFFER_SIZE]; + int read = 0; + while ((read = zipIn.read(bytesIn)) != -1) { + bos.write(bytesIn, 0, read); + } + } + } + +} + +/*python + +from toscaparser.common.exception import ValidationIssueCollector +from toscaparser.common.exception import URLException +from toscaparser.common.exception import ValidationError +from toscaparser.imports import ImportsLoader +from toscaparser.utils.gettextutils import _ +from toscaparser.utils.urlutils import UrlUtils + +try: # Python 2.x + from BytesIO import BytesIO +except ImportError: # Python 3.x + from io import BytesIO + + +class CSAR(object): + + def __init__(self, csar_file, a_file=True): + self.path = csar_file + self.a_file = a_file + self.is_validated = False + self.error_caught = False + self.csar = None + self.temp_dir = None + + def validate(self): + """Validate the provided CSAR file.""" + + self.is_validated = True + + # validate that the file or URL exists + missing_err_msg = (_('"%s" does not exist.') % self.path) + if self.a_file: + if not os.path.isfile(self.path): + ValidationIssueCollector.appendException( + ValidationError(message=missing_err_msg)) + return False + else: + self.csar = self.path + else: # a URL + if not UrlUtils.validate_url(self.path): + ValidationIssueCollector.appendException( + ValidationError(message=missing_err_msg)) + return False + else: + response = requests.get(self.path) + self.csar = BytesIO(response.content) + + # validate that it is a valid zip file + if not zipfile.is_zipfile(self.csar): + err_msg = (_('"%s" is not a valid zip 
file.') % self.path) + ValidationIssueCollector.appendException( + ValidationError(message=err_msg)) + return False + + # validate that it contains the metadata file in the correct location + self.zfile = zipfile.ZipFile(self.csar, 'r') + filelist = self.zfile.namelist() + if 'TOSCA-Metadata/TOSCA.meta' not in filelist: + err_msg = (_('"%s" is not a valid CSAR as it does not contain the ' + 'required file "TOSCA.meta" in the folder ' + '"TOSCA-Metadata".') % self.path) + ValidationIssueCollector.appendException( + ValidationError(message=err_msg)) + return False + + # validate that 'Entry-Definitions' property exists in TOSCA.meta + data = self.zfile.read('TOSCA-Metadata/TOSCA.meta') + invalid_yaml_err_msg = (_('The file "TOSCA-Metadata/TOSCA.meta" in ' + 'the CSAR "%s" does not contain valid YAML ' + 'content.') % self.path) + try: + meta = yaml.load(data) + if type(meta) is dict: + self.metadata = meta + else: + ValidationIssueCollector.appendException( + ValidationError(message=invalid_yaml_err_msg)) + return False + except yaml.YAMLError: + ValidationIssueCollector.appendException( + ValidationError(message=invalid_yaml_err_msg)) + return False + + if 'Entry-Definitions' not in self.metadata: + err_msg = (_('The CSAR "%s" is missing the required metadata ' + '"Entry-Definitions" in ' + '"TOSCA-Metadata/TOSCA.meta".') + % self.path) + ValidationIssueCollector.appendException( + ValidationError(message=err_msg)) + return False + + # validate that 'Entry-Definitions' metadata value points to an + # existing file in the CSAR + entry = self.metadata.get('Entry-Definitions') + if entry and entry not in filelist: + err_msg = (_('The "Entry-Definitions" file defined in the ' + 'CSAR "%s" does not exist.') % self.path) + ValidationIssueCollector.appendException( + ValidationError(message=err_msg)) + return False + + # validate that external references in the main template actually + # exist and are accessible + self._validate_external_references() + return not 
self.error_caught + + def get_metadata(self): + """Return the metadata dictionary.""" + + # validate the csar if not already validated + if not self.is_validated: + self.validate() + + # return a copy to avoid changes overwrite the original + return dict(self.metadata) if self.metadata else None + + def _get_metadata(self, key): + if not self.is_validated: + self.validate() + return self.metadata.get(key) + + def get_author(self): + return self._get_metadata('Created-By') + + def get_version(self): + return self._get_metadata('CSAR-Version') + + def get_main_template(self): + entry_def = self._get_metadata('Entry-Definitions') + if entry_def in self.zfile.namelist(): + return entry_def + + def get_main_template_yaml(self): + main_template = self.get_main_template() + if main_template: + data = self.zfile.read(main_template) + invalid_tosca_yaml_err_msg = ( + _('The file "%(template)s" in the CSAR "%(csar)s" does not ' + 'contain valid TOSCA YAML content.') % + {'template': main_template, 'csar': self.path}) + try: + tosca_yaml = yaml.load(data) + if type(tosca_yaml) is not dict: + ValidationIssueCollector.appendException( + ValidationError(message=invalid_tosca_yaml_err_msg)) + return tosca_yaml + except Exception: + ValidationIssueCollector.appendException( + ValidationError(message=invalid_tosca_yaml_err_msg)) + + def get_description(self): + desc = self._get_metadata('Description') + if desc is not None: + return desc + + self.metadata['Description'] = \ + self.get_main_template_yaml().get('description') + return self.metadata['Description'] + + def decompress(self): + if not self.is_validated: + self.validate() + self.temp_dir = tempfile.NamedTemporaryFile().name + with zipfile.ZipFile(self.csar, "r") as zf: + zf.extractall(self.temp_dir) + + def _validate_external_references(self): + """Extracts files referenced in the main template + + These references are currently supported: + * imports + * interface implementations + * artifacts + """ + try: + 
self.decompress() + main_tpl_file = self.get_main_template() + if not main_tpl_file: + return + main_tpl = self.get_main_template_yaml() + + if 'imports' in main_tpl: + ImportsLoader(main_tpl['imports'], + os.path.join(self.temp_dir, main_tpl_file)) + + if 'topology_template' in main_tpl: + topology_template = main_tpl['topology_template'] + + if 'node_templates' in topology_template: + node_templates = topology_template['node_templates'] + + for node_template_key in node_templates: + node_template = node_templates[node_template_key] + if 'artifacts' in node_template: + artifacts = node_template['artifacts'] + for artifact_key in artifacts: + artifact = artifacts[artifact_key] + if isinstance(artifact, six.string_types): + self._validate_external_reference( + main_tpl_file, + artifact) + elif isinstance(artifact, dict): + if 'file' in artifact: + self._validate_external_reference( + main_tpl_file, + artifact['file']) + else: + ValidationIssueCollector.appendException( + ValueError(_('Unexpected artifact ' + 'definition for "%s".') + % artifact_key)) + self.error_caught = True + if 'interfaces' in node_template: + interfaces = node_template['interfaces'] + for interface_key in interfaces: + interface = interfaces[interface_key] + for opertation_key in interface: + operation = interface[opertation_key] + if isinstance(operation, six.string_types): + self._validate_external_reference( + main_tpl_file, + operation, + False) + elif isinstance(operation, dict): + if 'implementation' in operation: + self._validate_external_reference( + main_tpl_file, + operation['implementation']) + finally: + if self.temp_dir: + shutil.rmtree(self.temp_dir) + + def _validate_external_reference(self, tpl_file, resource_file, + raise_exc=True): + """Verify that the external resource exists + + If resource_file is a URL verify that the URL is valid. + If resource_file is a relative path verify that the path is valid + considering base folder (self.temp_dir) and tpl_file. 
+ Note that in a CSAR resource_file cannot be an absolute path. + """ + if UrlUtils.validate_url(resource_file): + msg = (_('The resource at "%s" cannot be accessed.') % + resource_file) + try: + if UrlUtils.url_accessible(resource_file): + return + else: + ValidationIssueCollector.appendException( + URLException(what=msg)) + self.error_caught = True + except Exception: + ValidationIssueCollector.appendException( + URLException(what=msg)) + self.error_caught = True + + if os.path.isfile(os.path.join(self.temp_dir, + os.path.dirname(tpl_file), + resource_file)): + return + + if raise_exc: + ValidationIssueCollector.appendException( + ValueError(_('The resource "%s" does not exist.') + % resource_file)) + self.error_caught = True +*/ + + diff --git a/src/main/java/org/onap/sdc/toscaparser/api/utils/CopyUtils.java b/src/main/java/org/onap/sdc/toscaparser/api/utils/CopyUtils.java new file mode 100644 index 0000000..55e9ba1 --- /dev/null +++ b/src/main/java/org/onap/sdc/toscaparser/api/utils/CopyUtils.java @@ -0,0 +1,29 @@ +package org.onap.sdc.toscaparser.api.utils; + +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.Map; + +public class CopyUtils { + + @SuppressWarnings("unchecked") + public static Object copyLhmOrAl(Object src) { + if(src instanceof LinkedHashMap) { + LinkedHashMap dst = new LinkedHashMap(); + for(Map.Entry me: ((LinkedHashMap)src).entrySet()) { + dst.put(me.getKey(),me.getValue()); + } + return dst; + } + else if(src instanceof ArrayList) { + ArrayList dst = new ArrayList(); + for(Object o: (ArrayList)src) { + dst.add(o); + } + return dst; + } + else { + return null; + } + } +} diff --git a/src/main/java/org/onap/sdc/toscaparser/api/utils/DumpUtils.java b/src/main/java/org/onap/sdc/toscaparser/api/utils/DumpUtils.java new file mode 100644 index 0000000..f23e1c6 --- /dev/null +++ b/src/main/java/org/onap/sdc/toscaparser/api/utils/DumpUtils.java @@ -0,0 +1,55 @@ +package org.onap.sdc.toscaparser.api.utils; + +import 
java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.Map; + +public class DumpUtils { + + @SuppressWarnings("unchecked") + public static void dumpYaml(Object yo,int level) { + final String indent = " "; + try { + if(yo == null) { + System.out.println(""); + return; + } + String cname = yo.getClass().getSimpleName(); + System.out.print(cname); + if(cname.equals("LinkedHashMap")) { + LinkedHashMap lhm = (LinkedHashMap)yo; + System.out.println(); + for(Map.Entry me: lhm.entrySet()) { + System.out.print(indent.substring(0,level) + me.getKey() + ": "); + dumpYaml(me.getValue(),level+2); + } + } + else if(cname.equals("ArrayList")) { + ArrayList al = (ArrayList)yo; + System.out.println(); + for (int i=0; i \"" + (String)yo + "\""); + } + else if(cname.equals("Integer")) { + System.out.println(" ==> " + (int)yo); + } + else if(cname.equals("Boolean")) { + System.out.println(" ==> " + (boolean)yo); + } + else if(cname.equals("Double")) { + System.out.println(" ==> " + (double)yo); + } + else { + System.out.println(" !! unexpected type"); + } + } + catch(Exception e) { + System.out.println("Exception!! 
" + e.getMessage()); + } + } +} \ No newline at end of file diff --git a/src/main/java/org/onap/sdc/toscaparser/api/utils/JToscaErrorCodes.java b/src/main/java/org/onap/sdc/toscaparser/api/utils/JToscaErrorCodes.java new file mode 100644 index 0000000..3abd3b1 --- /dev/null +++ b/src/main/java/org/onap/sdc/toscaparser/api/utils/JToscaErrorCodes.java @@ -0,0 +1,32 @@ +package org.onap.sdc.toscaparser.api.utils; + + +public enum JToscaErrorCodes { + MISSING_META_FILE("JE1001"), + INVALID_META_YAML_CONTENT("JE1002"), + ENTRY_DEFINITION_NOT_DEFINED("JE1003"), + MISSING_ENTRY_DEFINITION_FILE ("JE1004"), + GENERAL_ERROR("JE1005"), + PATH_NOT_VALID("JE1006"), + CSAR_TOSCA_VALIDATION_ERROR("JE1007"), + INVALID_CSAR_FORMAT("JE1008"); + + private String value; + + private JToscaErrorCodes(String value) { + this.value = value; + } + + public String getValue() { + return value; + } + + public static JToscaErrorCodes getByCode(String code) { + for(JToscaErrorCodes v : values()){ + if( v.getValue().equals(code)){ + return v; + } + } + return null; + } +} \ No newline at end of file diff --git a/src/main/java/org/onap/sdc/toscaparser/api/utils/TOSCAVersionProperty.java b/src/main/java/org/onap/sdc/toscaparser/api/utils/TOSCAVersionProperty.java new file mode 100644 index 0000000..caba044 --- /dev/null +++ b/src/main/java/org/onap/sdc/toscaparser/api/utils/TOSCAVersionProperty.java @@ -0,0 +1,182 @@ +package org.onap.sdc.toscaparser.api.utils; + +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; + +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +public class TOSCAVersionProperty {// test with functions/test_concat.yaml + + private String version; + + private static final String versionRe = + "^(?([0-9][0-9]*))" + + "(\\.(?([0-9][0-9]*)))?" + + "(\\.(?([0-9][0-9]*)))?" + + "(\\.(?([0-9A-Za-z]+)))?" 
+ + "(\\-(?[0-9])*)?$"; + + private String minorVersion = null; + private String majorVersion = null; + private String fixVersion = null; + private String qualifier = null; + private String buildVersion = null; + + + public TOSCAVersionProperty(Object _version) { + version = _version.toString(); + + if(version.equals("0") || version.equals("0.0") || version.equals("0.0.0")) { + //log.warning(_('Version assumed as not provided')) + version = ""; + return; + } + + Pattern pattern = Pattern.compile(versionRe); + Matcher matcher = pattern.matcher(version); + if(!matcher.find()) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE252", String.format( + "InvalidTOSCAVersionPropertyException: " + + "Value of TOSCA version property \"%s\" is invalid", + version))); + return; + } + minorVersion = matcher.group("gMinorVersion"); + majorVersion = matcher.group("gMajorVersion"); + fixVersion = matcher.group("gFixVersion"); + qualifier = _validateQualifier(matcher.group("gQualifier")); + buildVersion = _validateBuild(matcher.group("gBuildVersion")); + _validateMajorVersion(majorVersion); + + } + + private String _validateMajorVersion(String value) { + // Validate major version + + // Checks if only major version is provided and assumes + // minor version as 0. + // Eg: If version = 18, then it returns version = '18.0' + + if(minorVersion == null && buildVersion == null && !value.equals("0")) { + //log.warning(_('Minor version assumed "0".')) + version = version + "0"; + } + return value; + } + + private String _validateQualifier(String value) { + // Validate qualifier + + // TOSCA version is invalid if a qualifier is present without the + // fix version or with all of major, minor and fix version 0s. 
+ + // For example, the following versions are invalid + // 18.0.abc + // 0.0.0.abc + + if((fixVersion == null && value != null) || + (minorVersion.equals("0") && majorVersion.equals("0") && + fixVersion.equals("0") && value != null)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE253", String.format( + "InvalidTOSCAVersionPropertyException: " + + "Value of TOSCA version property \"%s\" is invalid", + version))); + } + return value; + } + + private String _validateBuild(String value) { + // Validate build version + + // TOSCA version is invalid if build version is present without the qualifier. + // Eg: version = 18.0.0-1 is invalid. + + if(qualifier == null && value != null) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE254", String.format( + "InvalidTOSCAVersionPropertyException: " + + "Value of TOSCA version property \"%s\" is invalid", + version))); + } + return value; + } + + public Object getVersion() { + return version; + } + +} + +/*python + +class TOSCAVersionProperty(object): + + VERSION_RE = re.compile('^(?P([0-9][0-9]*))' + '(\.(?P([0-9][0-9]*)))?' + '(\.(?P([0-9][0-9]*)))?' + '(\.(?P([0-9A-Za-z]+)))?' 
+ '(\-(?P[0-9])*)?$') + + def __init__(self, version): + self.version = str(version) + match = self.VERSION_RE.match(self.version) + if not match: + ValidationIssueCollector.appendException( + InvalidTOSCAVersionPropertyException(what=(self.version))) + return + ver = match.groupdict() + if self.version in ['0', '0.0', '0.0.0']: + log.warning(_('Version assumed as not provided')) + self.version = None + self.minor_version = ver['minor_version'] + self.major_version = ver['major_version'] + self.fix_version = ver['fix_version'] + self.qualifier = self._validate_qualifier(ver['qualifier']) + self.build_version = self._validate_build(ver['build_version']) + self._validate_major_version(self.major_version) + + def _validate_major_version(self, value): + """Validate major version + + Checks if only major version is provided and assumes + minor version as 0. + Eg: If version = 18, then it returns version = '18.0' + """ + + if self.minor_version is None and self.build_version is None and \ + value != '0': + log.warning(_('Minor version assumed "0".')) + self.version = '.'.join([value, '0']) + return value + + def _validate_qualifier(self, value): + """Validate qualifier + + TOSCA version is invalid if a qualifier is present without the + fix version or with all of major, minor and fix version 0s. + + For example, the following versions are invalid + 18.0.abc + 0.0.0.abc + """ + if (self.fix_version is None and value) or \ + (self.minor_version == self.major_version == + self.fix_version == '0' and value): + ValidationIssueCollector.appendException( + InvalidTOSCAVersionPropertyException(what=(self.version))) + return value + + def _validate_build(self, value): + """Validate build version + + TOSCA version is invalid if build version is present without the + qualifier. + Eg: version = 18.0.0-1 is invalid. 
+ """ + if not self.qualifier and value: + ValidationIssueCollector.appendException( + InvalidTOSCAVersionPropertyException(what=(self.version))) + return value + + def get_version(self): + return self.version +*/ \ No newline at end of file diff --git a/src/main/java/org/onap/sdc/toscaparser/api/utils/ThreadLocalsHolder.java b/src/main/java/org/onap/sdc/toscaparser/api/utils/ThreadLocalsHolder.java new file mode 100644 index 0000000..8a04c0d --- /dev/null +++ b/src/main/java/org/onap/sdc/toscaparser/api/utils/ThreadLocalsHolder.java @@ -0,0 +1,24 @@ +package org.onap.sdc.toscaparser.api.utils; + +import org.onap.sdc.toscaparser.api.common.ValidationIssueCollector; + +public class ThreadLocalsHolder { + + private static final ThreadLocal exceptionCollectorThreadLocal = new ThreadLocal<>(); + + private ThreadLocalsHolder(){} + + public static ValidationIssueCollector getCollector() { + return exceptionCollectorThreadLocal.get(); + } + + public static void setCollector(ValidationIssueCollector validationIssueCollector) { + cleanup(); + exceptionCollectorThreadLocal.set(validationIssueCollector); + } + + public static void cleanup(){ + exceptionCollectorThreadLocal.remove(); + } + +} diff --git a/src/main/java/org/onap/sdc/toscaparser/api/utils/UrlUtils.java b/src/main/java/org/onap/sdc/toscaparser/api/utils/UrlUtils.java new file mode 100644 index 0000000..3eb156d --- /dev/null +++ b/src/main/java/org/onap/sdc/toscaparser/api/utils/UrlUtils.java @@ -0,0 +1,123 @@ +package org.onap.sdc.toscaparser.api.utils; + +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; + +import java.io.IOException; +import java.net.HttpURLConnection; +import java.net.MalformedURLException; +import java.net.URL; + +public class UrlUtils { + + public static boolean validateUrl(String sUrl) { + // Validates whether the given path is a URL or not + + // If the given path includes a scheme (http, https, ftp, ...) 
and a net + // location (a domain name such as www.github.com) it is validated as a URL + try { + URL url = new URL(sUrl); + if(url.getProtocol().equals("file")) { + return true; + } + return url.getAuthority() != null; + } + catch(MalformedURLException e) { + return false; + } + } + + public static String joinUrl(String sUrl,String relativePath) { + // Builds a new URL from the given URL and the relative path + + // Example: + // url: http://www.githib.com/openstack/heat + // relative_path: heat-translator + // - joined: http://www.githib.com/openstack/heat-translator + if(!validateUrl(sUrl)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE255", String.format( + "ValueError: The URL \"%s\" is malformed",sUrl))); + } + try { + URL base = new URL(sUrl); + return (new URL(base,relativePath)).toString(); + } + catch(MalformedURLException e) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE256", String.format( + "ValueError: Joining URL \"%s\" and relative path \"%s\" caused an exception",sUrl,relativePath))); + return sUrl; + } + } + + public static boolean isUrlAccessible(String sUrl) { + // Validates whether the given URL is accessible + + // Returns true if the get call returns a 200 response code. + // Otherwise, returns false. 
+ try { + HttpURLConnection connection = (HttpURLConnection) new URL(sUrl).openConnection(); + connection.setRequestMethod("HEAD"); + int responseCode = connection.getResponseCode(); + return responseCode == 200; + } + catch(IOException e) { + return false; + } + } + +} + +/*python + +from six.moves.urllib.parse import urljoin +from six.moves.urllib.parse import urlparse +from toscaparser.common.exception import ValidationIssueCollector +from toscaparser.utils.gettextutils import _ + +try: + # Python 3.x + import urllib.request as urllib2 +except ImportError: + # Python 2.x + import urllib2 + + +class UrlUtils(object): + + @staticmethod + def validate_url(path): + """Validates whether the given path is a URL or not. + + If the given path includes a scheme (http, https, ftp, ...) and a net + location (a domain name such as www.github.com) it is validated as a + URL. + """ + parsed = urlparse(path) + if parsed.scheme == 'file': + # If the url uses the file scheme netloc will be "" + return True + else: + return bool(parsed.scheme) and bool(parsed.netloc) + + @staticmethod + def join_url(url, relative_path): + """Builds a new URL from the given URL and the relative path. + + Example: + url: http://www.githib.com/openstack/heat + relative_path: heat-translator + - joined: http://www.githib.com/openstack/heat-translator + """ + if not UrlUtils.validate_url(url): + ValidationIssueCollector.appendException( + ValueError(_('"%s" is not a valid URL.') % url)) + return urljoin(url, relative_path) + + @staticmethod + def url_accessible(url): + """Validates whether the given URL is accessible. + + Returns true if the get call returns a 200 response code. + Otherwise, returns false. 
+ """ + return urllib2.urlopen(url).getcode() == 200 +*/ \ No newline at end of file diff --git a/src/main/java/org/onap/sdc/toscaparser/api/utils/ValidateUtils.java b/src/main/java/org/onap/sdc/toscaparser/api/utils/ValidateUtils.java new file mode 100644 index 0000000..6c26f18 --- /dev/null +++ b/src/main/java/org/onap/sdc/toscaparser/api/utils/ValidateUtils.java @@ -0,0 +1,425 @@ +package org.onap.sdc.toscaparser.api.utils; + +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; + +import java.util.ArrayList; +import java.util.Date; +import java.util.LinkedHashMap; + +public class ValidateUtils { + + private static final String RANGE_UNBOUNDED = "UNBOUNDED"; + + public static Object strToNum(Object value) { + // Convert a string representation of a number into a numeric type + // tODO(TBD) we should not allow numeric values in, input should be str + if(value instanceof Number) { + return value; + } + if(!(value instanceof String)) { + + } + try { + return Integer.parseInt((String)value); + } + catch(NumberFormatException e) { + } + try { + return Float.parseFloat((String)value); + } + catch(Exception e) { + } + return null; + } + + public static Object validateNumeric(Object value) { + if(value != null) { + if (!(value instanceof Number)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE257", String.format( + "ValueError: \"%s\" is not a numeric",value.toString()))); + } + } + return value; + } + + public static Object validateInteger(Object value) { + if(value != null) { + if (!(value instanceof Integer)) { + // allow "true" and "false" + if (value instanceof Boolean) { + return (Boolean) value ? 
1 : 0; + } + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE258", String.format( + "ValueError: \"%s\" is not an integer",value.toString()))); + } + } + return value; + } + + public static Object validateFloat(Object value) { + if(value != null) { + if (!(value instanceof Float || value instanceof Double)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE259", String.format( + "ValueError: \"%s\" is not a float",value.toString()))); + } + } + return value; + } + + public static Object validateString(Object value) { + if(value != null) { + if (!(value instanceof String)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE260", String.format( + "ValueError: \'%s\' is not a string",value.toString()))); + } + } + return value; + } + + public static Object validateList(Object value) { + if(value != null) { + if (!(value instanceof ArrayList)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE261", String.format( + "ValueError: \"%s\" is not a list",value.toString()))); + } + } + return value; + } + + + @SuppressWarnings("unchecked") + public static Object validateRange(Object range) { + // list class check + validateList(range); + // validate range list has a min and max + if(range instanceof ArrayList && ((ArrayList)range).size() != 2) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE262", String.format( + "ValueError: \"%s\" is not a valid range",range.toString()))); + // too dangerous to continue... 
+ return range; + } + // validate min and max are numerics or the keyword UNBOUNDED + boolean minTest = false; + boolean maxTest = false; + Object r0 = ((ArrayList)range).get(0); + Object r1 = ((ArrayList)range).get(1); + + if(!(r0 instanceof Integer) && !(r0 instanceof Float) || + !(r1 instanceof Integer) && !(r1 instanceof Float)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE263", String.format( + "ValueError: \"%s\" is not a valid range",range.toString()))); + // too dangerous to continue... + return range; + } + + Float min = 0.0F; + Float max = 0.0F; + if(r0 instanceof String && ((String)r0).equals(RANGE_UNBOUNDED)) { + minTest = true; + } + else { + min = r0 instanceof Integer ? ((Integer)r0).floatValue() : (Float)r0; + } + if(r1 instanceof String && ((String)r1).equals(RANGE_UNBOUNDED)) { + maxTest = true; + } + else { + max = r1 instanceof Integer ? ((Integer)r1).floatValue() : (Float)r1; + } + + // validate the max > min (account for UNBOUNDED) + if(!minTest && !maxTest) { + // Note: min == max is allowed + if(min > max) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE264", String.format( + "ValueError:\"%s\" is not a valid range",range.toString()))); + } + } + return range; + } + + @SuppressWarnings("unchecked") + public static Object validateValueInRange(Object value,Object range,String propName) { + // verify all 3 are numeric and convert to Floats + if(!(value instanceof Integer || value instanceof Float)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE265", String.format( + "ValueError: validateInRange: \"%s\" is not a number",range.toString()))); + return value; + } + Float fval = value instanceof Integer ? ((Integer)value).floatValue() : (Float)value; + + ////////////////////////// + //"validateRange(range);" + ////////////////////////// + // better safe than sorry... 
+ // validate that range list has a min and max + if(range instanceof ArrayList && ((ArrayList)range).size() != 2) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE266", String.format( + "ValueError: \"%s\" is not a valid range",range.toString()))); + // too dangerous to continue... + return value; + } + // validate min and max are numerics or the keyword UNBOUNDED + boolean minTest = false; + boolean maxTest = false; + Object r0 = ((ArrayList)range).get(0); + Object r1 = ((ArrayList)range).get(1); + + if(!(r0 instanceof Integer) && !(r0 instanceof Float) || + !(r1 instanceof Integer) && !(r1 instanceof Float)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE267", String.format( + "ValueError: \"%s\" is not a valid range",range.toString()))); + // too dangerous to continue... + return value; + } + + Float min = 0.0F; + Float max = 0.0F; + if(r0 instanceof String && ((String)r0).equals(RANGE_UNBOUNDED)) { + minTest = true; + } + else { + min = r0 instanceof Integer ? ((Integer)r0).floatValue() : (Float)r0; + } + if(r1 instanceof String && ((String)r1).equals(RANGE_UNBOUNDED)) { + maxTest = true; + } + else { + max = r1 instanceof Integer ? ((Integer)r1).floatValue() : (Float)r1; + } + + // validate the max > min (account for UNBOUNDED) + if(!minTest && !maxTest) { + // Note: min == max is allowed + if(min > max) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE268", String.format( + "ValueError:\"%s\" is not a valid range",range.toString()))); + } + } + // finally... 
+ boolean bError = false; + //Note: value is valid if equal to min + if(!minTest) { + if(fval < min) { + bError = true; + } + } + // Note: value is valid if equal to max + if(!maxTest) { + if(fval > max) { + bError = true; + } + } + if(bError) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE269", String.format( + "RangeValueError: Property \"%s\", \"%s\" not in range [\"%s\" - \"%s\"", + propName,value.toString(),r0.toString(),r1.toString()))); + } + return value; + } + + public static Object validateMap(Object ob) { + if(ob != null) { + if (!(ob instanceof LinkedHashMap)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE270", String.format( + "ValueError\"%s\" is not a map.",ob.toString()))); + } + } + return ob; + } + + public static Object validateBoolean(Object value) { + if(value != null) { + if (value instanceof Boolean) { + return value; + } + if (value instanceof String) { + String normalized = ((String) value).toLowerCase(); + if (normalized.equals("true") || normalized.equals("false")) { + return normalized.equals("true"); + } + } + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE271", String.format( + "ValueError: \"%s\" is not a boolean",value.toString()))); + } + return value; + } + + public static Object validateTimestamp(Object value) { + /* + try: + # Note: we must return our own exception message + # as dateutil's parser returns different types / values on + # different systems. OSX, for example, returns a tuple + # containing a different error message than Linux + dateutil.parser.parse(value) + except Exception as e: + original_err_msg = str(e) + log.error(original_err_msg) + ValidationIssueCollector.appendException( + ValueError(_('"%(val)s" is not a valid timestamp. 
"%(msg)s"') % + {'val': value, 'msg': original_err_msg})) + */ + + // timestamps are loaded as Date objects by the YAML parser + if(value != null) { + if (!(value instanceof Date)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE272", String.format( + "ValueError: \"%s\" is not a valid timestamp", + value.toString()))); + + } + } + return value; + } + +} + +/*python + +from toscaparser.elements import constraints +from toscaparser.common.exception import ValidationIssueCollector +from toscaparser.common.exception import InvalidTOSCAVersionPropertyException +from toscaparser.common.exception import RangeValueError +from toscaparser.utils.gettextutils import _ + +log = logging.getLogger('tosca') + +RANGE_UNBOUNDED = 'UNBOUNDED' + + +def str_to_num(value): + '''Convert a string representation of a number into a numeric type.''' + # tODO(TBD) we should not allow numeric values in, input should be str + if isinstance(value, numbers.Number): + return value + try: + return int(value) + except ValueError: + return float(value) + + +def validate_numeric(value): + if not isinstance(value, numbers.Number): + ValidationIssueCollector.appendException( + ValueError(_('"%s" is not a numeric.') % value)) + return value + + +def validate_integer(value): + if not isinstance(value, int): + try: + value = int(value) + except Exception: + ValidationIssueCollector.appendException( + ValueError(_('"%s" is not an integer.') % value)) + return value + + +def validate_float(value): + if not isinstance(value, float): + ValidationIssueCollector.appendException( + ValueError(_('"%s" is not a float.') % value)) + return value + + +def validate_string(value): + if not isinstance(value, six.string_types): + ValidationIssueCollector.appendException( + ValueError(_('"%s" is not a string.') % value)) + return value + + +def validate_list(value): + if not isinstance(value, list): + ValidationIssueCollector.appendException( + ValueError(_('"%s" is not a list.') 
% value)) + return value + + +def validate_range(range): + # list class check + validate_list(range) + # validate range list has a min and max + if len(range) != 2: + ValidationIssueCollector.appendException( + ValueError(_('"%s" is not a valid range.') % range)) + # validate min and max are numerics or the keyword UNBOUNDED + min_test = max_test = False + if not range[0] == RANGE_UNBOUNDED: + min = validate_numeric(range[0]) + else: + min_test = True + if not range[1] == RANGE_UNBOUNDED: + max = validate_numeric(range[1]) + else: + max_test = True + # validate the max > min (account for UNBOUNDED) + if not min_test and not max_test: + # Note: min == max is allowed + if min > max: + ValidationIssueCollector.appendException( + ValueError(_('"%s" is not a valid range.') % range)) + + return range + + +def validate_value_in_range(value, range, prop_name): + validate_numeric(value) + validate_range(range) + + # Note: value is valid if equal to min + if range[0] != RANGE_UNBOUNDED: + if value < range[0]: + ValidationIssueCollector.appendException( + RangeValueError(pname=prop_name, + pvalue=value, + vmin=range[0], + vmax=range[1])) + # Note: value is valid if equal to max + if range[1] != RANGE_UNBOUNDED: + if value > range[1]: + ValidationIssueCollector.appendException( + RangeValueError(pname=prop_name, + pvalue=value, + vmin=range[0], + vmax=range[1])) + return value + + +def validate_map(value): + if not isinstance(value, collections.Mapping): + ValidationIssueCollector.appendException( + ValueError(_('"%s" is not a map.') % value)) + return value + + +def validate_boolean(value): + if isinstance(value, bool): + return value + + if isinstance(value, str): + normalised = value.lower() + if normalised in ['true', 'false']: + return normalised == 'true' + + ValidationIssueCollector.appendException( + ValueError(_('"%s" is not a boolean.') % value)) + + +def validate_timestamp(value): + try: + # Note: we must return our own exception message + # as dateutil's parser 
returns different types / values on + # different systems. OSX, for example, returns a tuple + # containing a different error message than Linux + dateutil.parser.parse(value) + except Exception as e: + original_err_msg = str(e) + log.error(original_err_msg) + ValidationIssueCollector.appendException( + ValueError(_('"%(val)s" is not a valid timestamp. "%(msg)s"') % + {'val': value, 'msg': original_err_msg})) + return + +*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/CapabilityAssignment.java b/src/main/java/org/openecomp/sdc/toscaparser/api/CapabilityAssignment.java deleted file mode 100644 index f3bc2bd..0000000 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/CapabilityAssignment.java +++ /dev/null @@ -1,148 +0,0 @@ -package org.openecomp.sdc.toscaparser.api; - -import java.util.ArrayList; -import java.util.LinkedHashMap; -import java.util.Map; - -import org.openecomp.sdc.toscaparser.api.elements.CapabilityTypeDef; -import org.openecomp.sdc.toscaparser.api.elements.PropertyDef; - -public class CapabilityAssignment { - - private String name; - private LinkedHashMap _properties; - private CapabilityTypeDef _definition; - private LinkedHashMap _customDef; - - public CapabilityAssignment(String cname, - LinkedHashMap cproperties, - CapabilityTypeDef cdefinition, LinkedHashMap customDef) { - name = cname; - _properties = cproperties; - _definition = cdefinition; - _customDef = customDef; - } - - /** - * Get the properties list for capability - * @return list of property objects for capability - */ - public ArrayList getPropertiesObjects() { - // Return a list of property objects - ArrayList properties = new ArrayList(); - LinkedHashMap props = _properties; - if(props != null) { - for(Map.Entry me: props.entrySet()) { - String pname = me.getKey(); - Object pvalue = me.getValue(); - - LinkedHashMap propsDef = _definition.getPropertiesDef(); - if(propsDef != null) { - PropertyDef pd = (PropertyDef)propsDef.get(pname); - 
if(pd != null) { - properties.add(new Property(pname,pvalue,pd.getSchema(), _customDef)); - } - } - } - } - return properties; - } - - /** - * Get the map of properties - * @return map of all properties contains dictionary of property name and property object - */ - public LinkedHashMap getProperties() { - // Return a dictionary of property name-object pairs - LinkedHashMap npps = new LinkedHashMap<>(); - for(Property p: getPropertiesObjects()) { - npps.put(p.getName(),p); - } - return npps; - } - - /** - * Get the property value by name - * @param pname - the property name for capability - * @return the property value for this name - */ - public Object getPropertyValue(String pname) { - // Return the value of a given property name - LinkedHashMap props = getProperties(); - if(props != null && props.get(pname) != null) { - return props.get(name).getValue(); - } - return null; - } - - /** - * Get the name for capability - * @return the name for capability - */ - public String getName() { - return name; - } - - /** - * Get the definition for capability - * @return CapabilityTypeDef - contain definition for capability - */ - public CapabilityTypeDef getDefinition() { - return _definition; - } - - /** - * Set the property for capability - * @param pname - the property name for capability to set - * @param pvalue - the property valiue for capability to set - */ - public void setProperty(String pname,Object pvalue) { - _properties.put(pname,pvalue); - } - - @Override - public String toString() { - return "CapabilityAssignment{" + - "name='" + name + '\'' + - ", _properties=" + _properties + - ", _definition=" + _definition + - '}'; - } -} - -/*python - -from toscaparser.properties import Property - - -class CapabilityAssignment(object): - '''TOSCA built-in capabilities type.''' - - def __init__(self, name, properties, definition): - self.name = name - self._properties = properties - self.definition = definition - - def get_properties_objects(self): - '''Return a list of 
property objects.''' - properties = [] - props = self._properties - if props: - for name, value in props.items(): - props_def = self.definition.get_properties_def() - if props_def and name in props_def: - properties.append(Property(name, value, - props_def[name].schema)) - return properties - - def get_properties(self): - '''Return a dictionary of property name-object pairs.''' - return {prop.name: prop - for prop in self.get_properties_objects()} - - def get_property_value(self, name): - '''Return the value of a given property name.''' - props = self.get_properties() - if props and name in props: - return props[name].value -*/ diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/CapabilityAssignments.java b/src/main/java/org/openecomp/sdc/toscaparser/api/CapabilityAssignments.java deleted file mode 100644 index 3397960..0000000 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/CapabilityAssignments.java +++ /dev/null @@ -1,51 +0,0 @@ -package org.openecomp.sdc.toscaparser.api; - -import org.openecomp.sdc.toscaparser.api.CapabilityAssignment; - -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.stream.Collectors; - -public class CapabilityAssignments { - - private Map capabilityAssignments; - - public CapabilityAssignments(Map capabilityAssignments) { - this.capabilityAssignments = capabilityAssignments != null ? new HashMap<>(capabilityAssignments) : new HashMap<>(); - } - - /** - * Get all capability assignments for node template.
- * This object can be either the original one, holding all capability assignments for this node template,or a filtered one, holding a filtered subset.
- * @return list of capability assignments for the node template.
- * If there are no capability assignments, empty list is returned. - */ - public List getAll() { - return new ArrayList<>(capabilityAssignments.values()); - } - - /** - * Filter capability assignments by capability tosca type. - * @param type - The tosca type of capability assignments. - * @return CapabilityAssignments object, containing capability assignments of this type.
- * If no such found, filtering will result in an empty collection. - */ - public CapabilityAssignments getCapabilitiesByType(String type) { - Map capabilityAssignmentsMap = capabilityAssignments.entrySet().stream() - .filter(cap -> cap.getValue().getDefinition().getType().equals(type)).collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); - - return new CapabilityAssignments(capabilityAssignmentsMap); - } - - /** - * Get capability assignment by capability name. - * @param name - The name of capability assignment - * @return capability assignment with this name, or null if no such capability assignment was found. - */ - public CapabilityAssignment getCapabilityByName(String name) { - return capabilityAssignments.get(name); - } - -} diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/DataEntity.java b/src/main/java/org/openecomp/sdc/toscaparser/api/DataEntity.java deleted file mode 100644 index 08e154f..0000000 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/DataEntity.java +++ /dev/null @@ -1,449 +0,0 @@ -package org.openecomp.sdc.toscaparser.api; - -import java.util.ArrayList; -import java.util.LinkedHashMap; -import java.util.List; - -import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; -import org.openecomp.sdc.toscaparser.api.elements.*; -import org.openecomp.sdc.toscaparser.api.elements.constraints.Constraint; -import org.openecomp.sdc.toscaparser.api.elements.constraints.Schema; -import org.openecomp.sdc.toscaparser.api.functions.Function; -import org.openecomp.sdc.toscaparser.api.utils.TOSCAVersionProperty; -import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; -import org.openecomp.sdc.toscaparser.api.utils.ValidateUtils; - -public class DataEntity { - // A complex data value entity - - private LinkedHashMap customDef; - private DataType dataType; - private LinkedHashMap schema; - private Object value; - private String propertyName; - - public DataEntity(String _dataTypeName,Object _valueDict, - 
LinkedHashMap _customDef,String _propName) { - - customDef = _customDef; - dataType = new DataType(_dataTypeName,_customDef); - schema = dataType.getAllProperties(); - value = _valueDict; - propertyName = _propName; - } - - @SuppressWarnings("unchecked") - public Object validate() { - // Validate the value by the definition of the datatype - - // A datatype can not have both 'type' and 'properties' definitions. - // If the datatype has 'type' definition - if(dataType.getValueType() != null) { - value = DataEntity.validateDatatype(dataType.getValueType(),value,null,customDef,null); - Schema schemaCls = new Schema(propertyName,dataType.getDefs()); - for(Constraint constraint: schemaCls.getConstraints()) { - constraint.validate(value); - } - } - // If the datatype has 'properties' definition - else { - if(!(value instanceof LinkedHashMap)) { - //ERROR under investigation - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE001", String.format( - "TypeMismatchError: \"%s\" is not a map. 
The type is \"%s\"", - value.toString(),dataType.getType()))); - - if (value instanceof List && ((List) value).size() > 0) { - value = ((List) value).get(0); - } - - if (!(value instanceof LinkedHashMap)) { - return value; - } - } - - - - LinkedHashMap valueDict = (LinkedHashMap)value; - ArrayList allowedProps = new ArrayList<>(); - ArrayList requiredProps = new ArrayList<>(); - LinkedHashMap defaultProps = new LinkedHashMap<>(); - if(schema != null) { - allowedProps.addAll(schema.keySet()); - for(String name: schema.keySet()) { - PropertyDef propDef = schema.get(name); - if(propDef.isRequired()) { - requiredProps.add(name); - } - if(propDef.getDefault() != null) { - defaultProps.put(name,propDef.getDefault()); - } - } - } - - // check allowed field - for(String valueKey: valueDict.keySet()) { - //1710 devlop JSON validation - if(!("json").equals(dataType.getType()) && !allowedProps.contains(valueKey)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE100", String.format( - "UnknownFieldError: Data value of type \"%s\" contains unknown field \"%s\"", - dataType.getType(),valueKey))); - } - } - - // check default field - for(String defKey: defaultProps.keySet()) { - Object defValue = defaultProps.get(defKey); - if(valueDict.get(defKey) == null) { - valueDict.put(defKey, defValue); - } - - } - - // check missing field - ArrayList missingProp = new ArrayList<>(); - for(String reqKey: requiredProps) { - if(!valueDict.keySet().contains(reqKey)) { - missingProp.add(reqKey); - } - } - if(missingProp.size() > 0) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE003",String.format( - "MissingRequiredFieldError: Data value of type \"%s\" is missing required field(s) \"%s\"", - dataType.getType(),missingProp.toString()))); - } - - // check every field - for(String vname: valueDict.keySet()) { - Object vvalue = valueDict.get(vname); - LinkedHashMap schemaName = _findSchema(vname); - if(schemaName == 
null) { - continue; - } - Schema propSchema = new Schema(vname,schemaName); - // check if field value meets type defined - DataEntity.validateDatatype(propSchema.getType(), - vvalue, - propSchema.getEntrySchema(), - customDef, - null); - - // check if field value meets constraints defined - if(propSchema.getConstraints() != null) { - for(Constraint constraint: propSchema.getConstraints()) { - if(vvalue instanceof ArrayList) { - for(Object val: (ArrayList)vvalue) { - constraint.validate(val); - } - } - else { - constraint.validate(vvalue); - } - } - } - } - } - return value; - } - - private LinkedHashMap _findSchema(String name) { - if(schema != null && schema.get(name) != null) { - return schema.get(name).getSchema(); - } - return null; - } - - public static Object validateDatatype(String type, - Object value, - LinkedHashMap entrySchema, - LinkedHashMap customDef, - String propName) { - // Validate value with given type - - // If type is list or map, validate its entry by entry_schema(if defined) - // If type is a user-defined complex datatype, custom_def is required. 
- - if(Function.isFunction(value)) { - return value; - } - else if (type == null) { - //NOT ANALYZED - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE002", String.format( - "MissingType: Type is missing for value \"%s\"", - value.toString()))); - return value; - } - else if(type.equals(Schema.STRING)) { - return ValidateUtils.validateString(value); - } - else if(type.equals(Schema.INTEGER)) { - return ValidateUtils.validateInteger(value); - } - else if(type.equals(Schema.FLOAT)) { - return ValidateUtils.validateFloat(value); - } - else if(type.equals(Schema.NUMBER)) { - return ValidateUtils.validateNumeric(value); - } - else if(type.equals(Schema.BOOLEAN)) { - return ValidateUtils.validateBoolean(value); - } - else if(type.equals(Schema.RANGE)) { - return ValidateUtils.validateRange(value); - } - else if(type.equals(Schema.TIMESTAMP)) { - ValidateUtils.validateTimestamp(value); - return value; - } - else if(type.equals(Schema.LIST)) { - ValidateUtils.validateList(value); - if(entrySchema != null) { - DataEntity.validateEntry(value,entrySchema,customDef); - } - return value; - } - else if(type.equals(Schema.SCALAR_UNIT_SIZE)) { - return (new ScalarUnitSize(value)).validateScalarUnit(); - } - else if(type.equals(Schema.SCALAR_UNIT_FREQUENCY)) { - return (new ScalarUnitFrequency(value)).validateScalarUnit(); - } - else if(type.equals(Schema.SCALAR_UNIT_TIME)) { - return (new ScalarUnitTime(value)).validateScalarUnit(); - } - else if(type.equals(Schema.VERSION)) { - return (new TOSCAVersionProperty(value)).getVersion(); - } - else if(type.equals(Schema.MAP)) { - ValidateUtils.validateMap(value); - if(entrySchema != null) { - DataEntity.validateEntry(value,entrySchema,customDef); - } - return value; - } - else if(type.equals(Schema.PORTSPEC)) { - // tODO(TBD) bug 1567063, validate source & target as PortDef type - // as complex types not just as integers - PortSpec.validateAdditionalReq(value,propName,customDef); - } - else { - 
DataEntity data = new DataEntity(type,value,customDef,null); - return data.validate(); - } - - return value; - } - - @SuppressWarnings("unchecked") - public static Object validateEntry(Object value, - LinkedHashMap entrySchema, - LinkedHashMap customDef) { - - // Validate entries for map and list - Schema schema = new Schema(null,entrySchema); - Object valueob = value; - ArrayList valueList = null; - if(valueob instanceof LinkedHashMap) { - valueList = new ArrayList(((LinkedHashMap)valueob).values()); - } - else if(valueob instanceof ArrayList) { - valueList = (ArrayList)valueob; - } - if(valueList != null) { - for(Object v: valueList) { - DataEntity.validateDatatype(schema.getType(),v,schema.getEntrySchema(),customDef,null); - if(schema.getConstraints() != null) { - for(Constraint constraint: schema.getConstraints()) { - constraint.validate(v); - } - } - } - } - return value; - } - - @Override - public String toString() { - return "DataEntity{" + - "customDef=" + customDef + - ", dataType=" + dataType + - ", schema=" + schema + - ", value=" + value + - ", propertyName='" + propertyName + '\'' + - '}'; - } -} - -/*python - -from toscaparser.common.exception import ValidationIssueCollector -from toscaparser.common.exception import MissingRequiredFieldError -from toscaparser.common.exception import TypeMismatchError -from toscaparser.common.exception import UnknownFieldError -from toscaparser.elements.constraints import Schema -from toscaparser.elements.datatype import DataType -from toscaparser.elements.portspectype import PortSpec -from toscaparser.elements.scalarunit import ScalarUnit_Frequency -from toscaparser.elements.scalarunit import ScalarUnit_Size -from toscaparser.elements.scalarunit import ScalarUnit_Time -from toscaparser.utils.gettextutils import _ -from toscaparser.utils import validateutils - - -class DataEntity(object): - '''A complex data value entity.''' - - def __init__(self, datatypename, value_dict, custom_def=None, - prop_name=None): - 
self.custom_def = custom_def - self.datatype = DataType(datatypename, custom_def) - self.schema = self.datatype.get_all_properties() - self.value = value_dict - self.property_name = prop_name - - def validate(self): - '''Validate the value by the definition of the datatype.''' - - # A datatype can not have both 'type' and 'properties' definitions. - # If the datatype has 'type' definition - if self.datatype.value_type: - self.value = DataEntity.validate_datatype(self.datatype.value_type, - self.value, - None, - self.custom_def) - schema = Schema(self.property_name, self.datatype.defs) - for constraint in schema.constraints: - constraint.validate(self.value) - # If the datatype has 'properties' definition - else: - if not isinstance(self.value, dict): - ValidationIssueCollector.appendException( - TypeMismatchError(what=self.value, - type=self.datatype.type)) - allowed_props = [] - required_props = [] - default_props = {} - if self.schema: - allowed_props = self.schema.keys() - for name, prop_def in self.schema.items(): - if prop_def.required: - required_props.append(name) - if prop_def.default: - default_props[name] = prop_def.default - - # check allowed field - for value_key in list(self.value.keys()): - if value_key not in allowed_props: - ValidationIssueCollector.appendException( - UnknownFieldError(what=(_('Data value of type "%s"') - % self.datatype.type), - field=value_key)) - - # check default field - for def_key, def_value in list(default_props.items()): - if def_key not in list(self.value.keys()): - self.value[def_key] = def_value - - # check missing field - missingprop = [] - for req_key in required_props: - if req_key not in list(self.value.keys()): - missingprop.append(req_key) - if missingprop: - ValidationIssueCollector.appendException( - MissingRequiredFieldError( - what=(_('Data value of type "%s"') - % self.datatype.type), required=missingprop)) - - # check every field - for name, value in list(self.value.items()): - schema_name = 
self._find_schema(name) - if not schema_name: - continue - prop_schema = Schema(name, schema_name) - # check if field value meets type defined - DataEntity.validate_datatype(prop_schema.type, value, - prop_schema.entry_schema, - self.custom_def) - # check if field value meets constraints defined - if prop_schema.constraints: - for constraint in prop_schema.constraints: - if isinstance(value, list): - for val in value: - constraint.validate(val) - else: - constraint.validate(value) - - return self.value - - def _find_schema(self, name): - if self.schema and name in self.schema.keys(): - return self.schema[name].schema - - @staticmethod - def validate_datatype(type, value, entry_schema=None, custom_def=None, - prop_name=None): - '''Validate value with given type. - - If type is list or map, validate its entry by entry_schema(if defined) - If type is a user-defined complex datatype, custom_def is required. - ''' - from toscaparser.functions import is_function - if is_function(value): - return value - if type == Schema.STRING: - return validateutils.validate_string(value) - elif type == Schema.INTEGER: - return validateutils.validate_integer(value) - elif type == Schema.FLOAT: - return validateutils.validate_float(value) - elif type == Schema.NUMBER: - return validateutils.validate_numeric(value) - elif type == Schema.BOOLEAN: - return validateutils.validate_boolean(value) - elif type == Schema.RANGE: - return validateutils.validate_range(value) - elif type == Schema.TIMESTAMP: - validateutils.validate_timestamp(value) - return value - elif type == Schema.LIST: - validateutils.validate_list(value) - if entry_schema: - DataEntity.validate_entry(value, entry_schema, custom_def) - return value - elif type == Schema.SCALAR_UNIT_SIZE: - return ScalarUnit_Size(value).validate_scalar_unit() - elif type == Schema.SCALAR_UNIT_FREQUENCY: - return ScalarUnit_Frequency(value).validate_scalar_unit() - elif type == Schema.SCALAR_UNIT_TIME: - return 
ScalarUnit_Time(value).validate_scalar_unit() - elif type == Schema.VERSION: - return validateutils.TOSCAVersionProperty(value).get_version() - elif type == Schema.MAP: - validateutils.validate_map(value) - if entry_schema: - DataEntity.validate_entry(value, entry_schema, custom_def) - return value - elif type == Schema.PORTSPEC: - # tODO(TBD) bug 1567063, validate source & target as PortDef type - # as complex types not just as integers - PortSpec.validate_additional_req(value, prop_name, custom_def) - else: - data = DataEntity(type, value, custom_def) - return data.validate() - - @staticmethod - def validate_entry(value, entry_schema, custom_def=None): - '''Validate entries for map and list.''' - schema = Schema(None, entry_schema) - valuelist = value - if isinstance(value, dict): - valuelist = list(value.values()) - for v in valuelist: - DataEntity.validate_datatype(schema.type, v, schema.entry_schema, - custom_def) - if schema.constraints: - for constraint in schema.constraints: - constraint.validate(v) - return value -*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/EntityTemplate.java b/src/main/java/org/openecomp/sdc/toscaparser/api/EntityTemplate.java deleted file mode 100644 index ed19d88..0000000 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/EntityTemplate.java +++ /dev/null @@ -1,850 +0,0 @@ -package org.openecomp.sdc.toscaparser.api; - -import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; -import org.openecomp.sdc.toscaparser.api.elements.*; -import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; - -import java.util.ArrayList; -import java.util.LinkedHashMap; -import java.util.Map; - -public abstract class EntityTemplate { - // Base class for TOSCA templates - - protected static final String DERIVED_FROM = "derived_from"; - protected static final String PROPERTIES = "properties"; - protected static final String REQUIREMENTS = "requirements"; - protected static final String 
INTERFACES = "interfaces"; - protected static final String CAPABILITIES = "capabilities"; - protected static final String TYPE = "type"; - protected static final String DESCRIPTION = "description"; - protected static final String DIRECTIVES = "directives"; - protected static final String ATTRIBUTES = "attributes"; - protected static final String ARTIFACTS = "artifacts"; - protected static final String NODE_FILTER = "node_filter"; - protected static final String COPY = "copy"; - - protected static final String SECTIONS[] = { - DERIVED_FROM, PROPERTIES, REQUIREMENTS,INTERFACES, - CAPABILITIES, TYPE, DESCRIPTION, DIRECTIVES, - ATTRIBUTES, ARTIFACTS, NODE_FILTER, COPY}; - - private static final String NODE = "node"; - private static final String CAPABILITY = "capability"; - private static final String RELATIONSHIP = "relationship"; - private static final String OCCURRENCES = "occurrences"; - - protected static final String REQUIREMENTS_SECTION[] = { - NODE, CAPABILITY, RELATIONSHIP, OCCURRENCES, NODE_FILTER}; - - //# Special key names - private static final String METADATA = "metadata"; - protected static final String SPECIAL_SECTIONS[] = {METADATA}; - - protected String name; - protected LinkedHashMap entityTpl; - protected LinkedHashMap customDef; - protected StatefulEntityType typeDefinition; - private ArrayList _properties; - private ArrayList _interfaces; - private ArrayList _requirements; - private ArrayList _capabilities; - - // dummy constructor for subclasses that don't want super - public EntityTemplate() { - return; - } - - @SuppressWarnings("unchecked") - public EntityTemplate(String _name, - LinkedHashMap _template, - String _entityName, - LinkedHashMap _customDef) { - name = _name; - entityTpl = _template; - customDef = _customDef; - _validateField(entityTpl); - String type = (String)entityTpl.get("type"); - UnsupportedType.validateType(type); - if(_entityName.equals("node_type")) { - if(type != null) { - typeDefinition = new NodeType(type, customDef); - 
} - else { - typeDefinition = null; - } - } - if(_entityName.equals("relationship_type")) { - Object relationship = _template.get("relationship"); - type = null; - if(relationship != null && relationship instanceof LinkedHashMap) { - type = (String)((LinkedHashMap)relationship).get("type"); - } - else if(relationship instanceof String) { - type = (String)entityTpl.get("relationship"); - } - else { - type = (String)entityTpl.get("type"); - } - UnsupportedType.validateType(type); - typeDefinition = new RelationshipType(type,null, customDef); - } - if(_entityName.equals("policy_type")) { - if(type == null) { - //msg = (_('Policy definition of "%(pname)s" must have' - // ' a "type" ''attribute.') % dict(pname=name)) - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE140", String.format( - "ValidationError: Policy definition of \"%s\" must have a \"type\" attribute",name))); - } - typeDefinition = new PolicyType(type, customDef); - } - if(_entityName.equals("group_type")) { - if(type != null) { - typeDefinition = new GroupType(type, customDef); - } - else { - typeDefinition = null; - } - } - _properties = null; - _interfaces = null; - _requirements = null; - _capabilities = null; - } - - public String getType() { - if(typeDefinition != null) { - String clType = typeDefinition.getClass().getSimpleName(); - if(clType.equals("NodeType")) { - return (String)((NodeType)typeDefinition).getType(); - } - else if(clType.equals("PolicyType")) { - return (String)((PolicyType)typeDefinition).getType(); - } - else if(clType.equals("GroupType")) { - return (String)((GroupType)typeDefinition).getType(); - } - else if(clType.equals("RelationshipType")) { - return (String)((RelationshipType)typeDefinition).getType(); - } - } - return null; - } - - public Object getParentType() { - if(typeDefinition != null) { - String clType = typeDefinition.getClass().getSimpleName(); - if(clType.equals("NodeType")) { - return 
((NodeType)typeDefinition).getParentType(); - } - else if(clType.equals("PolicyType")) { - return ((PolicyType)typeDefinition).getParentType(); - } - else if(clType.equals("GroupType")) { - return ((GroupType)typeDefinition).getParentType(); - } - else if(clType.equals("RelationshipType")) { - return ((RelationshipType)typeDefinition).getParentType(); - } - } - return null; - } - - @SuppressWarnings("unchecked") - public RequirementAssignments getRequirements() { - if(_requirements == null) { - _requirements = _createRequirements(); - } - return new RequirementAssignments(_requirements); - } - - private ArrayList _createRequirements() { - ArrayList reqs = new ArrayList<>(); - ArrayList> requirements = (ArrayList>) - typeDefinition.getValue(REQUIREMENTS,entityTpl,false); - if(requirements == null) { - requirements = new ArrayList<>(); - } - for (Map req: requirements) { - for(String reqName: req.keySet()) { - Object reqItem = req.get(reqName); - if(reqItem instanceof LinkedHashMap) { - Object rel = ((LinkedHashMap)reqItem).get("relationship"); -// LinkedHashMap relationship = rel instanceof LinkedHashMap ? (LinkedHashMap) rel : null; - String nodeName = ((LinkedHashMap)reqItem).get("node").toString(); - Object capability = ((LinkedHashMap)reqItem).get("capability"); - String capabilityString = capability != null ? 
capability.toString() : null; - - reqs.add(new RequirementAssignment(reqName, nodeName, capabilityString, rel)); - } else if (reqItem instanceof String) { //short notation - String nodeName = String.valueOf(reqItem); - reqs.add(new RequirementAssignment(reqName, nodeName)); - } - } - } - return reqs; - } - - public ArrayList getPropertiesObjects() { - // Return properties objects for this template - if(_properties ==null) { - _properties = _createProperties(); - } - return _properties; - } - - public LinkedHashMap getProperties() { - LinkedHashMap props = new LinkedHashMap<>(); - for(Property po: getPropertiesObjects()) { - props.put(((Property)po).getName(),po); - } - return props; - } - - public Object getPropertyValue(String name) { - LinkedHashMap props = getProperties(); - Property p = (Property)props.get(name); - return p != null ? p.getValue() : null; - } - - public ArrayList getInterfaces() { - if(_interfaces == null) { - _interfaces = _createInterfaces(); - } - return _interfaces; - } - - public ArrayList getCapabilitiesObjects() { - // Return capabilities objects for this template - if(_capabilities == null) { - _capabilities = _createCapabilities(); - } - return _capabilities; - - } - - public CapabilityAssignments getCapabilities() { - LinkedHashMap caps = new LinkedHashMap(); - for(CapabilityAssignment cap: getCapabilitiesObjects()) { - caps.put(cap.getName(),cap); - } - return new CapabilityAssignments(caps); - } - - public boolean isDerivedFrom(String typeStr) { - // Returns true if this object is derived from 'type_str'. 
- // False otherwise - - if(getType() == null) { - return false; - } - else if(getType().equals(typeStr)) { - return true; - } - else if(getParentType() != null) { - return ((EntityType)getParentType()).isDerivedFrom(typeStr); - } - return false; - } - - @SuppressWarnings("unchecked") - private ArrayList _createCapabilities() { - ArrayList capability = new ArrayList(); - LinkedHashMap caps = (LinkedHashMap) - ((EntityType)typeDefinition).getValue(CAPABILITIES,entityTpl,true); - if(caps != null) { - //?!? getCapabilities defined only for NodeType... - LinkedHashMap capabilities = ((NodeType)typeDefinition).getCapabilities(); - for(Map.Entry me: caps.entrySet()) { - String name = me. getKey(); - LinkedHashMap props = (LinkedHashMap)me.getValue(); - if(capabilities.get(name) != null) { - CapabilityTypeDef c = capabilities.get(name); // a CapabilityTypeDef - LinkedHashMap properties = new LinkedHashMap(); - // first use the definition default value - LinkedHashMap cprops = c.getProperties(); - if(cprops != null) { - for(Map.Entry cpe: cprops.entrySet()) { - String propertyName = cpe.getKey(); - LinkedHashMap propertyDef = (LinkedHashMap)cpe.getValue(); - Object dob = propertyDef.get("default"); - if(dob != null) { - properties.put(propertyName, dob); - - } - } - } - // then update (if available) with the node properties - LinkedHashMap pp = (LinkedHashMap)props.get("properties"); - if(pp != null) { - properties.putAll(pp); - } - CapabilityAssignment cap = new CapabilityAssignment(name, properties, c, customDef); - capability.add(cap); - } - } - } - return capability; - } - - protected void _validateProperties(LinkedHashMap template,StatefulEntityType entityType) { - @SuppressWarnings("unchecked") - LinkedHashMap properties = (LinkedHashMap)entityType.getValue(PROPERTIES,template,false); - _commonValidateProperties(entityType,properties); - } - - protected void _validateCapabilities() { - //BUG??? getCapabilities only defined in NodeType... 
- LinkedHashMap typeCapabilities = ((NodeType)typeDefinition).getCapabilities(); - ArrayList allowedCaps = new ArrayList(); - if(typeCapabilities != null) { - allowedCaps.addAll(typeCapabilities.keySet()); - } - @SuppressWarnings("unchecked") - LinkedHashMap capabilities = (LinkedHashMap) - ((EntityType)typeDefinition).getValue(CAPABILITIES, entityTpl, false); - if(capabilities != null) { - _commonValidateField(capabilities, allowedCaps, "capabilities"); - _validateCapabilitiesProperties(capabilities); - } - } - - @SuppressWarnings("unchecked") - private void _validateCapabilitiesProperties(LinkedHashMap capabilities) { - for(Map.Entry me: capabilities.entrySet()) { - String cap = me.getKey(); - LinkedHashMap props = (LinkedHashMap)me.getValue(); - CapabilityAssignment capability = getCapability(cap); - if(capability == null) { - continue; - } - CapabilityTypeDef capabilitydef = capability.getDefinition(); - _commonValidateProperties(capabilitydef,(LinkedHashMap)props.get(PROPERTIES)); - - // validating capability properties values - for(Property prop: getCapability(cap).getPropertiesObjects()) { - prop.validate(); - - if(cap.equals("scalable") && prop.getName().equals("default_instances")) { - LinkedHashMap propDict = (LinkedHashMap)props.get(PROPERTIES); - int minInstances = (int)propDict.get("min_instances"); - int maxInstances = (int)propDict.get("max_instances"); - int defaultInstances = (int)propDict.get("default_instances"); - if(defaultInstances < minInstances || defaultInstances > maxInstances) { - //err_msg = ('"properties" of template "%s": ' - // '"default_instances" value is not between ' - // '"min_instances" and "max_instances".' 
% - // self.name) - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE141", String.format( - "ValidationError: \"properties\" of template \"%s\": \"default_instances\" value is not between \"min_instances\" and \"max_instances\"", - name))); - } - } - } - } - } - - private void _commonValidateProperties(StatefulEntityType entityType,LinkedHashMap properties) { - ArrayList allowedProps = new ArrayList(); - ArrayList requiredProps = new ArrayList(); - for(PropertyDef p: entityType.getPropertiesDefObjects()) { - allowedProps.add(p.getName()); - // If property is 'required' and has no 'default' value then record - if(p.isRequired() && p.getDefault() == null) { - requiredProps.add(p.getName()); - } - } - // validate all required properties have values - if(properties != null) { - ArrayList reqPropsNoValueOrDefault = new ArrayList(); - _commonValidateField(properties, allowedProps, "properties"); - // make sure it's not missing any property required by a tosca type - for(String r: requiredProps) { - if(properties.get(r) == null) { - reqPropsNoValueOrDefault.add(r); - } - } - // Required properties found without value or a default value - if(!reqPropsNoValueOrDefault.isEmpty()) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE003", String.format( - "MissingRequiredFieldError: properties of template \"%s\" are missing field(s): %s", - name,reqPropsNoValueOrDefault.toString()))); - } - } - else { - // Required properties in schema, but not in template - if(!requiredProps.isEmpty()) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE004", String.format( - "MissingRequiredFieldError2: properties of template \"%s\" are missing field(s): %s", - name,requiredProps.toString()))); - } - } - } - - @SuppressWarnings("unchecked") - private void _validateField(LinkedHashMap template) { - if(!(template instanceof LinkedHashMap)) { - 
ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE142", String.format( - "MissingRequiredFieldError: Template \"%s\" is missing required field \"%s\"",name,TYPE))); - return;//??? - } - boolean bBad = false; - Object relationship = ((LinkedHashMap)template).get("relationship"); - if(relationship != null) { - if(!(relationship instanceof String)) { - bBad = (((LinkedHashMap)relationship).get(TYPE) == null); - } - else if(relationship instanceof String) { - bBad = (template.get("relationship") == null); - } - } - else { - bBad = (template.get(TYPE) == null); - } - if(bBad) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE143", String.format( - "MissingRequiredFieldError: Template \"%s\" is missing required field \"%s\"",name,TYPE))); - } - } - - protected void _commonValidateField(LinkedHashMap schema, ArrayList allowedList,String section) { - for(String sname: schema.keySet()) { - boolean bFound = false; - for(String allowed: allowedList) { - if(sname.equals(allowed)) { - bFound = true; - break; - } - } - if(!bFound) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE144", String.format( - "UnknownFieldError: Section \"%s\" of template \"%s\" contains unknown field \"%s\"",section,name,sname))); - } - } - - } - - @SuppressWarnings("unchecked") - private ArrayList _createProperties() { - ArrayList props = new ArrayList(); - LinkedHashMap properties = (LinkedHashMap) - ((EntityType)typeDefinition).getValue(PROPERTIES,entityTpl,false); - if(properties == null) { - properties = new LinkedHashMap(); - } - for(Map.Entry me: properties.entrySet()) { - String pname = me.getKey(); - Object pvalue = me.getValue(); - LinkedHashMap propsDef = ((StatefulEntityType)typeDefinition).getPropertiesDef(); - if(propsDef != null && propsDef.get(pname) != null) { - PropertyDef pd = (PropertyDef)propsDef.get(pname); - Property prop = new 
Property(pname,pvalue,pd.getSchema(),customDef); - props.add(prop); - } - } - ArrayList pds = ((StatefulEntityType)typeDefinition).getPropertiesDefObjects(); - for(Object pdo: pds) { - PropertyDef pd = (PropertyDef)pdo; - if(pd.getDefault() != null && properties.get(pd.getName()) == null) { - Property prop = new Property(pd.getName(),pd.getDefault(),pd.getSchema(),customDef); - props.add(prop); - } - } - return props; - } - - @SuppressWarnings("unchecked") - private ArrayList _createInterfaces() { - ArrayList interfaces = new ArrayList<>(); - LinkedHashMap typeInterfaces = new LinkedHashMap(); - if(typeDefinition instanceof RelationshipType) { - if(entityTpl instanceof LinkedHashMap) { - typeInterfaces = (LinkedHashMap)entityTpl.get(INTERFACES); - if(typeInterfaces == null) { - for(String relName: entityTpl.keySet()) { - Object relValue = entityTpl.get(relName); - if(!relName.equals("type")) { - Object relDef = relValue; - LinkedHashMap rel = null; - if(relDef instanceof LinkedHashMap) { - Object relob = ((LinkedHashMap)relDef).get("relationship"); - if(relob instanceof LinkedHashMap) { - rel = (LinkedHashMap)relob; - } - } - if(rel != null) { - if(rel.get(INTERFACES) != null) { - typeInterfaces = (LinkedHashMap)rel.get(INTERFACES); - break; - } - } - } - } - } - } - } - else { - typeInterfaces = (LinkedHashMap) - ((EntityType)typeDefinition).getValue(INTERFACES,entityTpl,false); - } - if(typeInterfaces != null) { - for(Map.Entry me: typeInterfaces.entrySet()) { - String interfaceType = me.getKey(); - LinkedHashMap value = (LinkedHashMap)me.getValue(); - for(Map.Entry ve: value.entrySet()) { - String op = ve.getKey(); - Object opDef = ve.getValue(); - InterfacesDef iface = new InterfacesDef((EntityType)typeDefinition, - interfaceType, - this, - op, - opDef); - interfaces.add(iface); - } - - } - } - return interfaces; - } - - public CapabilityAssignment getCapability(String name) { - // Provide named capability - // :param name: name of capability - // :return: 
capability object if found, None otherwise - return getCapabilities().getCapabilityByName(name); - } - - // getter - public String getName() { - return name; - } - - public StatefulEntityType getTypeDefinition() { - return typeDefinition; - } - - public LinkedHashMap getCustomDef() { - return customDef; - } - - @Override - public String toString() { - return "EntityTemplate{" + - "name='" + name + '\'' + - ", entityTpl=" + entityTpl + - ", customDef=" + customDef + - ", typeDefinition=" + typeDefinition + - ", _properties=" + _properties + - ", _interfaces=" + _interfaces + - ", _requirements=" + _requirements + - ", _capabilities=" + _capabilities + - '}'; - } -} - -/*python - -class EntityTemplate(object): - '''Base class for TOSCA templates.''' - - SECTIONS = (DERIVED_FROM, PROPERTIES, REQUIREMENTS, - INTERFACES, CAPABILITIES, TYPE, DESCRIPTION, DIRECTIVES, - ATTRIBUTES, ARTIFACTS, NODE_FILTER, COPY) = \ - ('derived_from', 'properties', 'requirements', 'interfaces', - 'capabilities', 'type', 'description', 'directives', - 'attributes', 'artifacts', 'node_filter', 'copy') - REQUIREMENTS_SECTION = (NODE, CAPABILITY, RELATIONSHIP, OCCURRENCES, NODE_FILTER) = \ - ('node', 'capability', 'relationship', - 'occurrences', 'node_filter') - # Special key names - SPECIAL_SECTIONS = (METADATA) = ('metadata') - - def __init__(self, name, template, entity_name, custom_def=None): - self.name = name - self.entity_tpl = template - self.custom_def = custom_def - self._validate_field(self.entity_tpl) - type = self.entity_tpl.get('type') - UnsupportedType.validate_type(type) - if entity_name == 'node_type': - self.type_definition = NodeType(type, custom_def) \ - if type is not None else None - if entity_name == 'relationship_type': - relationship = template.get('relationship') - type = None - if relationship and isinstance(relationship, dict): - type = relationship.get('type') - elif isinstance(relationship, str): - type = self.entity_tpl['relationship'] - else: - type = 
self.entity_tpl['type'] - UnsupportedType.validate_type(type) - self.type_definition = RelationshipType(type, - None, custom_def) - if entity_name == 'policy_type': - if not type: - msg = (_('Policy definition of "%(pname)s" must have' - ' a "type" ''attribute.') % dict(pname=name)) - ValidationIssueCollector.appendException( - ValidationError(msg)) - - self.type_definition = PolicyType(type, custom_def) - if entity_name == 'group_type': - self.type_definition = GroupType(type, custom_def) \ - if type is not None else None - self._properties = None - self._interfaces = None - self._requirements = None - self._capabilities = None - - @property - def type(self): - if self.type_definition: - return self.type_definition.type - - @property - def parent_type(self): - if self.type_definition: - return self.type_definition.parent_type - - @property - def requirements(self): - if self._requirements is None: - self._requirements = self.type_definition.get_value( - self.REQUIREMENTS, - self.entity_tpl) or [] - return self._requirements - - def get_properties_objects(self): - '''Return properties objects for this template.''' - if self._properties is None: - self._properties = self._create_properties() - return self._properties - - def get_properties(self): - '''Return a dictionary of property name-object pairs.''' - return {prop.name: prop - for prop in self.get_properties_objects()} - - def get_property_value(self, name): - '''Return the value of a given property name.''' - props = self.get_properties() - if props and name in props.keys(): - return props[name].value - - @property - def interfaces(self): - if self._interfaces is None: - self._interfaces = self._create_interfaces() - return self._interfaces - - def get_capabilities_objects(self): - '''Return capabilities objects for this template.''' - if not self._capabilities: - self._capabilities = self._create_capabilities() - return self._capabilities - - def get_capabilities(self): - '''Return a dictionary of capability 
name-object pairs.''' - return {cap.name: cap - for cap in self.get_capabilities_objects()} - - def is_derived_from(self, type_str): - '''Check if object inherits from the given type. - - Returns true if this object is derived from 'type_str'. - False otherwise. - ''' - if not self.type: - return False - elif self.type == type_str: - return True - elif self.parent_type: - return self.parent_type.is_derived_from(type_str) - else: - return False - - def _create_capabilities(self): - capability = [] - caps = self.type_definition.get_value(self.CAPABILITIES, - self.entity_tpl, True) - if caps: - for name, props in caps.items(): - capabilities = self.type_definition.get_capabilities() - if name in capabilities.keys(): - c = capabilities[name] - properties = {} - # first use the definition default value - if c.properties: - for property_name in c.properties.keys(): - prop_def = c.properties[property_name] - if 'default' in prop_def: - properties[property_name] = prop_def['default'] - # then update (if available) with the node properties - if 'properties' in props and props['properties']: - properties.update(props['properties']) - - cap = CapabilityAssignment(name, properties, c) - capability.append(cap) - return capability - - def _validate_properties(self, template, entitytype): - properties = entitytype.get_value(self.PROPERTIES, template) - self._common_validate_properties(entitytype, properties) - - def _validate_capabilities(self): - type_capabilities = self.type_definition.get_capabilities() - allowed_caps = \ - type_capabilities.keys() if type_capabilities else [] - capabilities = self.type_definition.get_value(self.CAPABILITIES, - self.entity_tpl) - if capabilities: - self._common_validate_field(capabilities, allowed_caps, - 'capabilities') - self._validate_capabilities_properties(capabilities) - - def _validate_capabilities_properties(self, capabilities): - for cap, props in capabilities.items(): - capability = self.get_capability(cap) - if not capability: - 
continue - capabilitydef = capability.definition - self._common_validate_properties(capabilitydef, - props[self.PROPERTIES]) - - # validating capability properties values - for prop in self.get_capability(cap).get_properties_objects(): - prop.validate() - - # tODO(srinivas_tadepalli): temporary work around to validate - # default_instances until standardized in specification - if cap == "scalable" and prop.name == "default_instances": - prop_dict = props[self.PROPERTIES] - min_instances = prop_dict.get("min_instances") - max_instances = prop_dict.get("max_instances") - default_instances = prop_dict.get("default_instances") - if not (min_instances <= default_instances - <= max_instances): - err_msg = ('"properties" of template "%s": ' - '"default_instances" value is not between ' - '"min_instances" and "max_instances".' % - self.name) - ValidationIssueCollector.appendException( - ValidationError(message=err_msg)) - - def _common_validate_properties(self, entitytype, properties): - allowed_props = [] - required_props = [] - for p in entitytype.get_properties_def_objects(): - allowed_props.append(p.name) - # If property is 'required' and has no 'default' value then record - if p.required and p.default is None: - required_props.append(p.name) - # validate all required properties have values - if properties: - req_props_no_value_or_default = [] - self._common_validate_field(properties, allowed_props, - 'properties') - # make sure it's not missing any property required by a tosca type - for r in required_props: - if r not in properties.keys(): - req_props_no_value_or_default.append(r) - # Required properties found without value or a default value - if req_props_no_value_or_default: - ValidationIssueCollector.appendException( - MissingRequiredFieldError( - what='"properties" of template "%s"' % self.name, - required=req_props_no_value_or_default)) - else: - # Required properties in schema, but not in template - if required_props: - 
ValidationIssueCollector.appendException( - MissingRequiredFieldError( - what='"properties" of template "%s"' % self.name, - required=required_props)) - - def _validate_field(self, template): - if not isinstance(template, dict): - ValidationIssueCollector.appendException( - MissingRequiredFieldError( - what='Template "%s"' % self.name, required=self.TYPE)) - try: - relationship = template.get('relationship') - if relationship and not isinstance(relationship, str): - relationship[self.TYPE] - elif isinstance(relationship, str): - template['relationship'] - else: - template[self.TYPE] - except KeyError: - ValidationIssueCollector.appendException( - MissingRequiredFieldError( - what='Template "%s"' % self.name, required=self.TYPE)) - - def _common_validate_field(self, schema, allowedlist, section): - for name in schema: - if name not in allowedlist: - ValidationIssueCollector.appendException( - UnknownFieldError( - what=('"%(section)s" of template "%(nodename)s"' - % {'section': section, 'nodename': self.name}), - field=name)) - - def _create_properties(self): - props = [] - properties = self.type_definition.get_value(self.PROPERTIES, - self.entity_tpl) or {} - for name, value in properties.items(): - props_def = self.type_definition.get_properties_def() - if props_def and name in props_def: - prop = Property(name, value, - props_def[name].schema, self.custom_def) - props.append(prop) - for p in self.type_definition.get_properties_def_objects(): - if p.default is not None and p.name not in properties.keys(): - prop = Property(p.name, p.default, p.schema, self.custom_def) - props.append(prop) - return props - - def _create_interfaces(self): - interfaces = [] - type_interfaces = None - if isinstance(self.type_definition, RelationshipType): - if isinstance(self.entity_tpl, dict): - if self.INTERFACES in self.entity_tpl: - type_interfaces = self.entity_tpl[self.INTERFACES] - else: - for rel_def, value in self.entity_tpl.items(): - if rel_def != 'type': - rel_def = 
self.entity_tpl.get(rel_def) - rel = None - if isinstance(rel_def, dict): - rel = rel_def.get('relationship') - if rel: - if self.INTERFACES in rel: - type_interfaces = rel[self.INTERFACES] - break - else: - type_interfaces = self.type_definition.get_value(self.INTERFACES, - self.entity_tpl) - if type_interfaces: - for interface_type, value in type_interfaces.items(): - for op, op_def in value.items(): - iface = InterfacesDef(self.type_definition, - interfacetype=interface_type, - node_template=self, - name=op, - value=op_def) - interfaces.append(iface) - return interfaces - - def get_capability(self, name): - """Provide named capability - - :param name: name of capability - :return: capability object if found, None otherwise - """ - caps = self.get_capabilities() - if caps and name in caps.keys(): - return caps[name] -*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/Group.java b/src/main/java/org/openecomp/sdc/toscaparser/api/Group.java deleted file mode 100644 index d183ac7..0000000 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/Group.java +++ /dev/null @@ -1,138 +0,0 @@ -package org.openecomp.sdc.toscaparser.api; - -import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; - -import java.util.ArrayList; -import java.util.LinkedHashMap; -import java.util.Map; - -import org.openecomp.sdc.toscaparser.api.elements.Metadata; -import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; -import org.openecomp.sdc.toscaparser.api.utils.ValidateUtils; - -public class Group extends EntityTemplate { - - private static final String TYPE = "type"; - private static final String METADATA = "metadata"; - private static final String DESCRIPTION = "description"; - private static final String PROPERTIES = "properties"; - private static final String MEMBERS = "members"; - private static final String INTERFACES = "interfaces"; - private static final String SECTIONS[] = { - TYPE, METADATA, DESCRIPTION, PROPERTIES, 
MEMBERS, INTERFACES}; - - private String name; - LinkedHashMap tpl; - ArrayList memberNodes; - LinkedHashMap customDef; - Metadata metaData; - - - public Group(String _name, LinkedHashMap _templates, - ArrayList _memberNodes, - LinkedHashMap _customDef) { - super(_name, _templates, "group_type", _customDef); - - name = _name; - tpl = _templates; - if(tpl.get(METADATA) != null) { - Object metadataObject = tpl.get(METADATA); - ValidateUtils.validateMap(metadataObject); - metaData = new Metadata((Map)metadataObject); - } - memberNodes = _memberNodes; - _validateKeys(); - } - - public Metadata getMetadata() { - return metaData; - } - - public ArrayList getMembers() { - return (ArrayList)entityTpl.get("members"); - } - - public String getDescription() { - return (String)entityTpl.get("description"); - - } - - public ArrayList getMemberNodes() { - return memberNodes; - } - - private void _validateKeys() { - for(String key: entityTpl.keySet()) { - boolean bFound = false; - for(String sect: SECTIONS) { - if(key.equals(sect)) { - bFound = true; - break; - } - } - if(!bFound) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE183", String.format( - "UnknownFieldError: Groups \"%s\" contains unknown field \"%s\"", - name,key))); - } - } - } - - @Override - public String toString() { - return "Group{" + - "name='" + name + '\'' + - ", tpl=" + tpl + - ", memberNodes=" + memberNodes + - ", customDef=" + customDef + - ", metaData=" + metaData + - '}'; - } -} - -/*python - -from toscaparser.common.exception import ValidationIssueCollector -from toscaparser.common.exception import UnknownFieldError -from toscaparser.entity_template import EntityTemplate -from toscaparser.utils import validateutils - -SECTIONS = (TYPE, METADATA, DESCRIPTION, PROPERTIES, MEMBERS, INTERFACES) = \ - ('type', 'metadata', 'description', - 'properties', 'members', 'interfaces') - - -class Group(EntityTemplate): - - def __init__(self, name, group_templates, 
member_nodes, custom_defs=None): - super(Group, self).__init__(name, - group_templates, - 'group_type', - custom_defs) - self.name = name - self.tpl = group_templates - self.meta_data = None - if self.METADATA in self.tpl: - self.meta_data = self.tpl.get(self.METADATA) - validateutils.validate_map(self.meta_data) - self.member_nodes = member_nodes - self._validate_keys() - - @property - def members(self): - return self.entity_tpl.get('members') - - @property - def description(self): - return self.entity_tpl.get('description') - - def get_member_nodes(self): - return self.member_nodes - - def _validate_keys(self): - for key in self.entity_tpl.keys(): - if key not in SECTIONS: - ValidationIssueCollector.appendException( - UnknownFieldError(what='Groups "%s"' % self.name, - field=key)) -*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/ImportsLoader.java b/src/main/java/org/openecomp/sdc/toscaparser/api/ImportsLoader.java deleted file mode 100644 index b2a0da7..0000000 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/ImportsLoader.java +++ /dev/null @@ -1,746 +0,0 @@ -package org.openecomp.sdc.toscaparser.api; - -import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; - -import org.openecomp.sdc.toscaparser.api.elements.TypeValidation; -import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; -import org.openecomp.sdc.toscaparser.api.utils.UrlUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.yaml.snakeyaml.Yaml; - -import java.io.*; -import java.net.URL; -import java.nio.file.Paths; -import java.util.*; - -public class ImportsLoader { - - private static Logger log = LoggerFactory.getLogger(ImportsLoader.class.getName()); - private static final String FILE = "file"; - private static final String REPOSITORY = "repository"; - private static final String NAMESPACE_URI = "namespace_uri"; - private static final String NAMESPACE_PREFIX = "namespace_prefix"; - private String 
IMPORTS_SECTION[] = {FILE, REPOSITORY, NAMESPACE_URI, NAMESPACE_PREFIX}; - - private ArrayList importslist; - private String path; - private ArrayList typeDefinitionList; - - private LinkedHashMap customDefs; - private LinkedHashMap allCustomDefs; - private ArrayList> nestedToscaTpls; - private LinkedHashMap repositories; - - @SuppressWarnings("unchecked") - public ImportsLoader(ArrayList_importslist, - String _path, - Object _typeDefinitionList, - LinkedHashMap tpl) { - - this.importslist = _importslist; - customDefs = new LinkedHashMap(); - allCustomDefs = new LinkedHashMap(); - nestedToscaTpls = new ArrayList>(); - if((_path == null || _path.isEmpty()) && tpl == null) { - //msg = _('Input tosca template is not provided.') - //log.warning(msg) - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE184", "ValidationError: Input tosca template is not provided")); - } - - this.path = _path; - this.repositories = new LinkedHashMap(); - - if(tpl != null && tpl.get("repositories") != null) { - this.repositories = (LinkedHashMap)tpl.get("repositories"); - } - this.typeDefinitionList = new ArrayList(); - if(_typeDefinitionList != null) { - if(_typeDefinitionList instanceof ArrayList) { - this.typeDefinitionList = (ArrayList)_typeDefinitionList; - } - else { - this.typeDefinitionList.add((String)_typeDefinitionList); - } - } - _validateAndLoadImports(); - } - - public LinkedHashMap getCustomDefs() { - return allCustomDefs; - } - - public ArrayList> getNestedToscaTpls() { - return nestedToscaTpls; - } - - @SuppressWarnings({ "unchecked", "unused" }) - public void _validateAndLoadImports() { - Set importNames = new HashSet(); - - if(importslist == null) { - //msg = _('"imports" keyname is defined without including templates.') - //log.error(msg) - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE185", - "ValidationError: \"imports\" keyname is defined without including templates")); - return; - } - - 
for(Object importDef: importslist) { - String fullFileName = null; - LinkedHashMap customType = null; - if(importDef instanceof LinkedHashMap) { - for(Map.Entry me: ((LinkedHashMap)importDef).entrySet()) { - String importName = me.getKey(); - Object importUri = me.getValue(); - if(importNames.contains(importName)) { - //msg = (_('Duplicate import name "%s" was found.') % import_name) - //log.error(msg) - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE186", String.format( - "ValidationError: Duplicate import name \"%s\" was found",importName))); - } - importNames.add(importName); //??? - - // _loadImportTemplate returns 2 objects - Object ffnct[] = _loadImportTemplate(importName, importUri); - fullFileName = (String)ffnct[0]; - customType = (LinkedHashMap)ffnct[1]; - String namespacePrefix = ""; - if(importUri instanceof LinkedHashMap) { - namespacePrefix = (String) - ((LinkedHashMap)importUri).get(NAMESPACE_PREFIX); - } - - if(customType != null) { - TypeValidation tv = new TypeValidation(customType, importDef); - _updateCustomDefs(customType, namespacePrefix); - } - } - } - else { // old style of imports - // _loadImportTemplate returns 2 objects - Object ffnct[] = _loadImportTemplate(null,importDef); - fullFileName = (String)ffnct[0]; - customType = (LinkedHashMap)ffnct[1]; - if(customType != null) { - TypeValidation tv = new TypeValidation(customType,importDef); - _updateCustomDefs(customType,null); - } - } - _updateNestedToscaTpls(fullFileName, customType); - - - } - } - - /** - * This method is used to get consolidated custom definitions by passing custom Types from - * each import. 
The resultant collection is then passed back which contains all import - * definitions - * - * @param customType the custom type - * @param namespacePrefix the namespace prefix - */ - @SuppressWarnings("unchecked") - private void _updateCustomDefs(LinkedHashMap customType, String namespacePrefix) { - LinkedHashMap outerCustomTypes; - for(String typeDef: typeDefinitionList) { - if(typeDef.equals("imports")) { - customDefs.put("imports", customType.get(typeDef)); - if (allCustomDefs.isEmpty() || allCustomDefs.get("imports") == null){ - allCustomDefs.put("imports",customType.get(typeDef)); - } - else if (customType.get(typeDef) != null){ - Set allCustomImports = new HashSet<>((ArrayList)allCustomDefs.get("imports")); - allCustomImports.addAll((ArrayList) customType.get(typeDef)); - allCustomDefs.put("imports", new ArrayList<>(allCustomImports)); - } - } - else { - outerCustomTypes = (LinkedHashMap)customType.get(typeDef); - if(outerCustomTypes != null) { - if(namespacePrefix != null && !namespacePrefix.isEmpty()) { - LinkedHashMap prefixCustomTypes = new LinkedHashMap(); - for(Map.Entry me: outerCustomTypes.entrySet()) { - String typeDefKey = me.getKey(); - String nameSpacePrefixToKey = namespacePrefix + "." 
+ typeDefKey; - prefixCustomTypes.put(nameSpacePrefixToKey, outerCustomTypes.get(typeDefKey)); - } - customDefs.putAll(prefixCustomTypes); - allCustomDefs.putAll(prefixCustomTypes); - } - else { - customDefs.putAll(outerCustomTypes); - allCustomDefs.putAll(outerCustomTypes); - } - } - } - } - } - - private void _updateNestedToscaTpls(String fullFileName,LinkedHashMap customTpl) { - if(fullFileName != null && customTpl != null) { - LinkedHashMap tt = new LinkedHashMap(); - tt.put(fullFileName, customTpl); - nestedToscaTpls.add(tt); - } - } - - private void _validateImportKeys(String importName, LinkedHashMap importUri) { - if(importUri.get(FILE) == null) { - //log.warning(_('Missing keyname "file" in import "%(name)s".') % {'name': import_name}) - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE187", String.format( - "MissingRequiredFieldError: Import of template \"%s\" is missing field %s",importName,FILE))); - } - for(String key: importUri.keySet()) { - boolean bFound = false; - for(String is: IMPORTS_SECTION) { - if(is.equals(key)) { - bFound = true; - break; - } - } - if(!bFound) { - //log.warning(_('Unknown keyname "%(key)s" error in ' - // 'imported definition "%(def)s".') - // % {'key': key, 'def': import_name}) - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE188", String.format( - "UnknownFieldError: Import of template \"%s\" has unknown fiels %s",importName,key))); - } - } - } - - @SuppressWarnings("unchecked") - private Object[] _loadImportTemplate(String importName, Object importUriDef) { - /* - This method loads the custom type definitions referenced in "imports" - section of the TOSCA YAML template by determining whether each import - is specified via a file reference (by relative or absolute path) or a - URL reference. 
- - Possibilities: - +----------+--------+------------------------------+ - | template | import | comment | - +----------+--------+------------------------------+ - | file | file | OK | - | file | URL | OK | - | preparsed| file | file must be a full path | - | preparsed| URL | OK | - | URL | file | file must be a relative path | - | URL | URL | OK | - +----------+--------+------------------------------+ - */ - Object al[] = new Object[2]; - - boolean shortImportNotation = false; - String fileName; - String repository; - if(importUriDef instanceof LinkedHashMap) { - _validateImportKeys(importName, (LinkedHashMap)importUriDef); - fileName = (String)((LinkedHashMap)importUriDef).get(FILE); - repository = (String)((LinkedHashMap)importUriDef).get(REPOSITORY); - if(repository != null) { - if(!repositories.keySet().contains(repository)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE189", String.format( - "InvalidPropertyValueError: Repository \"%s\" not found in \"%s\"", - repository,repositories.keySet().toString()))); - } - } - } - else { - fileName = (String)importUriDef; - repository = null; - shortImportNotation = true; - } - - if(fileName == null || fileName.isEmpty()) { - //msg = (_('A template file name is not provided with import ' - // 'definition "%(import_name)s".') - // % {'import_name': import_name}) - //log.error(msg) - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE190", String.format( - "ValidationError: A template file name is not provided with import definition \"%s\"",importName))); - al[0] = al[1] = null; - return al; - } - - if(UrlUtils.validateUrl(fileName)) { - try (InputStream input = new URL(fileName).openStream();) { - al[0] = fileName; - Yaml yaml = new Yaml(); - al[1] = yaml.load(input); - return al; - } - catch(IOException e) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE191", String.format( - "ImportError: \"%s\" 
loading YAML import from \"%s\"",e.getClass().getSimpleName(),fileName))); - al[0] = al[1] = null; - return al; - } - } - else if(repository == null || repository.isEmpty()) { - boolean aFile = false; - String importTemplate = null; - if(path != null && !path.isEmpty()) { - if(UrlUtils.validateUrl(path)) { - File fp = new File(path); - if(fp.isAbsolute()) { - String msg = String.format( - "ImportError: Absolute file name \"%s\" cannot be used in the URL-based input template \"%s\"", - fileName,path); - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE192", msg)); - al[0] = al[1] = null; - return al; - } - importTemplate = UrlUtils.joinUrl(path,fileName); - aFile = false; - } - else { - - aFile = true; - File fp = new File(path); - if(fp.isFile()) { - File fn = new File(fileName); - if(fn.isFile()) { - importTemplate = fileName; - } - else { - String fullPath = Paths.get(path).toAbsolutePath().getParent().toString() + File.separator + fileName; - File ffp = new File(fullPath); - if(ffp.isFile()) { - importTemplate = fullPath; - } - else { - String dirPath = Paths.get(path).toAbsolutePath().getParent().toString(); - String filePath; - if(Paths.get(fileName).getParent() != null) { - filePath = Paths.get(fileName).getParent().toString(); - } - else { - filePath = ""; - } - if(!filePath.isEmpty() && dirPath.endsWith(filePath)) { - String sFileName = Paths.get(fileName).getFileName().toString(); - importTemplate = dirPath + File.separator + sFileName; - File fit = new File(importTemplate); - if(!fit.isFile()) { - //msg = (_('"%(import_template)s" is' - // 'not a valid file') - // % {'import_template': - // import_template}) - //log.error(msg) - String msg = String.format( - "ValueError: \"%s\" is not a valid file",importTemplate); - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE193", msg)); - log.debug("ImportsLoader - _loadImportTemplate - {}", msg); - } - } - } - } - } - } - } - else { // 
template is pre-parsed - File fn = new File(fileName); - if(fn.isAbsolute() && fn.isFile()) { - aFile = true; - importTemplate = fileName; - } - else { - String msg = String.format( - "Relative file name \"%s\" cannot be used in a pre-parsed input template",fileName); - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE194", "ImportError: " + msg)); - al[0] = al[1] = null; - return al; - } - } - - if(importTemplate == null || importTemplate.isEmpty()) { - //log.error(_('Import "%(name)s" is not valid.') % - // {'name': import_uri_def}) - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE195", String.format( - "ImportError: Import \"%s\" is not valid",importUriDef))); - al[0] = al[1] = null; - return al; - } - - // for now, this must be a file - if(!aFile) { - log.error("ImportsLoader - _loadImportTemplate - Error!! Expected a file. importUriDef = {}, importTemplate = {}", importUriDef, importTemplate); - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE196", String.format( - "ImportError: Import \"%s\" is not a file",importName))); - al[0] = al[1] = null; - return al; - } - try (InputStream input = new FileInputStream(new File(importTemplate));) { - al[0] = importTemplate; - - Yaml yaml = new Yaml(); - al[1] = yaml.load(input); - return al; - } - catch(FileNotFoundException e) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE197", String.format( - "ImportError: Failed to load YAML from \"%s\"" + e,importName))); - al[0] = al[1] = null; - return al; - } - catch(Exception e) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE198", String.format( - "ImportError: Exception from SnakeYAML file = \"%s\"" + e,importName))); - al[0] = al[1] = null; - return al; - } - } - - if(shortImportNotation) { - //log.error(_('Import "%(name)s" is not valid.') % import_uri_def) - 
ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE199", String.format( - "ImportError: Import \"%s\" is not valid",importName))); - al[0] = al[1] = null; - return al; - } - - String fullUrl = ""; - String repoUrl = ""; - if(repository != null && !repository.isEmpty()) { - if(repositories != null) { - for(String repoName: repositories.keySet()) { - if(repoName.equals(repository)) { - Object repoDef = repositories.get(repoName); - if(repoDef instanceof String) { - repoUrl = (String)repoDef; - } - else if(repoDef instanceof LinkedHashMap) { - repoUrl = (String)((LinkedHashMap)repoDef).get("url"); - } - // Remove leading, ending spaces and strip - // the last character if "/" - repoUrl = repoUrl.trim(); - if(repoUrl.endsWith("/")) { - repoUrl = repoUrl.substring(0,repoUrl.length()-1); - } - fullUrl = repoUrl + "/" + fileName; - break; - } - } - } - if(fullUrl.isEmpty()) { - String msg = String.format( - "referenced repository \"%s\" in import definition \"%s\" not found", - repository,importName); - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE200", "ImportError: " + msg)); - al[0] = al[1] = null; - return al; - } - } - if(UrlUtils.validateUrl(fullUrl)) { - try (InputStream input = new URL(fullUrl).openStream();) { - al[0] = fullUrl; - Yaml yaml = new Yaml(); - al[1] = yaml.load(input); - return al; - } - catch(IOException e) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE201", String.format( - "ImportError: Exception loading YAML import from \"%s\"",fullUrl))); - al[0] = al[1] = null; - return al; - } - } - else { - String msg = String.format( - "repository URL \"%s\" in import definition \"%s\" is not valid", - repoUrl,importName); - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE202", "ImportError: " + msg)); - } - - // if we got here something is wrong with the flow... 
- log.error("ImportsLoader - _loadImportTemplate - got to dead end (importName {})", importName); - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE203", String.format( - "ImportError: _loadImportTemplate got to dead end (importName %s)\n",importName))); - al[0] = al[1] = null; - return al; - } - - @Override - public String toString() { - return "ImportsLoader{" + - "IMPORTS_SECTION=" + Arrays.toString(IMPORTS_SECTION) + - ", importslist=" + importslist + - ", path='" + path + '\'' + - ", typeDefinitionList=" + typeDefinitionList + - ", customDefs=" + customDefs + - ", nestedToscaTpls=" + nestedToscaTpls + - ", repositories=" + repositories + - '}'; - } -} - -/*python - -import logging -import os - -from toscaparser.common.exception import ValidationIssueCollector -from toscaparser.common.exception import InvalidPropertyValueError -from toscaparser.common.exception import MissingRequiredFieldError -from toscaparser.common.exception import UnknownFieldError -from toscaparser.common.exception import ValidationError -from toscaparser.elements.tosca_type_validation import TypeValidation -from toscaparser.utils.gettextutils import _ -import org.openecomp.sdc.toscaparser.api.utils.urlutils -import org.openecomp.sdc.toscaparser.api.utils.yamlparser - -YAML_LOADER = toscaparser.utils.yamlparser.load_yaml -log = logging.getLogger("tosca") - - -class ImportsLoader(object): - - IMPORTS_SECTION = (FILE, REPOSITORY, NAMESPACE_URI, NAMESPACE_PREFIX) = \ - ('file', 'repository', 'namespace_uri', - 'namespace_prefix') - - def __init__(self, importslist, path, type_definition_list=None, - tpl=None): - self.importslist = importslist - self.custom_defs = {} - if not path and not tpl: - msg = _('Input tosca template is not provided.') - log.warning(msg) - ValidationIssueCollector.appendException(ValidationError(message=msg)) - self.path = path - self.repositories = {} - if tpl and tpl.get('repositories'): - self.repositories = 
tpl.get('repositories') - self.type_definition_list = [] - if type_definition_list: - if isinstance(type_definition_list, list): - self.type_definition_list = type_definition_list - else: - self.type_definition_list.append(type_definition_list) - self._validate_and_load_imports() - - def get_custom_defs(self): - return self.custom_defs - - def _validate_and_load_imports(self): - imports_names = set() - - if not self.importslist: - msg = _('"imports" keyname is defined without including ' - 'templates.') - log.error(msg) - ValidationIssueCollector.appendException(ValidationError(message=msg)) - return - - for import_def in self.importslist: - if isinstance(import_def, dict): - for import_name, import_uri in import_def.items(): - if import_name in imports_names: - msg = (_('Duplicate import name "%s" was found.') % - import_name) - log.error(msg) - ValidationIssueCollector.appendException( - ValidationError(message=msg)) - imports_names.add(import_name) - - custom_type = self._load_import_template(import_name, - import_uri) - namespace_prefix = None - if isinstance(import_uri, dict): - namespace_prefix = import_uri.get( - self.NAMESPACE_PREFIX) - if custom_type: - TypeValidation(custom_type, import_def) - self._update_custom_def(custom_type, namespace_prefix) - else: # old style of imports - custom_type = self._load_import_template(None, - import_def) - if custom_type: - TypeValidation( - custom_type, import_def) - self._update_custom_def(custom_type, None) - - def _update_custom_def(self, custom_type, namespace_prefix): - outer_custom_types = {} - for type_def in self.type_definition_list: - outer_custom_types = custom_type.get(type_def) - if outer_custom_types: - if type_def == "imports": - self.custom_defs.update({'imports': outer_custom_types}) - else: - if namespace_prefix: - prefix_custom_types = {} - for type_def_key in outer_custom_types.keys(): - namespace_prefix_to_key = (namespace_prefix + - "." 
+ type_def_key) - prefix_custom_types[namespace_prefix_to_key] = \ - outer_custom_types[type_def_key] - self.custom_defs.update(prefix_custom_types) - else: - self.custom_defs.update(outer_custom_types) - - def _validate_import_keys(self, import_name, import_uri_def): - if self.FILE not in import_uri_def.keys(): - log.warning(_('Missing keyname "file" in import "%(name)s".') - % {'name': import_name}) - ValidationIssueCollector.appendException( - MissingRequiredFieldError( - what='Import of template "%s"' % import_name, - required=self.FILE)) - for key in import_uri_def.keys(): - if key not in self.IMPORTS_SECTION: - log.warning(_('Unknown keyname "%(key)s" error in ' - 'imported definition "%(def)s".') - % {'key': key, 'def': import_name}) - ValidationIssueCollector.appendException( - UnknownFieldError( - what='Import of template "%s"' % import_name, - field=key)) - - def _load_import_template(self, import_name, import_uri_def): - """Handle custom types defined in imported template files - - This method loads the custom type definitions referenced in "imports" - section of the TOSCA YAML template by determining whether each import - is specified via a file reference (by relative or absolute path) or a - URL reference. 
- - Possibilities: - +----------+--------+------------------------------+ - | template | import | comment | - +----------+--------+------------------------------+ - | file | file | OK | - | file | URL | OK | - | preparsed| file | file must be a full path | - | preparsed| URL | OK | - | URL | file | file must be a relative path | - | URL | URL | OK | - +----------+--------+------------------------------+ - """ - short_import_notation = False - if isinstance(import_uri_def, dict): - self._validate_import_keys(import_name, import_uri_def) - file_name = import_uri_def.get(self.FILE) - repository = import_uri_def.get(self.REPOSITORY) - repos = self.repositories.keys() - if repository is not None: - if repository not in repos: - ValidationIssueCollector.appendException( - InvalidPropertyValueError( - what=_('Repository is not found in "%s"') % repos)) - else: - file_name = import_uri_def - repository = None - short_import_notation = True - - if not file_name: - msg = (_('A template file name is not provided with import ' - 'definition "%(import_name)s".') - % {'import_name': import_name}) - log.error(msg) - ValidationIssueCollector.appendException(ValidationError(message=msg)) - return - - if toscaparser.utils.urlutils.UrlUtils.validate_url(file_name): - return YAML_LOADER(file_name, False) - elif not repository: - import_template = None - if self.path: - if toscaparser.utils.urlutils.UrlUtils.validate_url(self.path): - if os.path.isabs(file_name): - msg = (_('Absolute file name "%(name)s" cannot be ' - 'used in a URL-based input template ' - '"%(template)s".') - % {'name': file_name, 'template': self.path}) - log.error(msg) - ValidationIssueCollector.appendException(ImportError(msg)) - return - import_template = toscaparser.utils.urlutils.UrlUtils.\ - join_url(self.path, file_name) - a_file = False - else: - a_file = True - main_a_file = os.path.isfile(self.path) - - if main_a_file: - if os.path.isfile(file_name): - import_template = file_name - else: - full_path = 
os.path.join( - os.path.dirname(os.path.abspath(self.path)), - file_name) - if os.path.isfile(full_path): - import_template = full_path - else: - file_path = file_name.rpartition("/") - dir_path = os.path.dirname(os.path.abspath( - self.path)) - if file_path[0] != '' and dir_path.endswith( - file_path[0]): - import_template = dir_path + "/" +\ - file_path[2] - if not os.path.isfile(import_template): - msg = (_('"%(import_template)s" is' - 'not a valid file') - % {'import_template': - import_template}) - log.error(msg) - ValidationIssueCollector.appendException - (ValueError(msg)) - else: # template is pre-parsed - if os.path.isabs(file_name) and os.path.isfile(file_name): - a_file = True - import_template = file_name - else: - msg = (_('Relative file name "%(name)s" cannot be used ' - 'in a pre-parsed input template.') - % {'name': file_name}) - log.error(msg) - ValidationIssueCollector.appendException(ImportError(msg)) - return - - if not import_template: - log.error(_('Import "%(name)s" is not valid.') % - {'name': import_uri_def}) - ValidationIssueCollector.appendException( - ImportError(_('Import "%s" is not valid.') % - import_uri_def)) - return - return YAML_LOADER(import_template, a_file) - - if short_import_notation: - log.error(_('Import "%(name)s" is not valid.') % import_uri_def) - ValidationIssueCollector.appendException( - ImportError(_('Import "%s" is not valid.') % import_uri_def)) - return - - full_url = "" - if repository: - if self.repositories: - for repo_name, repo_def in self.repositories.items(): - if repo_name == repository: - # Remove leading, ending spaces and strip - # the last character if "/" - repo_url = ((repo_def['url']).strip()).rstrip("//") - full_url = repo_url + "/" + file_name - - if not full_url: - msg = (_('referenced repository "%(n_uri)s" in import ' - 'definition "%(tpl)s" not found.') - % {'n_uri': repository, 'tpl': import_name}) - log.error(msg) - ValidationIssueCollector.appendException(ImportError(msg)) - return - - if 
toscaparser.utils.urlutils.UrlUtils.validate_url(full_url): - return YAML_LOADER(full_url, False) - else: - msg = (_('repository url "%(n_uri)s" is not valid in import ' - 'definition "%(tpl)s".') - % {'n_uri': repo_url, 'tpl': import_name}) - log.error(msg) - ValidationIssueCollector.appendException(ImportError(msg)) -*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/NodeTemplate.java b/src/main/java/org/openecomp/sdc/toscaparser/api/NodeTemplate.java deleted file mode 100644 index 1e97572..0000000 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/NodeTemplate.java +++ /dev/null @@ -1,737 +0,0 @@ -package org.openecomp.sdc.toscaparser.api; - -import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; - -import java.util.ArrayList; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; - -import org.openecomp.sdc.toscaparser.api.elements.*; -import org.openecomp.sdc.toscaparser.api.utils.CopyUtils; -import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; - -public class NodeTemplate extends EntityTemplate { - - private LinkedHashMap templates; - private LinkedHashMap customDef; - private ArrayList availableRelTpls; - private LinkedHashMap availableRelTypes; - private LinkedHashMap related; - private ArrayList relationshipTpl; - private LinkedHashMap _relationships; - private SubstitutionMappings subMappingToscaTemplate; - private Metadata metadata; - - private static final String METADATA = "metadata"; - - @SuppressWarnings("unchecked") - public NodeTemplate(String name, - LinkedHashMap ntnodeTemplates, - LinkedHashMap ntcustomDef, - ArrayList ntavailableRelTpls, - LinkedHashMap ntavailableRelTypes) { - - super(name, (LinkedHashMap)ntnodeTemplates.get(name), "node_type", ntcustomDef); - - templates = ntnodeTemplates; - _validateFields((LinkedHashMap)templates.get(name)); - customDef = ntcustomDef; - related = new LinkedHashMap(); - relationshipTpl = new ArrayList(); - 
availableRelTpls = ntavailableRelTpls; - availableRelTypes = ntavailableRelTypes; - _relationships = new LinkedHashMap(); - subMappingToscaTemplate = null; - metadata = _metaData(); - } - - @SuppressWarnings("unchecked") - public LinkedHashMap getRelationships() { - if(_relationships.isEmpty()) { - List requires = getRequirements().getAll(); - if(requires != null && requires instanceof List) { - for(RequirementAssignment r: requires) { - LinkedHashMap explicit = _getExplicitRelationship(r); - if(explicit != null) { - // _relationships.putAll(explicit)... - for(Map.Entry ee: explicit.entrySet()) { - _relationships.put(ee.getKey(), ee.getValue()); - } - } - } - } - } - return _relationships; - } - - @SuppressWarnings("unchecked") - private LinkedHashMap _getExplicitRelationship(RequirementAssignment req) { - // Handle explicit relationship - - // For example, - // - req: - // node: DBMS - // relationship: tosca.relationships.HostedOn - - LinkedHashMap explicitRelation = new LinkedHashMap(); - String node = req.getNodeTemplateName(); - - if(node != null && !node.isEmpty()) { - //msg = _('Lookup by TOSCA types is not supported. ' - // 'Requirement for "%s" can not be full-filled.') % self.name - boolean bFound = false; - for(String k: EntityType.TOSCA_DEF.keySet()) { - if(k.equals(node)) { - bFound = true; - break; - } - } - if(bFound || customDef.get(node) != null) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE205", String.format( - "NotImplementedError: Lookup by TOSCA types is not supported. 
Requirement for \"%s\" can not be full-filled", - getName()))); - return null; - } - if(templates.get(node) == null) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE206", String.format( - "KeyError: Node template \"%s\" was not found",node))); - return null; - } - NodeTemplate relatedTpl = new NodeTemplate(node,templates,customDef,null,null); - Object relationship = req.getRelationship(); - String relationshipString = null; -// // here relationship can be a string or a LHM with 'type': - - // check if its type has relationship defined - if(relationship == null) { - ArrayList parentReqs = ((NodeType)typeDefinition).getAllRequirements(); - if(parentReqs == null) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE207", "ValidationError: parent_req is null")); - } - else { -// for(String key: req.keySet()) { -// boolean bFoundRel = false; - for(Object rdo: parentReqs) { - LinkedHashMap reqDict = (LinkedHashMap)rdo; - LinkedHashMap relDict = (LinkedHashMap)reqDict.get(req.getName()); - if(relDict != null) { - relationship = relDict.get("relationship"); - //BUG-python??? need to break twice? 
-// bFoundRel = true; - break; - } - } -// if(bFoundRel) { -// break; -// } -// } - } - } - - if(relationship != null) { - // here relationship can be a string or a LHM with 'type': - if(relationship instanceof String) { - relationshipString = (String)relationship; - } - else if(relationship instanceof LinkedHashMap) { - relationshipString = (String)((LinkedHashMap)relationship).get("type"); - } - - boolean foundRelationshipTpl = false; - // apply available relationship templates if found - if(availableRelTpls != null) { - for(RelationshipTemplate tpl: availableRelTpls) { - if(tpl.getName().equals(relationshipString)) { - RelationshipType rtype = new RelationshipType(tpl.getType(),null,customDef); - explicitRelation.put(rtype, relatedTpl); - tpl.setTarget(relatedTpl); - tpl.setSource(this); - relationshipTpl.add(tpl); - foundRelationshipTpl = true; - } - } - } - // create relationship template object. - String relPrfx = EntityType.RELATIONSHIP_PREFIX; - if(!foundRelationshipTpl) { - if(relationship instanceof LinkedHashMap) { - relationshipString = (String)((LinkedHashMap)relationship).get("type"); - if(relationshipString != null) { - if(availableRelTypes != null && !availableRelTypes.isEmpty() && - availableRelTypes.get(relationshipString) != null) { - ; - } - else if(!(relationshipString).startsWith(relPrfx)) { - relationshipString = relPrfx + relationshipString; - } - } - else { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE208", String.format( - "MissingRequiredFieldError: \"relationship\" used in template \"%s\" is missing required field \"type\"", - relatedTpl.getName()))); - } - } - for(RelationshipType rtype: ((NodeType)typeDefinition).getRelationship().keySet()) { - if(rtype.getType().equals(relationshipString)) { - explicitRelation.put(rtype,relatedTpl); - relatedTpl._addRelationshipTemplate(req,rtype.getType(),this); - } - else if(availableRelTypes != null && !availableRelTypes.isEmpty()) { - LinkedHashMap 
relTypeDef = (LinkedHashMap)availableRelTypes.get(relationshipString); - if(relTypeDef != null) { - String superType = (String)relTypeDef.get("derived_from"); - if(superType != null) { - if(!superType.startsWith(relPrfx)) { - superType = relPrfx + superType; - } - if(rtype.getType().equals(superType)) { - explicitRelation.put(rtype,relatedTpl); - relatedTpl._addRelationshipTemplate(req,rtype.getType(),this); - } - } - } - } - } - } - } - } - return explicitRelation; - } - - @SuppressWarnings("unchecked") - private void _addRelationshipTemplate(RequirementAssignment requirement, String rtype, NodeTemplate source) { - LinkedHashMap req = new LinkedHashMap<>(); - req.put("relationship", CopyUtils.copyLhmOrAl(requirement.getRelationship())); - req.put("type",rtype); - RelationshipTemplate tpl = new RelationshipTemplate(req, rtype, customDef, this, source); - relationshipTpl.add(tpl); - } - - public ArrayList getRelationshipTemplate() { - return relationshipTpl; - } - - void _addNext(NodeTemplate nodetpl,RelationshipType relationship) { - related.put(nodetpl,relationship); - } - - public ArrayList getRelatedNodes() { - if(related.isEmpty()) { - for(Map.Entry me: ((NodeType)typeDefinition).getRelationship().entrySet()) { - RelationshipType relation = me.getKey(); - NodeType node = me.getValue(); - for(String tpl: templates.keySet()) { - if(tpl.equals(node.getType())) { - //BUG.. python has - // self.related[NodeTemplate(tpl)] = relation - // but NodeTemplate doesn't have a constructor with just name... - //???? 
- related.put(new NodeTemplate(tpl,null,null,null,null),relation); - } - } - } - } - return new ArrayList(related.keySet()); - } - - public void validate(/*tosca_tpl=none is not used...*/) { - _validateCapabilities(); - _validateRequirements(); - _validateProperties(entityTpl,(NodeType)typeDefinition); - _validateInterfaces(); - for(Property prop: getPropertiesObjects()) { - prop.validate(); - } - } - - private Metadata _metaData() { - if(entityTpl.get(METADATA) != null) { - return new Metadata((Map)entityTpl.get(METADATA)); - } - else { - return null; - } - } - - @SuppressWarnings("unchecked") - private void _validateRequirements() { - ArrayList typeRequires = ((NodeType)typeDefinition).getAllRequirements(); - ArrayList allowedReqs = new ArrayList<>(); - allowedReqs.add("template"); - if(typeRequires != null) { - for(Object to: typeRequires) { - LinkedHashMap treq = (LinkedHashMap)to; - for(Map.Entry me: treq.entrySet()) { - String key = me.getKey(); - Object value = me.getValue(); - allowedReqs.add(key); - if(value instanceof LinkedHashMap) { - allowedReqs.addAll(((LinkedHashMap)value).keySet()); - } - } - - } - } - - ArrayList requires = (ArrayList)((NodeType)typeDefinition).getValue(REQUIREMENTS, entityTpl, false); - if(requires != null) { - if(!(requires instanceof ArrayList)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE209", String.format( - "TypeMismatchError: \"requirements\" of template \"%s\" are not of type \"list\"",name))); - } - else { - for(Object ro: requires) { - LinkedHashMap req = (LinkedHashMap)ro; - for(Map.Entry me: req.entrySet()) { - String rl = me.getKey(); - Object vo = me.getValue(); - if(vo instanceof LinkedHashMap) { - LinkedHashMap value = (LinkedHashMap)vo; - _validateRequirementsKeys(value); - _validateRequirementsProperties(value); - allowedReqs.add(rl); - } - } - _commonValidateField(req,allowedReqs,"requirements"); - } - } - } - } - - @SuppressWarnings("unchecked") - private void 
_validateRequirementsProperties(LinkedHashMap reqs) { - // TO-DO(anyone): Only occurrences property of the requirements is - // validated here. Validation of other requirement properties are being - // validated in different files. Better to keep all the requirements - // properties validation here. - for(Map.Entry me: reqs.entrySet()) { - if(me.getKey().equals("occurrences")) { - ArrayList val = (ArrayList)me.getValue(); - _validateOccurrences(val); - } - - } - } - - private void _validateOccurrences(ArrayList occurrences) { - DataEntity.validateDatatype("list",occurrences,null,null,null); - for(Object val: occurrences) { - DataEntity.validateDatatype("Integer",val,null,null,null); - } - if(occurrences.size() != 2 || - !(0 <= (int)occurrences.get(0) && (int)occurrences.get(0) <= (int)occurrences.get(1)) || - (int)occurrences.get(1) == 0) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE210", String.format( - "InvalidPropertyValueError: property has invalid value %s",occurrences.toString()))); - } - } - - private void _validateRequirementsKeys(LinkedHashMap reqs) { - for(String key: reqs.keySet()) { - boolean bFound = false; - for(int i=0; i< REQUIREMENTS_SECTION.length; i++) { - if(key.equals(REQUIREMENTS_SECTION[i])) { - bFound = true; - break; - } - } - if(!bFound) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE211", String.format( - "UnknownFieldError: \"requirements\" of template \"%s\" contains unknown field \"%s\"",name,key))); - } - } - } - - @SuppressWarnings("unchecked") - private void _validateInterfaces() { - LinkedHashMap ifaces = (LinkedHashMap) - ((NodeType)typeDefinition).getValue(INTERFACES, entityTpl, false); - if(ifaces != null) { - for(Map.Entry me: ifaces.entrySet()) { - String iname = me.getKey(); - LinkedHashMap value = (LinkedHashMap)me.getValue(); - if(iname.equals(InterfacesDef.LIFECYCLE) || iname.equals(InterfacesDef.LIFECYCLE_SHORTNAME)) { - // maybe we 
should convert [] to arraylist??? - ArrayList inlo = new ArrayList<>(); - for(int i=0; i irco = new ArrayList<>(); - for(int i=0; i _collectCustomIfaceOperations(String iname) { - ArrayList allowedOperations = new ArrayList<>(); - LinkedHashMap nodetypeIfaceDef = (LinkedHashMap)((NodeType) - typeDefinition).getInterfaces().get(iname); - allowedOperations.addAll(nodetypeIfaceDef.keySet()); - String ifaceType = (String)nodetypeIfaceDef.get("type"); - if(ifaceType != null) { - LinkedHashMap ifaceTypeDef = null; - if(((NodeType)typeDefinition).customDef != null) { - ifaceTypeDef = (LinkedHashMap)((NodeType)typeDefinition).customDef.get(ifaceType); - } - if(ifaceTypeDef == null) { - ifaceTypeDef = (LinkedHashMap)EntityType.TOSCA_DEF.get(ifaceType); - } - allowedOperations.addAll(ifaceTypeDef.keySet()); - } - // maybe we should convert [] to arraylist??? - ArrayList idrw = new ArrayList<>(); - for(int i=0; i nodetemplate) { - for(String ntname: nodetemplate.keySet()) { - boolean bFound = false; - for(int i=0; i< SECTIONS.length; i++) { - if(ntname.equals(SECTIONS[i])) { - bFound = true; - break; - } - } - if(!bFound) { - for(int i=0; i< SPECIAL_SECTIONS.length; i++) { - if(ntname.equals(SPECIAL_SECTIONS[i])) { - bFound = true; - break; - } - } - - } - if(!bFound) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE213", String.format( - "UnknownFieldError: Node template \"%s\" has unknown field \"%s\"",name,ntname))); - } - } - } - - // getter/setter - - // multilevel nesting - public SubstitutionMappings getSubMappingToscaTemplate() { - return subMappingToscaTemplate; - } - - public void setSubMappingToscaTemplate(SubstitutionMappings sm) { - subMappingToscaTemplate = sm; - } - - public Metadata getMetaData() { - return metadata; - } - - public void setMetaData(Metadata metadata) { - this.metadata = metadata; - } - - @Override - public String toString() { - return getName(); - } - -} - -/*python - -from toscaparser.common.exception 
import ValidationIssueCollector -from toscaparser.common.exception import InvalidPropertyValueError -from toscaparser.common.exception import MissingRequiredFieldError -from toscaparser.common.exception import TypeMismatchError -from toscaparser.common.exception import UnknownFieldError -from toscaparser.common.exception import ValidationError -from toscaparser.dataentity import DataEntity -from toscaparser.elements.interfaces import CONFIGURE -from toscaparser.elements.interfaces import CONFIGURE_SHORTNAME -from toscaparser.elements.interfaces import INTERFACE_DEF_RESERVED_WORDS -from toscaparser.elements.interfaces import InterfacesDef -from toscaparser.elements.interfaces import LIFECYCLE -from toscaparser.elements.interfaces import LIFECYCLE_SHORTNAME -from toscaparser.elements.relationshiptype import RelationshipType -from toscaparser.entity_template import EntityTemplate -from toscaparser.relationship_template import RelationshipTemplate -from toscaparser.utils.gettextutils import _ - -log = logging.getLogger('tosca') - - -class NodeTemplate(EntityTemplate): - '''Node template from a Tosca profile.''' - def __init__(self, name, node_templates, custom_def=None, - available_rel_tpls=None, available_rel_types=None): - super(NodeTemplate, self).__init__(name, node_templates[name], - 'node_type', - custom_def) - self.templates = node_templates - self._validate_fields(node_templates[name]) - self.custom_def = custom_def - self.related = {} - self.relationship_tpl = [] - self.available_rel_tpls = available_rel_tpls - self.available_rel_types = available_rel_types - self._relationships = {} - self.sub_mapping_tosca_template = None - - @property - def relationships(self): - if not self._relationships: - requires = self.requirements - if requires and isinstance(requires, list): - for r in requires: - for r1, value in r.items(): - explicit = self._get_explicit_relationship(r, value) - if explicit: - for key, value in explicit.items(): - self._relationships[key] = value 
- return self._relationships - - def _get_explicit_relationship(self, req, value): - """Handle explicit relationship - - For example, - - req: - node: DBMS - relationship: tosca.relationships.HostedOn - """ - explicit_relation = {} - node = value.get('node') if isinstance(value, dict) else value - - if node: - # TO-DO(spzala) implement look up once Glance meta data is available - # to find a matching TOSCA node using the TOSCA types - msg = _('Lookup by TOSCA types is not supported. ' - 'Requirement for "%s" can not be full-filled.') % self.name - if (node in list(self.type_definition.TOSCA_DEF.keys()) - or node in self.custom_def): - ValidationIssueCollector.appendException(NotImplementedError(msg)) - return - - if node not in self.templates: - ValidationIssueCollector.appendException( - KeyError(_('Node template "%s" was not found.') % node)) - return - - related_tpl = NodeTemplate(node, self.templates, self.custom_def) - relationship = value.get('relationship') \ - if isinstance(value, dict) else None - # check if it's type has relationship defined - if not relationship: - parent_reqs = self.type_definition.get_all_requirements() - if parent_reqs is None: - ValidationIssueCollector.appendException( - ValidationError(message='parent_req is ' + - str(parent_reqs))) - else: - for key in req.keys(): - for req_dict in parent_reqs: - if key in req_dict.keys(): - relationship = (req_dict.get(key). - get('relationship')) - break - if relationship: - found_relationship_tpl = False - # apply available relationship templates if found - if self.available_rel_tpls: - for tpl in self.available_rel_tpls: - if tpl.name == relationship: - rtype = RelationshipType(tpl.type, None, - self.custom_def) - explicit_relation[rtype] = related_tpl - tpl.target = related_tpl - tpl.source = self - self.relationship_tpl.append(tpl) - found_relationship_tpl = True - # create relationship template object. 
- rel_prfx = self.type_definition.RELATIONSHIP_PREFIX - if not found_relationship_tpl: - if isinstance(relationship, dict): - relationship = relationship.get('type') - if relationship: - if self.available_rel_types and \ - relationship in self.available_rel_types.keys(): - pass - elif not relationship.startswith(rel_prfx): - relationship = rel_prfx + relationship - else: - ValidationIssueCollector.appendException( - MissingRequiredFieldError( - what=_('"relationship" used in template ' - '"%s"') % related_tpl.name, - required=self.TYPE)) - for rtype in self.type_definition.relationship.keys(): - if rtype.type == relationship: - explicit_relation[rtype] = related_tpl - related_tpl._add_relationship_template(req, - rtype.type, - self) - elif self.available_rel_types: - if relationship in self.available_rel_types.keys(): - rel_type_def = self.available_rel_types.\ - get(relationship) - if 'derived_from' in rel_type_def: - super_type = \ - rel_type_def.get('derived_from') - if not super_type.startswith(rel_prfx): - super_type = rel_prfx + super_type - if rtype.type == super_type: - explicit_relation[rtype] = related_tpl - related_tpl.\ - _add_relationship_template( - req, rtype.type, self) - return explicit_relation - - def _add_relationship_template(self, requirement, rtype, source): - req = requirement.copy() - req['type'] = rtype - tpl = RelationshipTemplate(req, rtype, self.custom_def, self, source) - self.relationship_tpl.append(tpl) - - def get_relationship_template(self): - return self.relationship_tpl - - def _add_next(self, nodetpl, relationship): - self.related[nodetpl] = relationship - - @property - def related_nodes(self): - if not self.related: - for relation, node in self.type_definition.relationship.items(): - for tpl in self.templates: - if tpl == node.type: - self.related[NodeTemplate(tpl)] = relation - return self.related.keys() - - def validate(self, tosca_tpl=None): - self._validate_capabilities() - self._validate_requirements() - 
self._validate_properties(self.entity_tpl, self.type_definition) - self._validate_interfaces() - for prop in self.get_properties_objects(): - prop.validate() - - def _validate_requirements(self): - type_requires = self.type_definition.get_all_requirements() - allowed_reqs = ["template"] - if type_requires: - for treq in type_requires: - for key, value in treq.items(): - allowed_reqs.append(key) - if isinstance(value, dict): - for key in value: - allowed_reqs.append(key) - - requires = self.type_definition.get_value(self.REQUIREMENTS, - self.entity_tpl) - if requires: - if not isinstance(requires, list): - ValidationIssueCollector.appendException( - TypeMismatchError( - what='"requirements" of template "%s"' % self.name, - type='list')) - else: - for req in requires: - for r1, value in req.items(): - if isinstance(value, dict): - self._validate_requirements_keys(value) - self._validate_requirements_properties(value) - allowed_reqs.append(r1) - self._common_validate_field(req, allowed_reqs, - 'requirements') - - def _validate_requirements_properties(self, requirements): - # TO-DO(anyone): Only occurrences property of the requirements is - # validated here. Validation of other requirement properties are being - # validated in different files. Better to keep all the requirements - # properties validation here. 
- for key, value in requirements.items(): - if key == 'occurrences': - self._validate_occurrences(value) - break - - def _validate_occurrences(self, occurrences): - DataEntity.validate_datatype('list', occurrences) - for value in occurrences: - DataEntity.validate_datatype('integer', value) - if len(occurrences) != 2 or not (0 <= occurrences[0] <= occurrences[1]) \ - or occurrences[1] == 0: - ValidationIssueCollector.appendException( - InvalidPropertyValueError(what=(occurrences))) - - def _validate_requirements_keys(self, requirement): - for key in requirement.keys(): - if key not in self.REQUIREMENTS_SECTION: - ValidationIssueCollector.appendException( - UnknownFieldError( - what='"requirements" of template "%s"' % self.name, - field=key)) - - def _validate_interfaces(self): - ifaces = self.type_definition.get_value(self.INTERFACES, - self.entity_tpl) - if ifaces: - for name, value in ifaces.items(): - if name in (LIFECYCLE, LIFECYCLE_SHORTNAME): - self._common_validate_field( - value, InterfacesDef. - interfaces_node_lifecycle_operations, - 'interfaces') - elif name in (CONFIGURE, CONFIGURE_SHORTNAME): - self._common_validate_field( - value, InterfacesDef. 
- interfaces_relationship_configure_operations, - 'interfaces') - elif name in self.type_definition.interfaces.keys(): - self._common_validate_field( - value, - self._collect_custom_iface_operations(name), - 'interfaces') - else: - ValidationIssueCollector.appendException( - UnknownFieldError( - what='"interfaces" of template "%s"' % - self.name, field=name)) - - def _collect_custom_iface_operations(self, name): - allowed_operations = [] - nodetype_iface_def = self.type_definition.interfaces[name] - allowed_operations.extend(nodetype_iface_def.keys()) - if 'type' in nodetype_iface_def: - iface_type = nodetype_iface_def['type'] - if iface_type in self.type_definition.custom_def: - iface_type_def = self.type_definition.custom_def[iface_type] - else: - iface_type_def = self.type_definition.TOSCA_DEF[iface_type] - allowed_operations.extend(iface_type_def.keys()) - allowed_operations = [op for op in allowed_operations if - op not in INTERFACE_DEF_RESERVED_WORDS] - return allowed_operations - - def _validate_fields(self, nodetemplate): - for name in nodetemplate.keys(): - if name not in self.SECTIONS and name not in self.SPECIAL_SECTIONS: - ValidationIssueCollector.appendException( - UnknownFieldError(what='Node template "%s"' % self.name, - field=name))*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/Policy.java b/src/main/java/org/openecomp/sdc/toscaparser/api/Policy.java deleted file mode 100644 index 26805bd..0000000 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/Policy.java +++ /dev/null @@ -1,188 +0,0 @@ -package org.openecomp.sdc.toscaparser.api; - -import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; - -import java.util.ArrayList; -import java.util.LinkedHashMap; -import java.util.Map; - -import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; -import org.openecomp.sdc.toscaparser.api.utils.ValidateUtils; - -public class Policy extends EntityTemplate { - - - private static final String 
TYPE = "type"; - private static final String METADATA = "metadata"; - private static final String DESCRIPTION = "description"; - private static final String PROPERTIES = "properties"; - private static final String TARGETS = "targets"; - private static final String TRIGGERS = "triggers"; - private static final String SECTIONS[] = { - TYPE, METADATA, DESCRIPTION, PROPERTIES, TARGETS, TRIGGERS}; - - LinkedHashMap metaData; - ArrayList targetsList; // *** a list of NodeTemplate OR a list of Group *** - String targetsType; - ArrayList triggers; - LinkedHashMap properties; - - public Policy(String _name, - LinkedHashMap _policy, -// ArrayList targetObjects, - ArrayList targetObjects, - String _targetsType, - LinkedHashMap _customDef) { - super(_name,_policy,"policy_type",_customDef); - - metaData = null; - if(_policy.get(METADATA) != null) { - metaData = (LinkedHashMap)_policy.get(METADATA); - ValidateUtils.validateMap(metaData); - } - - targetsList = targetObjects; - targetsType = _targetsType; - triggers = _triggers((LinkedHashMap)_policy.get(TRIGGERS)); - properties = null; - if(_policy.get("properties") != null) { - properties = (LinkedHashMap)_policy.get("properties"); - } - _validateKeys(); - } - - public ArrayList getTargets() { - return (ArrayList)entityTpl.get("targets"); - } - - public ArrayList getDescription() { - return (ArrayList)entityTpl.get("description"); - } - - public ArrayList getmetadata() { - return (ArrayList)entityTpl.get("metadata"); - } - - public String getTargetsType() { - return targetsType; - } - -// public ArrayList getTargetsList() { - public ArrayList getTargetsList() { - return targetsList; - } - - // entityTemplate already has a different getProperties... 
- // this is to access the local properties variable - public LinkedHashMap getPolicyProperties() { - return properties; - } - - private ArrayList _triggers(LinkedHashMap triggers) { - ArrayList triggerObjs = new ArrayList<>(); - if(triggers != null) { - for(Map.Entry me: triggers.entrySet()) { - String tname = me.getKey(); - LinkedHashMap ttriggerTpl = - (LinkedHashMap)me.getValue(); - Triggers triggersObj = new Triggers(tname,ttriggerTpl); - triggerObjs.add(triggersObj); - } - } - return triggerObjs; - } - - private void _validateKeys() { - for(String key: entityTpl.keySet()) { - boolean bFound = false; - for(int i=0; i customDef; - - public Property(String propname, - Object propvalue, - LinkedHashMap propschemaDict, - LinkedHashMap propcustomDef) { - - name = propname; - value = propvalue; - customDef = propcustomDef; - schema = new Schema(propname, propschemaDict); - } - - public String getType() { - return schema.getType(); - } - - public boolean isRequired() { - return schema.isRequired(); - } - - public String getDescription() { - return schema.getDescription(); - } - - public Object getDefault() { - return schema.getDefault(); - } - - public ArrayList getConstraints() { - return schema.getConstraints(); - } - - public LinkedHashMap getEntrySchema() { - return schema.getEntrySchema(); - } - - - public String getName() { - return name; - } - - public Object getValue() { - return value; - } - - // setter - public Object setValue(Object vob) { - value = vob; - return value; - } - - public void validate() { - // Validate if not a reference property - if(!Function.isFunction(value)) { - if(getType().equals(Schema.STRING)) { - value = value.toString(); - } - value = DataEntity.validateDatatype(getType(),value, - getEntrySchema(), - customDef, - name); - _validateConstraints(); - } - } - - private void _validateConstraints() { - if(getConstraints() != null) { - for(Constraint constraint: getConstraints()) { - constraint.validate(value); - } - } - } - - @Override - 
public String toString() { - return "Property{" + - "name='" + name + '\'' + - ", value=" + value + - ", schema=" + schema + - ", customDef=" + customDef + - '}'; - } -} - -/*python - -class Property(object): - '''TOSCA built-in Property type.''' - - PROPERTY_KEYS = ( - TYPE, REQUIRED, DESCRIPTION, DEFAULT, CONSTRAINTS - ) = ( - 'type', 'required', 'description', 'default', 'constraints' - ) - - ENTRY_SCHEMA_KEYS = ( - ENTRYTYPE, ENTRYPROPERTIES - ) = ( - 'type', 'properties' - ) - - def __init__(self, property_name, value, schema_dict, custom_def=None): - self.name = property_name - self.value = value - self.custom_def = custom_def - self.schema = Schema(property_name, schema_dict) - - @property - def type(self): - return self.schema.type - - @property - def required(self): - return self.schema.required - - @property - def description(self): - return self.schema.description - - @property - def default(self): - return self.schema.default - - @property - def constraints(self): - return self.schema.constraints - - @property - def entry_schema(self): - return self.schema.entry_schema - - def validate(self): - '''Validate if not a reference property.''' - if not is_function(self.value): - if self.type == Schema.STRING: - self.value = str(self.value) - self.value = DataEntity.validate_datatype(self.type, self.value, - self.entry_schema, - self.custom_def, - self.name) - self._validate_constraints() - - def _validate_constraints(self): - if self.constraints: - for constraint in self.constraints: - constraint.validate(self.value) -*/ diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/RelationshipTemplate.java b/src/main/java/org/openecomp/sdc/toscaparser/api/RelationshipTemplate.java deleted file mode 100644 index 10d3ad9..0000000 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/RelationshipTemplate.java +++ /dev/null @@ -1,199 +0,0 @@ -package org.openecomp.sdc.toscaparser.api; - -import java.util.ArrayList; -import java.util.LinkedHashMap; -import 
java.util.Map; - -import org.openecomp.sdc.toscaparser.api.elements.EntityType; -import org.openecomp.sdc.toscaparser.api.elements.PropertyDef; -import org.openecomp.sdc.toscaparser.api.elements.StatefulEntityType; - -public class RelationshipTemplate extends EntityTemplate { - - private static final String DERIVED_FROM = "derived_from"; - private static final String PROPERTIES = "properties"; - private static final String REQUIREMENTS = "requirements"; - private static final String INTERFACES = "interfaces"; - private static final String CAPABILITIES = "capabilities"; - private static final String TYPE = "type"; - @SuppressWarnings("unused") - private static final String SECTIONS[] = { - DERIVED_FROM, PROPERTIES, REQUIREMENTS, INTERFACES, CAPABILITIES, TYPE}; - - private String name; - private NodeTemplate target; - private NodeTemplate source; - private ArrayList _properties; - - public RelationshipTemplate(LinkedHashMap rtrelationshipTemplate, - String rtname, - LinkedHashMap rtcustomDef, - NodeTemplate rttarget, - NodeTemplate rtsource) { - super(rtname,rtrelationshipTemplate,"relationship_type",rtcustomDef); - - name = rtname; - target = rttarget; - source = rtsource; - _properties = null; - } - - public ArrayList getPropertiesObjects() { - // Return properties objects for this template - if(_properties == null) { - _properties = _createRelationshipProperties(); - } - return _properties; - } - - @SuppressWarnings({ "unchecked", "unused" }) - public ArrayList _createRelationshipProperties() { - ArrayList props = new ArrayList (); - LinkedHashMap properties = new LinkedHashMap(); - LinkedHashMap relationship = (LinkedHashMap)entityTpl.get("relationship"); - - if(relationship == null) { - for(Object val: entityTpl.values()) { - if(val instanceof LinkedHashMap) { - relationship = (LinkedHashMap)((LinkedHashMap)val).get("relationship"); - break; - } - } - } - - if(relationship != null) { - properties = 
(LinkedHashMap)((EntityType)typeDefinition).getValue(PROPERTIES,relationship,false); - } - if(properties == null) { - properties = new LinkedHashMap(); - } - if(properties == null) { - properties = (LinkedHashMap)entityTpl.get(PROPERTIES); - } - if(properties == null) { - properties = new LinkedHashMap(); - } - - if(properties != null) { - for(Map.Entry me: properties.entrySet()) { - String pname = me.getKey(); - Object pvalue = me.getValue(); - LinkedHashMap propsDef = ((StatefulEntityType)typeDefinition).getPropertiesDef(); - if(propsDef != null && propsDef.get(pname) != null) { - if(properties.get(pname) != null) { - pvalue = properties.get(name); - } - PropertyDef pd = (PropertyDef)propsDef.get(pname); - Property prop = new Property(pname,pvalue,pd.getSchema(),customDef); - props.add(prop); - } - } - } - ArrayList pds = ((StatefulEntityType)typeDefinition).getPropertiesDefObjects(); - for(PropertyDef p: pds) { - if(p.getDefault() != null && properties.get(p.getName()) == null) { - Property prop = new Property(p.getName(), (LinkedHashMap)p.getDefault(), p.getSchema(), customDef); - props.add(prop); - } - } - return props; - } - - public void validate() { - _validateProperties(entityTpl,(StatefulEntityType)typeDefinition); - } - - // getters/setters - public NodeTemplate getTarget() { - return target; - } - - public NodeTemplate getSource() { - return source; - } - - public void setSource(NodeTemplate nt) { - source = nt; - } - - public void setTarget(NodeTemplate nt) { - target = nt; - } - - @Override - public String toString() { - return "RelationshipTemplate{" + - "name='" + name + '\'' + - ", target=" + target.getName() + - ", source=" + source.getName() + - ", _properties=" + _properties + - '}'; - } - -} - -/*python - -from toscaparser.entity_template import EntityTemplate -from toscaparser.properties import Property - -SECTIONS = (DERIVED_FROM, PROPERTIES, REQUIREMENTS, - INTERFACES, CAPABILITIES, TYPE) = \ - ('derived_from', 'properties', 'requirements', 
'interfaces', - 'capabilities', 'type') - -log = logging.getLogger('tosca') - - -class RelationshipTemplate(EntityTemplate): - '''Relationship template.''' - def __init__(self, relationship_template, name, custom_def=None, - target=None, source=None): - super(RelationshipTemplate, self).__init__(name, - relationship_template, - 'relationship_type', - custom_def) - self.name = name.lower() - self.target = target - self.source = source - - def get_properties_objects(self): - '''Return properties objects for this template.''' - if self._properties is None: - self._properties = self._create_relationship_properties() - return self._properties - - def _create_relationship_properties(self): - props = [] - properties = {} - relationship = self.entity_tpl.get('relationship') - - if not relationship: - for value in self.entity_tpl.values(): - if isinstance(value, dict): - relationship = value.get('relationship') - break - - if relationship: - properties = self.type_definition.get_value(self.PROPERTIES, - relationship) or {} - if not properties: - properties = self.entity_tpl.get(self.PROPERTIES) or {} - - if properties: - for name, value in properties.items(): - props_def = self.type_definition.get_properties_def() - if props_def and name in props_def: - if name in properties.keys(): - value = properties.get(name) - prop = Property(name, value, - props_def[name].schema, self.custom_def) - props.append(prop) - for p in self.type_definition.get_properties_def_objects(): - if p.default is not None and p.name not in properties.keys(): - prop = Property(p.name, p.default, p.schema, self.custom_def) - props.append(prop) - return props - - def validate(self): - self._validate_properties(self.entity_tpl, self.type_definition)*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/Repository.java b/src/main/java/org/openecomp/sdc/toscaparser/api/Repository.java deleted file mode 100644 index 3ede22c..0000000 --- 
a/src/main/java/org/openecomp/sdc/toscaparser/api/Repository.java +++ /dev/null @@ -1,118 +0,0 @@ -package org.openecomp.sdc.toscaparser.api; - -import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; - -import java.util.LinkedHashMap; - -import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; -import org.openecomp.sdc.toscaparser.api.utils.UrlUtils; - -public class Repository { - - private static final String DESCRIPTION = "description"; - private static final String URL = "url"; - private static final String CREDENTIAL = "credential"; - private static final String SECTIONS[] ={DESCRIPTION, URL, CREDENTIAL}; - - private String name; - private Object reposit; - private String url; - - @SuppressWarnings("unchecked") - public Repository(String repName,Object repValue) { - name = repName; - reposit = repValue; - if(reposit instanceof LinkedHashMap) { - url = (String)((LinkedHashMap)reposit).get("url"); - if(url == null) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE229", String.format( - "MissingRequiredFieldError: Repository \"%s\" is missing required field \"url\"", - name))); - } - } - loadAndValidate(name,reposit); - } - - @SuppressWarnings("unchecked") - private void loadAndValidate(String val,Object repositDef) { - String keyname = val; - if(repositDef instanceof LinkedHashMap) { - for(String key: ((LinkedHashMap)reposit).keySet()) { - boolean bFound = false; - for(String sect: SECTIONS) { - if(key.equals(sect)) { - bFound = true; - break; - } - } - if(!bFound) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE230", String.format( - "UnknownFieldError: repositories \"%s\" contains unknown field \"%s\"", - keyname,key))); - } - } - - String repositUrl = (String)((LinkedHashMap)repositDef).get("url"); - if(repositUrl != null) { - boolean urlVal = UrlUtils.validateUrl(repositUrl); - if(!urlVal) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new 
JToscaValidationIssue("JE231", String.format( - "URLException: repsositories \"%s\" Invalid Url",keyname))); - } - } - } - } - - @Override - public String toString() { - return "Repository{" + - "name='" + name + '\'' + - ", reposit=" + reposit + - ", url='" + url + '\'' + - '}'; - } -} - -/*python - -from toscaparser.common.exception import ValidationIssueCollector -from toscaparser.common.exception import MissingRequiredFieldError -from toscaparser.common.exception import UnknownFieldError -from toscaparser.common.exception import URLException -from toscaparser.utils.gettextutils import _ -import org.openecomp.sdc.toscaparser.api.utils.urlutils - -SECTIONS = (DESCRIPTION, URL, CREDENTIAL) = \ - ('description', 'url', 'credential') - - -class Repository(object): - def __init__(self, repositories, values): - self.name = repositories - self.reposit = values - if isinstance(self.reposit, dict): - if 'url' not in self.reposit.keys(): - ValidationIssueCollector.appendException( - MissingRequiredFieldError(what=_('Repository "%s"') - % self.name, required='url')) - self.url = self.reposit['url'] - self.load_and_validate(self.name, self.reposit) - - def load_and_validate(self, val, reposit_def): - self.keyname = val - if isinstance(reposit_def, dict): - for key in reposit_def.keys(): - if key not in SECTIONS: - ValidationIssueCollector.appendException( - UnknownFieldError(what=_('repositories "%s"') - % self.keyname, field=key)) - - if URL in reposit_def.keys(): - reposit_url = reposit_def.get(URL) - url_val = toscaparser.utils.urlutils.UrlUtils.\ - validate_url(reposit_url) - if url_val is not True: - ValidationIssueCollector.appendException( - URLException(what=_('repsositories "%s" Invalid Url') - % self.keyname)) -*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/RequirementAssignment.java b/src/main/java/org/openecomp/sdc/toscaparser/api/RequirementAssignment.java deleted file mode 100644 index 799a8ee..0000000 --- 
a/src/main/java/org/openecomp/sdc/toscaparser/api/RequirementAssignment.java +++ /dev/null @@ -1,85 +0,0 @@ -package org.openecomp.sdc.toscaparser.api; - -import java.util.Map; - -public class RequirementAssignment { - - private String name; - private String nodeName; - private String capabilityName; - private Object relationship; - - public RequirementAssignment(String reqName, String nodeName) { - this.name = reqName; - this.nodeName = nodeName; - } - - public RequirementAssignment(String reqName, String nodeName, String capabilityName) { - this.name = reqName; - this.nodeName = nodeName; - this.capabilityName = capabilityName; - } - - public RequirementAssignment(String reqName, String nodeName, String capabilityName, Object relationship) { - this.name = reqName; - this.nodeName = nodeName; - this.capabilityName = capabilityName; - this.relationship = relationship; - } - - /** - * Get the name for requirement assignment. - * @return the name for requirement assignment. - */ - public String getName() { - return name; - } - - /** - * Set the name for requirement - * @param name - the name for requirement to set - */ - public void setName(String name) { - this.name = name; - } - - /** - * Get the node name for requirement assignment. - * @return the node name for requirement - */ - public String getNodeTemplateName() { - return nodeName; - } - - /** - * Set the node name for requirement - * @param nodeName - the node name for requirement to set - */ - public void setNodeTemplateName(String nodeName) { - this.nodeName = nodeName; - } - - /** - * Get the capability name for requirement assignment. - * @return the capability name for requirement - */ - public String getCapabilityName() { - return capabilityName; - } - - /** - * Set the capability name for requirement assignment. 
- * @param capabilityName - the capability name for requirement to set - */ - public void setCapabilityName(String capabilityName) { - this.capabilityName = capabilityName; - } - - /** - * Get the relationship object for requirement - * @return the relationship object for requirement - */ - public Object getRelationship() { - return relationship; - } -} diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/RequirementAssignments.java b/src/main/java/org/openecomp/sdc/toscaparser/api/RequirementAssignments.java deleted file mode 100644 index 7991f3c..0000000 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/RequirementAssignments.java +++ /dev/null @@ -1,39 +0,0 @@ -package org.openecomp.sdc.toscaparser.api; - -import org.openecomp.sdc.toscaparser.api.RequirementAssignment; - -import java.util.ArrayList; -import java.util.List; -import java.util.stream.Collectors; - -public class RequirementAssignments { - - private List requirementAssignmentList; - - public RequirementAssignments(List requirementAssignments) { - this.requirementAssignmentList = requirementAssignments != null ? new ArrayList<>(requirementAssignments) : new ArrayList<>(); - } - - /** - * Get all requirement assignments for Node Template.
- * This object can be either the original one, holding all requirement assignments for this node template,or a filtered one, holding a filtered subset.
- * @return list of requirement assignments for the node template.
- * If there are no requirement assignments, empty list is returned. - */ - public List getAll() { - return new ArrayList<>(requirementAssignmentList); - } - - /** - * Filter requirement assignments by requirement name. - * @param reqName - The name of requirement - * @return RequirementAssignments object, containing requirement assignments of this type.
- * If no such found, filtering will result in an empty collection. - */ - public RequirementAssignments getRequirementsByName(String reqName) { - List requirementAssignments = requirementAssignmentList.stream() - .filter(req -> req.getName().equals(reqName)).collect(Collectors.toList()); - - return new RequirementAssignments(requirementAssignments); - } -} diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/SubstitutionMappings.java b/src/main/java/org/openecomp/sdc/toscaparser/api/SubstitutionMappings.java deleted file mode 100644 index e5e9d9a..0000000 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/SubstitutionMappings.java +++ /dev/null @@ -1,519 +0,0 @@ -package org.openecomp.sdc.toscaparser.api; - -import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; -import org.openecomp.sdc.toscaparser.api.elements.NodeType; -import org.openecomp.sdc.toscaparser.api.elements.PropertyDef; -import org.openecomp.sdc.toscaparser.api.parameters.Input; -import org.openecomp.sdc.toscaparser.api.parameters.Output; -import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; - -import java.util.ArrayList; -import java.util.HashSet; -import java.util.LinkedHashMap; -import java.util.List; - - -public class SubstitutionMappings { - // SubstitutionMappings class declaration - - // SubstitutionMappings exports the topology template as an - // implementation of a Node type. 
- - private static final String NODE_TYPE = "node_type"; - private static final String REQUIREMENTS = "requirements"; - private static final String CAPABILITIES = "capabilities"; - - private static final String SECTIONS[] = {NODE_TYPE, REQUIREMENTS, CAPABILITIES}; - - private static final String OPTIONAL_OUTPUTS[] = {"tosca_id", "tosca_name", "state"}; - - private LinkedHashMap subMappingDef; - private ArrayList nodetemplates; - private ArrayList inputs; - private ArrayList outputs; - private ArrayList groups; - private NodeTemplate subMappedNodeTemplate; - private LinkedHashMap customDefs; - private LinkedHashMap _capabilities; - private LinkedHashMap _requirements; - - public SubstitutionMappings(LinkedHashMap smsubMappingDef, - ArrayList smnodetemplates, - ArrayList sminputs, - ArrayList smoutputs, - ArrayList smgroups, - NodeTemplate smsubMappedNodeTemplate, - LinkedHashMap smcustomDefs) { - - subMappingDef = smsubMappingDef; - nodetemplates = smnodetemplates; - inputs = sminputs != null ? sminputs : new ArrayList(); - outputs = smoutputs != null ? smoutputs : new ArrayList(); - groups = smgroups != null ? smgroups : new ArrayList(); - subMappedNodeTemplate = smsubMappedNodeTemplate; - customDefs = smcustomDefs != null ? 
smcustomDefs : new LinkedHashMap(); - _validate(); - - _capabilities = null; - _requirements = null; - } - - public String getType() { - if(subMappingDef != null) { - return (String)subMappingDef.get(NODE_TYPE); - } - return null; - } - - public ArrayList getNodeTemplates() { - return nodetemplates; - } - - /* - @classmethod - def get_node_type(cls, sub_mapping_def): - if isinstance(sub_mapping_def, dict): - return sub_mapping_def.get(cls.NODE_TYPE) - */ - - public static String stGetNodeType(LinkedHashMap _subMappingDef) { - if(_subMappingDef instanceof LinkedHashMap) { - return (String)_subMappingDef.get(NODE_TYPE); - } - return null; - } - - public String getNodeType() { - return (String)subMappingDef.get(NODE_TYPE); - } - - public ArrayList getInputs() { - return inputs; - } - - public ArrayList getGroups() { - return groups; - } - - public LinkedHashMap getCapabilities() { - return (LinkedHashMap)subMappingDef.get(CAPABILITIES); - } - - public LinkedHashMap getRequirements() { - return (LinkedHashMap)subMappingDef.get(REQUIREMENTS); - } - - public NodeType getNodeDefinition() { - return new NodeType(getNodeType(), customDefs); - } - - private void _validate() { - // Basic validation - _validateKeys(); - _validateType(); - - // SubstitutionMapping class syntax validation - _validateInputs(); - _validateCapabilities(); - _validateRequirements(); - _validateOutputs(); - } - - private void _validateKeys() { - // validate the keys of substitution mappings - for(String key: subMappingDef.keySet()) { - boolean bFound = false; - for(String s: SECTIONS) { - if(s.equals(key)) { - bFound = true; - break; - } - } - if(!bFound) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE232", String.format( - "UnknownFieldError: SubstitutionMappings contain unknown field \"%s\"", - key))); - } - } - } - - private void _validateType() { - // validate the node_type of substitution mappings - String nodeType = (String)subMappingDef.get(NODE_TYPE); 
- if(nodeType == null) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE233", String.format( - "MissingRequiredFieldError: SubstitutionMappings used in topology_template is missing required field \"%s\"", - NODE_TYPE))); - } - Object nodeTypeDef = customDefs.get(nodeType); - if(nodeTypeDef == null) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE234", String.format( - "InvalidNodeTypeError: \"%s\" is invalid",nodeType))); - } - } - - private void _validateInputs() { - // validate the inputs of substitution mappings. - - // The inputs defined by the topology template have to match the - // properties of the node type or the substituted node. If there are - // more inputs than the substituted node has properties, default values - //must be defined for those inputs. - - HashSet allInputs = new HashSet<>(); - for(Input inp: inputs) { - allInputs.add(inp.getName()); - } - HashSet requiredProperties = new HashSet<>(); - for(PropertyDef pd: getNodeDefinition().getPropertiesDefObjects()) { - if(pd.isRequired() && pd.getDefault() == null) { - requiredProperties.add(pd.getName()); - } - } - // Must provide inputs for required properties of node type. - for(String property: requiredProperties) { - // Check property which is 'required' and has no 'default' value - if(!allInputs.contains(property)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE235", String.format( - "MissingRequiredInputError: SubstitutionMappings with node_type \"%s\" is missing required input \"%s\"", - getNodeType(),property))); - } - } - // If the optional properties of node type need to be customized by - // substituted node, it also is necessary to define inputs for them, - // otherwise they are not mandatory to be defined. 
- HashSet customizedParameters = new HashSet<>(); - if(subMappedNodeTemplate != null) { - customizedParameters.addAll(subMappedNodeTemplate.getProperties().keySet()); - } - HashSet allProperties = new HashSet( - getNodeDefinition().getPropertiesDef().keySet()); - HashSet diffset = customizedParameters; - diffset.removeAll(allInputs); - for(String parameter: diffset) { - if(allProperties.contains(parameter)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE236", String.format( - "MissingRequiredInputError: SubstitutionMappings with node_type \"%s\" is missing required input \"%s\"", - getNodeType(),parameter))); - } - } - // Additional inputs are not in the properties of node type must - // provide default values. Currently the scenario may not happen - // because of parameters validation in nodetemplate, here is a - // guarantee. - for(Input inp: inputs) { - diffset = allInputs; - diffset.removeAll(allProperties); - if(diffset.contains(inp.getName()) && inp.getDefault() == null) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE237", String.format( - "MissingRequiredInputError: SubstitutionMappings with node_type \"%s\" is missing rquired input \"%s\"", - getNodeType(),inp.getName()))); - } - } - } - - private void _validateCapabilities() { - // validate the capabilities of substitution mappings - - // The capabilities must be in node template which be mapped. 
- LinkedHashMap tplsCapabilities = - (LinkedHashMap)subMappingDef.get(CAPABILITIES); - List nodeCapabilities = null; - if(subMappedNodeTemplate != null) { - nodeCapabilities = subMappedNodeTemplate.getCapabilities().getAll(); - } - if(nodeCapabilities != null) { - for(CapabilityAssignment cap: nodeCapabilities) { - if(tplsCapabilities != null && tplsCapabilities.get(cap.getName()) == null) { - ; //pass - // ValidationIssueCollector.appendException( - // UnknownFieldError(what='SubstitutionMappings', - // field=cap)) - } - } - } - } - - private void _validateRequirements() { - // validate the requirements of substitution mappings - //***************************************************** - //TO-DO - Different from Python code!! one is a bug... - //***************************************************** - // The requirements must be in node template which be mapped. - LinkedHashMap tplsRequirements = - (LinkedHashMap)subMappingDef.get(REQUIREMENTS); - List nodeRequirements = null; - if(subMappedNodeTemplate != null) { - nodeRequirements = subMappedNodeTemplate.getRequirements().getAll(); - } - if(nodeRequirements != null) { - for(RequirementAssignment ro: nodeRequirements) { - String cap = ro.getName(); - if(tplsRequirements != null && tplsRequirements.get(cap) == null) { - ; //pass - // ValidationIssueCollector.appendException( - // UnknownFieldError(what='SubstitutionMappings', - // field=cap)) - } - } - } - } - - private void _validateOutputs() { - // validate the outputs of substitution mappings. - - // The outputs defined by the topology template have to match the - // attributes of the node type or the substituted node template, - // and the observable attributes of the substituted node template - // have to be defined as attributes of the node type or outputs in - // the topology template. 
- - // The outputs defined by the topology template have to match the - // attributes of the node type according to the specification, but - // it's reasonable that there are more inputs than the node type - // has properties, the specification will be amended? - - for(Output output: outputs) { - Object ado = getNodeDefinition().getAttributesDef(); - if(ado != null && ((LinkedHashMap)ado).get(output.getName()) == null) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE238", String.format( - "UnknownOutputError: Unknown output \"%s\" in SubstitutionMappings with node_type \"%s\"", - output.getName(),getNodeType()))); - } - } - } - - @Override - public String toString() { - return "SubstitutionMappings{" + -// "subMappingDef=" + subMappingDef + -// ", nodetemplates=" + nodetemplates + -// ", inputs=" + inputs + -// ", outputs=" + outputs + -// ", groups=" + groups + - ", subMappedNodeTemplate=" + (subMappedNodeTemplate==null?"":subMappedNodeTemplate.getName()) + -// ", customDefs=" + customDefs + -// ", _capabilities=" + _capabilities + -// ", _requirements=" + _requirements + - '}'; - } - - @Deprecated - public String toLimitedString() { - return "SubstitutionMappings{" + - "subMappingDef=" + subMappingDef + - ", nodetemplates=" + nodetemplates + - ", inputs=" + inputs + - ", outputs=" + outputs + - ", groups=" + groups + - ", subMappedNodeTemplate=" + (subMappedNodeTemplate==null?"":subMappedNodeTemplate.getName()) + - ", customDefs=" + customDefs + - ", _capabilities=" + _capabilities + - ", _requirements=" + _requirements + - '}'; - } -} - - -/*python - -from toscaparser.common.exception import ValidationIssueCollector -from toscaparser.common.exception import InvalidNodeTypeError -from toscaparser.common.exception import MissingDefaultValueError -from toscaparser.common.exception import MissingRequiredFieldError -from toscaparser.common.exception import MissingRequiredInputError -from toscaparser.common.exception import 
UnknownFieldError -from toscaparser.common.exception import UnknownOutputError -from toscaparser.elements.nodetype import NodeType -from toscaparser.utils.gettextutils import _ - -log = logging.getLogger('tosca') - - -class SubstitutionMappings(object): - '''SubstitutionMappings class declaration - - SubstitutionMappings exports the topology template as an - implementation of a Node type. - ''' - - SECTIONS = (NODE_TYPE, REQUIREMENTS, CAPABILITIES) = \ - ('node_type', 'requirements', 'capabilities') - - OPTIONAL_OUTPUTS = ['tosca_id', 'tosca_name', 'state'] - - def __init__(self, sub_mapping_def, nodetemplates, inputs, outputs, - sub_mapped_node_template, custom_defs): - self.nodetemplates = nodetemplates - self.sub_mapping_def = sub_mapping_def - self.inputs = inputs or [] - self.outputs = outputs or [] - self.sub_mapped_node_template = sub_mapped_node_template - self.custom_defs = custom_defs or {} - self._validate() - - self._capabilities = None - self._requirements = None - - @property - def type(self): - if self.sub_mapping_def: - return self.sub_mapping_def.get(self.NODE_TYPE) - - @classmethod - def get_node_type(cls, sub_mapping_def): - if isinstance(sub_mapping_def, dict): - return sub_mapping_def.get(cls.NODE_TYPE) - - @property - def node_type(self): - return self.sub_mapping_def.get(self.NODE_TYPE) - - @property - def capabilities(self): - return self.sub_mapping_def.get(self.CAPABILITIES) - - @property - def requirements(self): - return self.sub_mapping_def.get(self.REQUIREMENTS) - - @property - def node_definition(self): - return NodeType(self.node_type, self.custom_defs) - - def _validate(self): - # Basic validation - self._validate_keys() - self._validate_type() - - # SubstitutionMapping class syntax validation - self._validate_inputs() - self._validate_capabilities() - self._validate_requirements() - self._validate_outputs() - - def _validate_keys(self): - """validate the keys of substitution mappings.""" - for key in self.sub_mapping_def.keys(): - 
if key not in self.SECTIONS: - ValidationIssueCollector.appendException( - UnknownFieldError(what=_('SubstitutionMappings'), - field=key)) - - def _validate_type(self): - """validate the node_type of substitution mappings.""" - node_type = self.sub_mapping_def.get(self.NODE_TYPE) - if not node_type: - ValidationIssueCollector.appendException( - MissingRequiredFieldError( - what=_('SubstitutionMappings used in topology_template'), - required=self.NODE_TYPE)) - - node_type_def = self.custom_defs.get(node_type) - if not node_type_def: - ValidationIssueCollector.appendException( - InvalidNodeTypeError(what=node_type)) - - def _validate_inputs(self): - """validate the inputs of substitution mappings. - - The inputs defined by the topology template have to match the - properties of the node type or the substituted node. If there are - more inputs than the substituted node has properties, default values - must be defined for those inputs. - """ - - all_inputs = set([input.name for input in self.inputs]) - required_properties = set([p.name for p in - self.node_definition. - get_properties_def_objects() - if p.required and p.default is None]) - # Must provide inputs for required properties of node type. - for property in required_properties: - # Check property which is 'required' and has no 'default' value - if property not in all_inputs: - ValidationIssueCollector.appendException( - MissingRequiredInputError( - what=_('SubstitutionMappings with node_type ') - + self.node_type, - input_name=property)) - - # If the optional properties of node type need to be customized by - # substituted node, it also is necessary to define inputs for them, - # otherwise they are not mandatory to be defined. 
- customized_parameters = set(self.sub_mapped_node_template - .get_properties().keys() - if self.sub_mapped_node_template else []) - all_properties = set(self.node_definition.get_properties_def()) - for parameter in customized_parameters - all_inputs: - if parameter in all_properties: - ValidationIssueCollector.appendException( - MissingRequiredInputError( - what=_('SubstitutionMappings with node_type ') - + self.node_type, - input_name=parameter)) - - # Additional inputs are not in the properties of node type must - # provide default values. Currently the scenario may not happen - # because of parameters validation in nodetemplate, here is a - # guarantee. - for input in self.inputs: - if input.name in all_inputs - all_properties \ - and input.default is None: - ValidationIssueCollector.appendException( - MissingDefaultValueError( - what=_('SubstitutionMappings with node_type ') - + self.node_type, - input_name=input.name)) - - def _validate_capabilities(self): - """validate the capabilities of substitution mappings.""" - - # The capabilites must be in node template wchich be mapped. - tpls_capabilities = self.sub_mapping_def.get(self.CAPABILITIES) - node_capabiliteys = self.sub_mapped_node_template.get_capabilities() \ - if self.sub_mapped_node_template else None - for cap in node_capabiliteys.keys() if node_capabiliteys else []: - if (tpls_capabilities and - cap not in list(tpls_capabilities.keys())): - pass - # ValidationIssueCollector.appendException( - # UnknownFieldError(what='SubstitutionMappings', - # field=cap)) - - def _validate_requirements(self): - """validate the requirements of substitution mappings.""" - - # The requirements must be in node template wchich be mapped. 
- tpls_requirements = self.sub_mapping_def.get(self.REQUIREMENTS) - node_requirements = self.sub_mapped_node_template.requirements \ - if self.sub_mapped_node_template else None - for req in node_requirements if node_requirements else []: - if (tpls_requirements and - req not in list(tpls_requirements.keys())): - pass - # ValidationIssueCollector.appendException( - # UnknownFieldError(what='SubstitutionMappings', - # field=req)) - - def _validate_outputs(self): - """validate the outputs of substitution mappings. - - The outputs defined by the topology template have to match the - attributes of the node type or the substituted node template, - and the observable attributes of the substituted node template - have to be defined as attributes of the node type or outputs in - the topology template. - """ - - # The outputs defined by the topology template have to match the - # attributes of the node type according to the specification, but - # it's reasonable that there are more inputs than the node type - # has properties, the specification will be amended? 
- for output in self.outputs: - if output.name not in self.node_definition.get_attributes_def(): - ValidationIssueCollector.appendException( - UnknownOutputError( - where=_('SubstitutionMappings with node_type ') - + self.node_type, - output_name=output.name))*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/TopologyTemplate.java b/src/main/java/org/openecomp/sdc/toscaparser/api/TopologyTemplate.java deleted file mode 100644 index c19623f..0000000 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/TopologyTemplate.java +++ /dev/null @@ -1,858 +0,0 @@ -package org.openecomp.sdc.toscaparser.api; - -import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; -import org.openecomp.sdc.toscaparser.api.elements.InterfacesDef; -import org.openecomp.sdc.toscaparser.api.elements.NodeType; -import org.openecomp.sdc.toscaparser.api.elements.RelationshipType; -import org.openecomp.sdc.toscaparser.api.functions.Function; -import org.openecomp.sdc.toscaparser.api.functions.GetAttribute; -import org.openecomp.sdc.toscaparser.api.functions.GetInput; -import org.openecomp.sdc.toscaparser.api.parameters.Input; -import org.openecomp.sdc.toscaparser.api.parameters.Output; -import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; - -import java.util.ArrayList; -import java.util.HashSet; -import java.util.LinkedHashMap; -import java.util.Map; - -public class TopologyTemplate { - - private static final String DESCRIPTION = "description"; - private static final String INPUTS = "inputs"; - private static final String NODE_TEMPLATES = "node_templates"; - private static final String RELATIONSHIP_TEMPLATES = "relationship_templates"; - private static final String OUTPUTS = "outputs"; - private static final String GROUPS = "groups"; - private static final String SUBSTITUTION_MAPPINGS = "substitution_mappings"; - private static final String POLICIES = "policies"; - private static final String METADATA = "metadata"; - - private 
static String SECTIONS[] = { - DESCRIPTION, INPUTS, NODE_TEMPLATES, RELATIONSHIP_TEMPLATES, - OUTPUTS, GROUPS, SUBSTITUTION_MAPPINGS, POLICIES, METADATA - }; - - private LinkedHashMap tpl; - LinkedHashMap metaData; - private ArrayList inputs; - private ArrayList outputs; - private ArrayList relationshipTemplates; - private ArrayList nodeTemplates; - private LinkedHashMap customDefs; - private LinkedHashMap relTypes;//TYPE - private NodeTemplate subMappedNodeTemplate; - private ArrayList groups; - private ArrayList policies; - private LinkedHashMap parsedParams = null;//TYPE - private String description; - private ToscaGraph graph; - private SubstitutionMappings substitutionMappings; - private boolean resolveGetInput; - - public TopologyTemplate( - LinkedHashMap _template, - LinkedHashMap _customDefs, - LinkedHashMap _relTypes,//TYPE - LinkedHashMap _parsedParams, - NodeTemplate _subMappedNodeTemplate, - boolean _resolveGetInput) { - - tpl = _template; - if(tpl != null) { - subMappedNodeTemplate = _subMappedNodeTemplate; - metaData = _metaData(); - customDefs = _customDefs; - relTypes = _relTypes; - parsedParams = _parsedParams; - resolveGetInput = _resolveGetInput; - _validateField(); - description = _tplDescription(); - inputs = _inputs(); - relationshipTemplates =_relationshipTemplates(); - nodeTemplates = _nodeTemplates(); - outputs = _outputs(); - if(nodeTemplates != null) { - graph = new ToscaGraph(nodeTemplates); - } - groups = _groups(); - policies = _policies(); - _processIntrinsicFunctions(); - substitutionMappings = _substitutionMappings(); - } - } - - @SuppressWarnings("unchecked") - private ArrayList _inputs() { - //DumpUtils.dumpYaml(customDefs,0); - ArrayList alInputs = new ArrayList<>(); - for(String name: _tplInputs().keySet()) { - Object attrs = _tplInputs().get(name); - Input input = new Input(name,(LinkedHashMap)attrs,customDefs); - if(parsedParams != null && parsedParams.get(name) != null) { - input.validate(parsedParams.get(name)); - } - else { 
- Object _default = input.getDefault(); - if(_default != null) { - input.validate(_default); - } - } - if((parsedParams != null && parsedParams.get(input.getName()) == null || parsedParams == null) - && input.isRequired() && input.getDefault() == null) { - System.out.format("Log warning: The required parameter \"%s\" is not provided\n",input.getName()); - } - alInputs.add(input); - } - return alInputs; - - } - - private LinkedHashMap _metaData() { - if(tpl.get(METADATA) != null) { - return (LinkedHashMap)tpl.get(METADATA); - } - else { - return new LinkedHashMap(); - } - - } - - private ArrayList _nodeTemplates() { - ArrayList alNodeTemplates = new ArrayList<>(); - LinkedHashMap tpls = _tplNodeTemplates(); - if(tpls != null) { - for(String name: tpls.keySet()) { - NodeTemplate tpl = new NodeTemplate(name, - tpls, - customDefs, - relationshipTemplates, - relTypes); - if(tpl.getTypeDefinition() != null) { - boolean b = NodeType.TOSCA_DEF.get(tpl.getType()) != null; - if(b || (tpl.getCustomDef() != null && !tpl.getCustomDef().isEmpty())) { - tpl.validate(); - alNodeTemplates.add(tpl); - } - } - } - } - return alNodeTemplates; - } - - @SuppressWarnings("unchecked") - private ArrayList _relationshipTemplates() { - ArrayList alRelationshipTemplates = new ArrayList<>(); - LinkedHashMap tpls = _tplRelationshipTemplates(); - if(tpls != null) { - for(String name: tpls.keySet()) { - RelationshipTemplate tpl = new RelationshipTemplate( - (LinkedHashMap)tpls.get(name),name,customDefs,null,null); - - alRelationshipTemplates.add(tpl); - } - } - return alRelationshipTemplates; - } - - private ArrayList _outputs() { - ArrayList alOutputs = new ArrayList<>(); - for(Map.Entry me: _tplOutputs().entrySet()) { - String oname = me.getKey(); - LinkedHashMap oattrs = (LinkedHashMap)me.getValue(); - Output o = new Output(oname,oattrs); - o.validate(); - alOutputs.add(o); - } - return alOutputs; - } - - private SubstitutionMappings _substitutionMappings() { - LinkedHashMap 
tplSubstitutionMapping = (LinkedHashMap) _tplSubstitutionMappings(); - - //*** the commenting-out below and the weaker condition are in the Python source - // #if tpl_substitution_mapping and self.sub_mapped_node_template: - if(tplSubstitutionMapping != null && tplSubstitutionMapping.size() > 0) { - return new SubstitutionMappings(tplSubstitutionMapping, - nodeTemplates, - inputs, - outputs, - groups, - subMappedNodeTemplate, - customDefs); - } - return null; - - } - - @SuppressWarnings("unchecked") - private ArrayList _policies() { - ArrayList alPolicies = new ArrayList<>(); - for(Object po: _tplPolicies()) { - LinkedHashMap policy = (LinkedHashMap)po; - for(Map.Entry me: policy.entrySet()) { - String policyName = me.getKey(); - LinkedHashMap policyTpl = (LinkedHashMap)me.getValue(); - ArrayList targetList = (ArrayList)policyTpl.get("targets"); - //ArrayList targetObjects = new ArrayList<>(); - ArrayList targetNodes = new ArrayList<>(); - ArrayList targetObjects = new ArrayList<>(); - ArrayList targetGroups = new ArrayList<>(); - String targetsType = "groups"; - if(targetList != null && targetList.size() >= 1) { - targetGroups = _getPolicyGroups(targetList); - if(targetGroups == null) { - targetsType = "node_templates"; - targetNodes = _getGroupMembers(targetList); - for(NodeTemplate nt: targetNodes) { - targetObjects.add(nt); - } - } - else { - for(Group gr: targetGroups) { - targetObjects.add(gr); - } - } - } - Policy policyObj = new Policy(policyName, - policyTpl, - targetObjects, - targetsType, - customDefs); - alPolicies.add(policyObj); - } - } - return alPolicies; - } - - private ArrayList _groups() { - ArrayList groups = new ArrayList<>(); - ArrayList memberNodes = null; - for(Map.Entry me: _tplGroups().entrySet()) { - String groupName = me.getKey(); - LinkedHashMap groupTpl = (LinkedHashMap)me.getValue(); - ArrayList memberNames = (ArrayList)groupTpl.get("members"); - if(memberNames != null) { - DataEntity.validateDatatype("list", 
memberNames,null,null,null); - if(memberNames.size() < 1 || - (new HashSet(memberNames)).size() != memberNames.size()) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE005",String.format( - "InvalidGroupTargetException: Member nodes \"%s\" should be >= 1 and not repeated", - memberNames.toString()))); - } - else { - memberNodes = _getGroupMembers(memberNames); - } - } - Group group = new Group(groupName, - groupTpl, - memberNodes, - customDefs); - groups.add(group); - } - return groups; - } - - private ArrayList _getGroupMembers(ArrayList memberNames) { - ArrayList memberNodes = new ArrayList<>(); - _validateGroupMembers(memberNames); - for(String member: memberNames) { - for(NodeTemplate node: nodeTemplates) { - if(member.equals(node.getName())) { - memberNodes.add(node); - } - } - } - return memberNodes; - } - - private ArrayList _getPolicyGroups(ArrayList memberNames) { - ArrayList memberGroups = new ArrayList<>(); - for(String member: memberNames) { - for(Group group: groups) { - if(member.equals(group.getName())) { - memberGroups.add(group); - } - } - } - return memberGroups; - } - - private void _validateGroupMembers(ArrayList members) { - ArrayList nodeNames = new ArrayList<>(); - for(NodeTemplate node: nodeTemplates) { - nodeNames.add(node.getName()); - } - for(String member: members) { - if(!nodeNames.contains(member)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE239", String.format( - "InvalidGroupTargetException: Target member \"%s\" is not found in \"nodeTemplates\"",member))); - } - } - } - - // topology template can act like node template - // it is exposed by substitution_mappings. 
- - public String nodetype() { - return substitutionMappings.getNodeType(); - } - - public LinkedHashMap capabilities() { - return substitutionMappings.getCapabilities(); - } - - public LinkedHashMap requirements() { - return substitutionMappings.getRequirements(); - } - - private String _tplDescription() { - return (String)tpl.get(DESCRIPTION); - //if description: - // return description.rstrip() - } - - @SuppressWarnings("unchecked") - private LinkedHashMap _tplInputs() { - if(tpl.get(INPUTS) != null) { - return (LinkedHashMap)tpl.get(INPUTS); - } - else { - return new LinkedHashMap(); - } - } - - @SuppressWarnings("unchecked") - private LinkedHashMap _tplNodeTemplates() { - return (LinkedHashMap)tpl.get(NODE_TEMPLATES); - } - - @SuppressWarnings("unchecked") - private LinkedHashMap _tplRelationshipTemplates() { - if(tpl.get(RELATIONSHIP_TEMPLATES) != null) { - return (LinkedHashMap)tpl.get(RELATIONSHIP_TEMPLATES); - } - else { - return new LinkedHashMap(); - } - } - - @SuppressWarnings("unchecked") - private LinkedHashMap _tplOutputs() { - if(tpl.get(OUTPUTS) != null) { - return (LinkedHashMap)tpl.get(OUTPUTS); - } - else { - return new LinkedHashMap(); - } - } - - @SuppressWarnings("unchecked") - private LinkedHashMap _tplSubstitutionMappings() { - if(tpl.get(SUBSTITUTION_MAPPINGS) != null) { - return (LinkedHashMap)tpl.get(SUBSTITUTION_MAPPINGS); - } - else { - return new LinkedHashMap(); - } - } - - @SuppressWarnings("unchecked") - private LinkedHashMap _tplGroups() { - if(tpl.get(GROUPS) != null) { - return (LinkedHashMap)tpl.get(GROUPS); - } - else { - return new LinkedHashMap(); - } - } - - @SuppressWarnings("unchecked") - private ArrayList _tplPolicies() { - if(tpl.get(POLICIES) != null) { - return (ArrayList)tpl.get(POLICIES); - } - else { - return new ArrayList(); - } - } - - private void _validateField() { - for(String name: tpl.keySet()) { - boolean bFound = false; - for(String section: SECTIONS) { - if(name.equals(section)) { - bFound = true; - 
break; - } - } - if(!bFound) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE240", String.format( - "UnknownFieldError: TopologyTemplate contains unknown field \"%s\"",name))); - } - } - } - - @SuppressWarnings("unchecked") - private void _processIntrinsicFunctions() { - // Process intrinsic functions - - // Current implementation processes functions within node template - // properties, requirements, interfaces inputs and template outputs. - - if(nodeTemplates != null) { - for(NodeTemplate nt: nodeTemplates) { - for(Property prop: nt.getPropertiesObjects()) { - prop.setValue(Function.getFunction(this,nt,prop.getValue(), resolveGetInput)); - } - for(InterfacesDef ifd: nt.getInterfaces()) { - LinkedHashMap ifin = ifd.getInputs(); - if(ifin != null) { - for(Map.Entry me: ifin.entrySet()) { - String name = me.getKey(); - Object value = Function.getFunction(this,nt,me.getValue(), resolveGetInput); - ifd.setInput(name,value); - } - } - } - if(nt.getRequirements() != null) { - for(RequirementAssignment req: nt.getRequirements().getAll()) { - LinkedHashMap rel; - Object t = req.getRelationship(); - // it can be a string or a LHM... 
- if(t instanceof LinkedHashMap) { - rel = (LinkedHashMap)t; - } - else { - // we set it to null to fail the next test - // and avoid the get("proprties") - rel = null; - } - - if(rel != null && rel.get("properties") != null) { - LinkedHashMap relprops = - (LinkedHashMap)rel.get("properties"); - for(String key: relprops.keySet()) { - Object value = relprops.get(key); - Object func = Function.getFunction(this,req,value, resolveGetInput); - relprops.put(key,func); - } - } - } - } - if(nt.getCapabilitiesObjects() != null) { - for(CapabilityAssignment cap: nt.getCapabilitiesObjects()) { - if(cap.getPropertiesObjects() != null) { - for(Property prop: cap.getPropertiesObjects()) { - Object propvalue = Function.getFunction(this,nt,prop.getValue(), resolveGetInput); - if(propvalue instanceof GetInput) { - propvalue = ((GetInput)propvalue).result(); - for(String p: cap.getProperties().keySet()) { - //Object v = cap.getProperties().get(p); - if(p.equals(prop.getName())) { - cap.setProperty(p,propvalue); - } - } - } - } - } - } - } - for(RelationshipType rel: nt.getRelationships().keySet()) { - NodeTemplate node = nt.getRelationships().get(rel); - ArrayList relTpls = node.getRelationshipTemplate(); - if(relTpls != null) { - for(RelationshipTemplate relTpl: relTpls) { - // TT 5 - for(InterfacesDef iface: relTpl.getInterfaces()) { - if(iface.getInputs() != null) { - for(String name: iface.getInputs().keySet()) { - Object value = iface.getInputs().get(name); - Object func = Function.getFunction( - this, - relTpl, - value, - resolveGetInput); - iface.setInput(name,func); - } - } - } - } - } - } - } - } - for(Output output: outputs) { - Object func = Function.getFunction(this,outputs,output.getValue(), resolveGetInput); - if(func instanceof GetAttribute) { - output.setAttr(Output.VALUE,func); - } - } - } - - public static String getSubMappingNodeType(LinkedHashMap topologyTpl) { - if(topologyTpl != null && topologyTpl instanceof LinkedHashMap) { - Object submapTpl = 
topologyTpl.get(SUBSTITUTION_MAPPINGS); - return SubstitutionMappings.stGetNodeType((LinkedHashMap)submapTpl); - } - return null; - } - - // getters - - public LinkedHashMap getTpl() { - return tpl; - } - - public LinkedHashMap getMetadata() { - return metaData; - } - - public ArrayList getInputs() { - return inputs; - } - - public ArrayList getOutputs() { - return outputs; - } - - public ArrayList getPolicies() { - return policies; - } - - public ArrayList getRelationshipTemplates() { - return relationshipTemplates; - } - - public ArrayList getNodeTemplates() { - return nodeTemplates; - } - - public ArrayList getGroups() { - return groups; - } - - public SubstitutionMappings getSubstitutionMappings() { - return substitutionMappings; - } - - public LinkedHashMap getParsedParams() { - return parsedParams; - } - - public boolean getResolveGetInput() { - return resolveGetInput; - } -} - -/*python - -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- - -import logging - -from toscaparser.common import exception -from toscaparser.dataentity import DataEntity -from toscaparser import functions -from toscaparser.groups import Group -from toscaparser.nodetemplate import NodeTemplate -from toscaparser.parameters import Input -from toscaparser.parameters import Output -from toscaparser.policy import Policy -from toscaparser.relationship_template import RelationshipTemplate -from toscaparser.substitution_mappings import SubstitutionMappings -from toscaparser.tpl_relationship_graph import ToscaGraph -from toscaparser.utils.gettextutils import _ - - -# Topology template key names -SECTIONS = (DESCRIPTION, INPUTS, NODE_TEMPLATES, - RELATIONSHIP_TEMPLATES, OUTPUTS, GROUPS, - SUBSTITUION_MAPPINGS, POLICIES) = \ - ('description', 'inputs', 'node_templates', - 'relationship_templates', 'outputs', 'groups', - 'substitution_mappings', 'policies') - -log = logging.getLogger("tosca.model") - - -class TopologyTemplate(object): - - '''Load the template data.''' - def __init__(self, template, custom_defs, - rel_types=None, parsed_params=None, - sub_mapped_node_template=None): - self.tpl = template - self.sub_mapped_node_template = sub_mapped_node_template - if self.tpl: - self.custom_defs = custom_defs - self.rel_types = rel_types - self.parsed_params = parsed_params - self._validate_field() - self.description = self._tpl_description() - self.inputs = self._inputs() - self.relationship_templates = self._relationship_templates() - self.nodetemplates = self._nodetemplates() - self.outputs = self._outputs() - if hasattr(self, 'nodetemplates'): - self.graph = ToscaGraph(self.nodetemplates) - self.groups = self._groups() - self.policies = self._policies() - self._process_intrinsic_functions() - self.substitution_mappings = self._substitution_mappings() - - def _inputs(self): - inputs = [] - for name, attrs in self._tpl_inputs().items(): - input = Input(name, attrs) - if self.parsed_params and name in self.parsed_params: - 
input.validate(self.parsed_params[name]) - else: - default = input.default - if default: - input.validate(default) - if (self.parsed_params and input.name not in self.parsed_params - or self.parsed_params is None) and input.required \ - and input.default is None: - log.warning(_('The required parameter %s ' - 'is not provided') % input.name) - - inputs.append(input) - return inputs - - def _nodetemplates(self): - nodetemplates = [] - tpls = self._tpl_nodetemplates() - if tpls: - for name in tpls: - tpl = NodeTemplate(name, tpls, self.custom_defs, - self.relationship_templates, - self.rel_types) - if (tpl.type_definition and - (tpl.type in tpl.type_definition.TOSCA_DEF or - (tpl.type not in tpl.type_definition.TOSCA_DEF and - bool(tpl.custom_def)))): - tpl.validate(self) - nodetemplates.append(tpl) - return nodetemplates - - def _relationship_templates(self): - rel_templates = [] - tpls = self._tpl_relationship_templates() - for name in tpls: - tpl = RelationshipTemplate(tpls[name], name, self.custom_defs) - rel_templates.append(tpl) - return rel_templates - - def _outputs(self): - outputs = [] - for name, attrs in self._tpl_outputs().items(): - output = Output(name, attrs) - output.validate() - outputs.append(output) - return outputs - - def _substitution_mappings(self): - tpl_substitution_mapping = self._tpl_substitution_mappings() - # if tpl_substitution_mapping and self.sub_mapped_node_template: - if tpl_substitution_mapping: - return SubstitutionMappings(tpl_substitution_mapping, - self.nodetemplates, - self.inputs, - self.outputs, - self.sub_mapped_node_template, - self.custom_defs) - - def _policies(self): - policies = [] - for policy in self._tpl_policies(): - for policy_name, policy_tpl in policy.items(): - target_list = policy_tpl.get('targets') - if target_list and len(target_list) >= 1: - target_objects = [] - targets_type = "groups" - target_objects = self._get_policy_groups(target_list) - if not target_objects: - targets_type = "node_templates" - 
target_objects = self._get_group_members(target_list) - policyObj = Policy(policy_name, policy_tpl, - target_objects, targets_type, - self.custom_defs) - policies.append(policyObj) - return policies - - def _groups(self): - groups = [] - member_nodes = None - for group_name, group_tpl in self._tpl_groups().items(): - member_names = group_tpl.get('members') - if member_names is not None: - DataEntity.validate_datatype('list', member_names) - if len(member_names) < 1 or \ - len(member_names) != len(set(member_names)): - exception.ValidationIssueCollector.appendException( - exception.InvalidGroupTargetException( - message=_('Member nodes "%s" should be >= 1 ' - 'and not repeated') % member_names)) - else: - member_nodes = self._get_group_members(member_names) - group = Group(group_name, group_tpl, - member_nodes, - self.custom_defs) - groups.append(group) - return groups - - def _get_group_members(self, member_names): - member_nodes = [] - self._validate_group_members(member_names) - for member in member_names: - for node in self.nodetemplates: - if node.name == member: - member_nodes.append(node) - return member_nodes - - def _get_policy_groups(self, member_names): - member_groups = [] - for member in member_names: - for group in self.groups: - if group.name == member: - member_groups.append(group) - return member_groups - - def _validate_group_members(self, members): - node_names = [] - for node in self.nodetemplates: - node_names.append(node.name) - for member in members: - if member not in node_names: - exception.ValidationIssueCollector.appendException( - exception.InvalidGroupTargetException( - message=_('Target member "%s" is not found in ' - 'node_templates') % member)) - - # topology template can act like node template - # it is exposed by substitution_mappings. 
- def nodetype(self): - return self.substitution_mappings.node_type \ - if self.substitution_mappings else None - - def capabilities(self): - return self.substitution_mappings.capabilities \ - if self.substitution_mappings else None - - def requirements(self): - return self.substitution_mappings.requirements \ - if self.substitution_mappings else None - - def _tpl_description(self): - description = self.tpl.get(DESCRIPTION) - if description: - return description.rstrip() - - def _tpl_inputs(self): - return self.tpl.get(INPUTS) or {} - - def _tpl_nodetemplates(self): - return self.tpl.get(NODE_TEMPLATES) - - def _tpl_relationship_templates(self): - return self.tpl.get(RELATIONSHIP_TEMPLATES) or {} - - def _tpl_outputs(self): - return self.tpl.get(OUTPUTS) or {} - - def _tpl_substitution_mappings(self): - return self.tpl.get(SUBSTITUION_MAPPINGS) or {} - - def _tpl_groups(self): - return self.tpl.get(GROUPS) or {} - - def _tpl_policies(self): - return self.tpl.get(POLICIES) or {} - - def _validate_field(self): - for name in self.tpl: - if name not in SECTIONS: - exception.ValidationIssueCollector.appendException( - exception.UnknownFieldError(what='Template', field=name)) - - def _process_intrinsic_functions(self): - """Process intrinsic functions - - Current implementation processes functions within node template - properties, requirements, interfaces inputs and template outputs. 
- """ - if hasattr(self, 'nodetemplates'): - for node_template in self.nodetemplates: - for prop in node_template.get_properties_objects(): - prop.value = functions.get_function(self, - node_template, - prop.value) - for interface in node_template.interfaces: - if interface.inputs: - for name, value in interface.inputs.items(): - interface.inputs[name] = functions.get_function( - self, - node_template, - value) - if node_template.requirements and \ - isinstance(node_template.requirements, list): - for req in node_template.requirements: - rel = req - for req_name, req_item in req.items(): - if isinstance(req_item, dict): - rel = req_item.get('relationship') - break - if rel and 'properties' in rel: - for key, value in rel['properties'].items(): - rel['properties'][key] = \ - functions.get_function(self, - req, - value) - if node_template.get_capabilities_objects(): - for cap in node_template.get_capabilities_objects(): - if cap.get_properties_objects(): - for prop in cap.get_properties_objects(): - propvalue = functions.get_function( - self, - node_template, - prop.value) - if isinstance(propvalue, functions.GetInput): - propvalue = propvalue.result() - for p, v in cap._properties.items(): - if p == prop.name: - cap._properties[p] = propvalue - for rel, node in node_template.relationships.items(): - rel_tpls = node.relationship_tpl - if rel_tpls: - for rel_tpl in rel_tpls: - for interface in rel_tpl.interfaces: - if interface.inputs: - for name, value in \ - interface.inputs.items(): - interface.inputs[name] = \ - functions.get_function(self, - rel_tpl, - value) - for output in self.outputs: - func = functions.get_function(self, self.outputs, output.value) - if isinstance(func, functions.GetAttribute): - output.attrs[output.VALUE] = func - - @classmethod - def get_sub_mapping_node_type(cls, topology_tpl): - if topology_tpl and isinstance(topology_tpl, dict): - submap_tpl = topology_tpl.get(SUBSTITUION_MAPPINGS) - return SubstitutionMappings.get_node_type(submap_tpl) 
-*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/ToscaGraph.java b/src/main/java/org/openecomp/sdc/toscaparser/api/ToscaGraph.java deleted file mode 100644 index 2de3bb9..0000000 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/ToscaGraph.java +++ /dev/null @@ -1,109 +0,0 @@ -package org.openecomp.sdc.toscaparser.api; - -import java.util.ArrayList; -import java.util.LinkedHashMap; - -import org.openecomp.sdc.toscaparser.api.elements.RelationshipType; - -//import java.util.Iterator; - -public class ToscaGraph { - // Graph of Tosca Node Templates - - private ArrayList nodeTemplates; - private LinkedHashMap vertices; - - public ToscaGraph(ArrayList inodeTemplates) { - nodeTemplates = inodeTemplates; - vertices = new LinkedHashMap(); - _create(); - } - - private void _createVertex(NodeTemplate node) { - if(vertices.get(node.getName()) == null) { - vertices.put(node.getName(),node); - } - } - - private void _createEdge(NodeTemplate node1, - NodeTemplate node2, - RelationshipType relation) { - if(vertices.get(node1.getName()) == null) { - _createVertex(node1); - vertices.get(node1.name)._addNext(node2,relation); - } - } - - public NodeTemplate vertex(String name) { - if(vertices.get(name) != null) { - return vertices.get(name); - } - return null; - } - -// public Iterator getIter() { -// return vertices.values().iterator(); -// } - - private void _create() { - for(NodeTemplate node: nodeTemplates) { - LinkedHashMap relation = node.getRelationships(); - if(relation != null) { - for(RelationshipType rel: relation.keySet()) { - NodeTemplate nodeTpls = relation.get(rel); - for(NodeTemplate tpl: nodeTemplates) { - if(tpl.getName().equals(nodeTpls.getName())) { - _createEdge(node,tpl,rel); - } - } - } - } - _createVertex(node); - } - } - - @Override - public String toString() { - return "ToscaGraph{" + - "nodeTemplates=" + nodeTemplates + - ", vertices=" + vertices + - '}'; - } -} - -/*python - -class ToscaGraph(object): - 
'''Graph of Tosca Node Templates.''' - def __init__(self, nodetemplates): - self.nodetemplates = nodetemplates - self.vertices = {} - self._create() - - def _create_vertex(self, node): - if node not in self.vertices: - self.vertices[node.name] = node - - def _create_edge(self, node1, node2, relationship): - if node1 not in self.vertices: - self._create_vertex(node1) - self.vertices[node1.name]._add_next(node2, - relationship) - - def vertex(self, node): - if node in self.vertices: - return self.vertices[node] - - def __iter__(self): - return iter(self.vertices.values()) - - def _create(self): - for node in self.nodetemplates: - relation = node.relationships - if relation: - for rel, nodetpls in relation.items(): - for tpl in self.nodetemplates: - if tpl.name == nodetpls.name: - self._create_edge(node, tpl, rel) - self._create_vertex(node) -*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/ToscaTemplate.java b/src/main/java/org/openecomp/sdc/toscaparser/api/ToscaTemplate.java deleted file mode 100644 index e96ca56..0000000 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/ToscaTemplate.java +++ /dev/null @@ -1,1200 +0,0 @@ -package org.openecomp.sdc.toscaparser.api; - -import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; - -import java.io.File; -import java.io.FileInputStream; -import java.io.FileNotFoundException; -import java.io.IOException; -import java.io.InputStream; -import java.util.*; -import java.util.concurrent.ConcurrentHashMap; -import java.nio.file.Files; -import java.util.function.Predicate; -import java.nio.file.Paths; - -import org.openecomp.sdc.toscaparser.api.common.ValidationIssueCollector; -import org.openecomp.sdc.toscaparser.api.common.JToscaException; -import org.openecomp.sdc.toscaparser.api.elements.EntityType; -import org.openecomp.sdc.toscaparser.api.elements.Metadata; -import org.openecomp.sdc.toscaparser.api.extensions.ExtTools; -import 
org.openecomp.sdc.toscaparser.api.parameters.Input; -import org.openecomp.sdc.toscaparser.api.parameters.Output; -import org.openecomp.sdc.toscaparser.api.prereq.CSAR; -import org.openecomp.sdc.toscaparser.api.utils.JToscaErrorCodes; -import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.yaml.snakeyaml.Yaml; - -public class ToscaTemplate extends Object { - - private static Logger log = LoggerFactory.getLogger(ToscaTemplate.class.getName()); - - // TOSCA template key names - private static final String DEFINITION_VERSION = "tosca_definitions_version"; - private static final String DEFAULT_NAMESPACE = "tosca_default_namespace"; - private static final String TEMPLATE_NAME = "template_name"; - private static final String TOPOLOGY_TEMPLATE = "topology_template"; - private static final String TEMPLATE_AUTHOR = "template_author"; - private static final String TEMPLATE_VERSION = "template_version"; - private static final String DESCRIPTION = "description"; - private static final String IMPORTS = "imports"; - private static final String DSL_DEFINITIONS = "dsl_definitions"; - private static final String NODE_TYPES = "node_types"; - private static final String RELATIONSHIP_TYPES = "relationship_types"; - private static final String RELATIONSHIP_TEMPLATES = "relationship_templates"; - private static final String CAPABILITY_TYPES = "capability_types"; - private static final String ARTIFACT_TYPES = "artifact_types"; - private static final String DATA_TYPES = "data_types"; - private static final String INTERFACE_TYPES = "interface_types"; - private static final String POLICY_TYPES = "policy_types"; - private static final String GROUP_TYPES = "group_types"; - private static final String REPOSITORIES = "repositories"; - - private static String SECTIONS[] = { - DEFINITION_VERSION, DEFAULT_NAMESPACE, TEMPLATE_NAME, - TOPOLOGY_TEMPLATE, TEMPLATE_AUTHOR, TEMPLATE_VERSION, - DESCRIPTION, IMPORTS, 
DSL_DEFINITIONS, NODE_TYPES, - RELATIONSHIP_TYPES, RELATIONSHIP_TEMPLATES, - CAPABILITY_TYPES, ARTIFACT_TYPES, DATA_TYPES, - INTERFACE_TYPES, POLICY_TYPES, GROUP_TYPES, REPOSITORIES - }; - - // Sections that are specific to individual template definitions - private static final String METADATA = "metadata"; - private static ArrayList SPECIAL_SECTIONS; - - private ExtTools exttools = new ExtTools(); - - private ArrayList VALID_TEMPLATE_VERSIONS; - private LinkedHashMap> ADDITIONAL_SECTIONS; - - private boolean isFile; - private String path; - private String inputPath; - private String rootPath; - private LinkedHashMap parsedParams; - private boolean resolveGetInput; - private LinkedHashMap tpl; - private String version; - private ArrayList imports; - private LinkedHashMap relationshipTypes; - private Metadata metaData; - private String description; - private TopologyTemplate topologyTemplate; - private ArrayList repositories; - private ArrayList inputs; - private ArrayList relationshipTemplates; - private ArrayList nodeTemplates; - private ArrayList outputs; - private ArrayList policies; - private ConcurrentHashMap nestedToscaTplsWithTopology; - private ArrayList nestedToscaTemplatesWithTopology; - private ToscaGraph graph; - private String csarTempDir; - private int nestingLoopCounter; - private LinkedHashMap> metaProperties; - private Set processedImports; - - public ToscaTemplate(String _path, - LinkedHashMap _parsedParams, - boolean aFile, - LinkedHashMap yamlDictTpl) throws JToscaException { - init(_path, _parsedParams, aFile, yamlDictTpl, true); - } - - public ToscaTemplate(String _path, - LinkedHashMap _parsedParams, - boolean aFile, - LinkedHashMap yamlDictTpl, boolean resolveGetInput) throws JToscaException { - init(_path, _parsedParams, aFile, yamlDictTpl, resolveGetInput); - } - - @SuppressWarnings("unchecked") - private void init(String _path, - LinkedHashMap _parsedParams, - boolean aFile, - LinkedHashMap yamlDictTpl, boolean _resolveGetInput) throws 
JToscaException { - - ThreadLocalsHolder.setCollector(new ValidationIssueCollector()); - - VALID_TEMPLATE_VERSIONS = new ArrayList<>(); - VALID_TEMPLATE_VERSIONS.add("tosca_simple_yaml_1_0"); - VALID_TEMPLATE_VERSIONS.add("tosca_simple_yaml_1_1"); - VALID_TEMPLATE_VERSIONS.addAll(exttools.getVersions()); - ADDITIONAL_SECTIONS = new LinkedHashMap<>(); - SPECIAL_SECTIONS = new ArrayList<>(); - SPECIAL_SECTIONS.add(METADATA); - ADDITIONAL_SECTIONS.put("tosca_simple_yaml_1_0",SPECIAL_SECTIONS); - ADDITIONAL_SECTIONS.put("tosca_simple_yaml_1_1",SPECIAL_SECTIONS); - ADDITIONAL_SECTIONS.putAll(exttools.getSections()); - - //long startTime = System.nanoTime(); - - - isFile = aFile; - inputPath = null; - path = null; - tpl = null; - csarTempDir = null; - nestedToscaTplsWithTopology = new ConcurrentHashMap<>(); - nestedToscaTemplatesWithTopology = new ArrayList(); - resolveGetInput = _resolveGetInput; - metaProperties = new LinkedHashMap<>(); - - if(_path != null && !_path.isEmpty()) { - // save the original input path - inputPath = _path; - // get the actual path (will change with CSAR) - path = _getPath(_path); - // load the YAML template - if (path != null && !path.isEmpty()) { - try (InputStream input = new FileInputStream(new File(path));){ - //System.out.println("Loading YAML file " + path); - log.debug("ToscaTemplate Loading YAMEL file {}", path); - Yaml yaml = new Yaml(); - Object data = yaml.load(input); - this.tpl = (LinkedHashMap) data; - } - catch (FileNotFoundException e) { - log.error("ToscaTemplate - Exception loading yaml: {}", e.getMessage()); - log.error("Exception", e); - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE275", - "ToscaTemplate - Exception loading yaml: -> " + e.getMessage())); - return; - } - catch(Exception e) { - log.error("ToscaTemplate - Error loading yaml, aborting -> ", e.getMessage()); - log.error("Exception", e); - ThreadLocalsHolder.getCollector().appendValidationIssue(new 
JToscaValidationIssue("JE275", - "ToscaTemplate - Error loading yaml, aborting -> " + e.getMessage())); - return; - } - - if(yamlDictTpl != null) { - //msg = (_('Both path and yaml_dict_tpl arguments were ' - // 'provided. Using path and ignoring yaml_dict_tpl.')) - //log.info(msg) - log.debug("ToscaTemplate - Both path and yaml_dict_tpl arguments were provided. Using path and ignoring yaml_dict_tpl"); - } - } else { - // no input to process... - _abort(); - } - } - else { - if(yamlDictTpl != null) { - tpl = yamlDictTpl; - } - else { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE244", - "ValueError: No path or yaml_dict_tpl was provided. There is nothing to parse")); - log.debug("ToscaTemplate ValueError: No path or yaml_dict_tpl was provided. There is nothing to parse"); - - } - } - - if(tpl != null) { - parsedParams = _parsedParams; - _validateField(); - this.rootPath = path; - this.processedImports = new HashSet(); - this.imports = _tplImports(); - this.version = _tplVersion(); - this.metaData = _tplMetaData(); - this.relationshipTypes = _tplRelationshipTypes(); - this.description = _tplDescription(); - this.topologyTemplate = _topologyTemplate(); - this.repositories = _tplRepositories(); - if(topologyTemplate.getTpl() != null) { - this.inputs = _inputs(); - this.relationshipTemplates = _relationshipTemplates(); - this.nodeTemplates = _nodeTemplates(); - this.outputs = _outputs(); - this.policies = _policies(); -// _handleNestedToscaTemplatesWithTopology(); - _handleNestedToscaTemplatesWithTopology(topologyTemplate); - graph = new ToscaGraph(nodeTemplates); - } - } - - if(csarTempDir != null) { - CSAR.deleteDir(new File(csarTempDir)); - csarTempDir = null; - } - - verifyTemplate(); - - } - - private void _abort() throws JToscaException { - // print out all exceptions caught - verifyTemplate(); - throw new JToscaException("jtosca aborting", JToscaErrorCodes.PATH_NOT_VALID.getValue()); - } - - private TopologyTemplate 
_topologyTemplate() { - return new TopologyTemplate( - _tplTopologyTemplate(), - _getAllCustomDefs(imports), - relationshipTypes, - parsedParams, - null, - resolveGetInput); - } - - private ArrayList _inputs() { - return topologyTemplate.getInputs(); - } - - private ArrayList _nodeTemplates() { - return topologyTemplate.getNodeTemplates(); - } - - private ArrayList _relationshipTemplates() { - return topologyTemplate.getRelationshipTemplates(); - } - - private ArrayList _outputs() { - return topologyTemplate.getOutputs(); - } - - private String _tplVersion() { - return (String)tpl.get(DEFINITION_VERSION); - } - - @SuppressWarnings("unchecked") - private Metadata _tplMetaData() { - Object mdo = tpl.get(METADATA); - if(mdo instanceof LinkedHashMap) { - return new Metadata((Map)mdo); - } - else { - return null; - } - } - - private String _tplDescription() { - return (String)tpl.get(DESCRIPTION); - } - - private ArrayList _tplImports() { - return (ArrayList)tpl.get(IMPORTS); - } - - private ArrayList _tplRepositories() { - LinkedHashMap repositories = - (LinkedHashMap)tpl.get(REPOSITORIES); - ArrayList reposit = new ArrayList<>(); - if(repositories != null) { - for(Map.Entry me: repositories.entrySet()) { - Repository reposits = new Repository(me.getKey(),me.getValue()); - reposit.add(reposits); - } - } - return reposit; - } - - private LinkedHashMap _tplRelationshipTypes() { - return (LinkedHashMap)_getCustomTypes(RELATIONSHIP_TYPES,null); - } - - @SuppressWarnings("unchecked") - private LinkedHashMap _tplRelationshipTemplates() { - return (LinkedHashMap)_tplTopologyTemplate().get(RELATIONSHIP_TEMPLATES); - } - - @SuppressWarnings("unchecked") - private LinkedHashMap _tplTopologyTemplate() { - return (LinkedHashMap)tpl.get(TOPOLOGY_TEMPLATE); - } - - private ArrayList _policies() { - return topologyTemplate.getPolicies(); - } - - /** - * This method is used to get consolidated custom definitions from all imports - * It is logically divided in two parts to handle 
imports; map and list formats. - * Before processing the imports; it sorts them to make sure the current directory imports are - * being processed first and then others. Once sorted; it processes each import one by one in - * recursive manner. - * To avoid cyclic dependency among imports; this method uses a set to keep track of all - * imports which are already processed and filters the imports which occurs more than once. - * - * @param alImports all imports which needs to be processed - * @return the linked hash map containing all import definitions - */ - private LinkedHashMap _getAllCustomDefs(Object alImports) { - - String types[] = { - IMPORTS, NODE_TYPES, CAPABILITY_TYPES, RELATIONSHIP_TYPES, - DATA_TYPES, INTERFACE_TYPES, POLICY_TYPES, GROUP_TYPES - }; - LinkedHashMap customDefsFinal = new LinkedHashMap<>(); - - List> imports = (List>) alImports; - if (imports != null && !imports.isEmpty()) { - if (imports.get(0) instanceof LinkedHashMap) { - imports = sortImports(imports); - - for (Map map : imports) { - List> singleImportList = new ArrayList(); - singleImportList.add(map); - - Map importNameDetails = getValidFileNameForImportReference(singleImportList); - singleImportList = filterImportsForRecursion(singleImportList, importNameDetails); - - if(!singleImportList.get(0).isEmpty()){ - LinkedHashMap customDefs = _getCustomTypes(types, new ArrayList<>(singleImportList)); - processedImports.add(importNameDetails.get("importFileName")); - - if (customDefs != null) { - customDefsFinal.putAll(customDefs); - - if (customDefs.get(IMPORTS) != null) { - resetPathForRecursiveImports(importNameDetails.get("importRelativeName")); - LinkedHashMap importDefs = _getAllCustomDefs(customDefs.get(IMPORTS)); - customDefsFinal.putAll(importDefs); - } - } - } - } - } else { - LinkedHashMap customDefs = _getCustomTypes(types, new ArrayList<>(imports)); - if (customDefs != null) { - customDefsFinal.putAll(customDefs); - - if (customDefs.get(IMPORTS) != null) { - LinkedHashMap 
importDefs = _getAllCustomDefs(customDefs.get(IMPORTS)); - customDefsFinal.putAll(importDefs); - } - } - } - } - - // As imports are not custom_types, remove from the dict - customDefsFinal.remove(IMPORTS); - - return customDefsFinal; - } - - /** - * This method is used to sort the imports in order so that same directory - * imports will be processed first - * - * @param customImports the custom imports - * @return the sorted list of imports - */ - private List> sortImports(List> customImports){ - List> finalList1 = new ArrayList<>(); - List> finalList2 = new ArrayList<>(); - Iterator> itr = customImports.iterator(); - while(itr.hasNext()) { - Map innerMap = itr.next(); - if (innerMap.toString().contains("../")) { - finalList2.add(innerMap); - itr.remove(); - } - else if (innerMap.toString().contains("/")) { - finalList1.add(innerMap); - itr.remove(); - } - } - - customImports.addAll(finalList1); - customImports.addAll(finalList2); - return customImports; - } - - /** - * This method is used to reset PATH variable after processing of current import file is done - * This is required because of relative path nature of imports present in files. - * - * @param currImportRelativeName the current import relative name - */ - private void resetPathForRecursiveImports(String currImportRelativeName){ - path = getPath(path, currImportRelativeName); - } - - /** - * This is a recursive method which starts from current import and then recursively finds a - * valid path relative to current import file name. 
- * By doing this it handles all nested hierarchy of imports defined in CSARs - * - * @param path the path - * @param importFileName the import file name - * @return the string containing updated path value - */ - private String getPath(String path, String importFileName){ - String tempFullPath = (Paths.get(path).toAbsolutePath().getParent() - .toString() + File.separator + importFileName.replace("../", "")).replace('\\', '/'); - String tempPartialPath = (Paths.get(path).toAbsolutePath().getParent().toString()).replace('\\', '/'); - if(Files.exists(Paths.get(tempFullPath))) - return tempFullPath; - else - return getPath(tempPartialPath, importFileName); - } - - /** - * This method is used to get full path name for the file which needs to be processed. It helps - * in situation where files are present in different directory and are references as relative - * paths. - * - * @param customImports the custom imports - * @return the map containing import file full and relative paths - */ - private Map getValidFileNameForImportReference(List> - customImports){ - String importFileName; - Map retMap = new HashMap<>(); - for (Map map1 : customImports) { - for (Map.Entry entry : map1.entrySet()) { - Map innerMostMap = (Map) entry.getValue(); - Iterator> it = innerMostMap.entrySet().iterator(); - while (it.hasNext()) { - Map.Entry val = it.next(); - if(val.getValue().contains("/")){ - importFileName = (Paths.get(rootPath).toAbsolutePath().getParent().toString() + File - .separator + val.getValue().replace("../", "")).replace('\\', '/'); - } - else { - importFileName = (Paths.get(path).toAbsolutePath().getParent().toString() + File - .separator + val.getValue().replace("../", "")).replace('\\', '/'); - } - retMap.put("importFileName", importFileName); - retMap.put("importRelativeName", val.getValue()); - } - } - } - return retMap; - } - - /** - * This method is used to filter the imports which already gets processed in previous step. 
- * It handles the use case of cyclic dependency in imports which may cause Stack Overflow - * exception - * - * @param customImports the custom imports - * @param importNameDetails the import name details - * @return the list containing filtered imports - */ - private List> filterImportsForRecursion(List> - customImports, Map importNameDetails){ - for (Map map1 : customImports) { - for (Map.Entry entry : map1.entrySet()) { - Map innerMostMap = (Map) entry.getValue(); - Iterator> it = innerMostMap.entrySet().iterator(); - while (it.hasNext()) { - it.next(); - if (processedImports.contains(importNameDetails.get("importFileName"))) { - it.remove(); - } - } - } - } - - // Remove Empty elements - Iterator> itr = customImports.iterator(); - while(itr.hasNext()) { - Map innerMap = itr.next(); - Predicate predicate = p-> p.values().isEmpty(); - innerMap.values().removeIf(predicate); - } - - return customImports; - } - - @SuppressWarnings("unchecked") - private LinkedHashMap _getCustomTypes(Object typeDefinitions,ArrayList alImports) { - - // Handle custom types defined in imported template files - // This method loads the custom type definitions referenced in "imports" - // section of the TOSCA YAML template. 
- - LinkedHashMap customDefs = new LinkedHashMap(); - ArrayList typeDefs = new ArrayList(); - if(typeDefinitions instanceof String[]) { - for(String s: (String[])typeDefinitions) { - typeDefs.add(s); - } - } - else { - typeDefs.add((String)typeDefinitions); - } - - if(alImports == null) { - alImports = _tplImports(); - } - - if(alImports != null) { - ImportsLoader customService = new ImportsLoader(alImports,path,typeDefs,tpl); - ArrayList> nestedToscaTpls = customService.getNestedToscaTpls(); - _updateNestedToscaTplsWithTopology(nestedToscaTpls); - - customDefs = customService.getCustomDefs(); - if(customDefs == null) { - return null; - } - } - - //Handle custom types defined in current template file - for(String td: typeDefs) { - if(!td.equals(IMPORTS)) { - LinkedHashMap innerCustomTypes = (LinkedHashMap )tpl.get(td); - if(innerCustomTypes != null) { - customDefs.putAll(innerCustomTypes); - } - } - } - return customDefs; - } - - private void _updateNestedToscaTplsWithTopology(ArrayList> nestedToscaTpls) { - for(LinkedHashMap ntpl: nestedToscaTpls) { - // there is just one key:value pair in ntpl - for(Map.Entry me: ntpl.entrySet()) { - String fileName = me.getKey(); - @SuppressWarnings("unchecked") - LinkedHashMap toscaTpl = (LinkedHashMap)me.getValue(); - if(toscaTpl.get(TOPOLOGY_TEMPLATE) != null) { - if(nestedToscaTplsWithTopology.get(fileName) == null) { - nestedToscaTplsWithTopology.putAll(ntpl); - } - } - } - } - } - - // multi level nesting - RECURSIVE - private void _handleNestedToscaTemplatesWithTopology(TopologyTemplate tt) { - if(++nestingLoopCounter > 10) { - log.error("ToscaTemplate - _handleNestedToscaTemplatesWithTopology - Nested Topologies Loop: too many levels, aborting"); - return; - } - // Reset Processed Imports for nested templates - this.processedImports = new HashSet<>(); - for(Map.Entry me: nestedToscaTplsWithTopology.entrySet()) { - String fname = me.getKey(); - LinkedHashMap toscaTpl = - (LinkedHashMap)me.getValue(); - for(NodeTemplate 
nt: tt.getNodeTemplates()) { - if(_isSubMappedNode(nt,toscaTpl)) { - parsedParams = _getParamsForNestedTemplate(nt); - ArrayList alim = (ArrayList)toscaTpl.get(IMPORTS); - LinkedHashMap topologyTpl = - (LinkedHashMap)toscaTpl.get(TOPOLOGY_TEMPLATE); - TopologyTemplate topologyWithSubMapping = - new TopologyTemplate(topologyTpl, - _getAllCustomDefs(alim), - relationshipTypes, - parsedParams, - nt, - resolveGetInput); - if(topologyWithSubMapping.getSubstitutionMappings() != null) { - // Record nested topology templates in top level template - //nestedToscaTemplatesWithTopology.add(topologyWithSubMapping); - // Set substitution mapping object for mapped node - nt.setSubMappingToscaTemplate( - topologyWithSubMapping.getSubstitutionMappings()); - _handleNestedToscaTemplatesWithTopology(topologyWithSubMapping); - } - } - } - } - } - -// private void _handleNestedToscaTemplatesWithTopology() { -// for(Map.Entry me: nestedToscaTplsWithTopology.entrySet()) { -// String fname = me.getKey(); -// LinkedHashMap toscaTpl = -// (LinkedHashMap)me.getValue(); -// for(NodeTemplate nt: nodeTemplates) { -// if(_isSubMappedNode(nt,toscaTpl)) { -// parsedParams = _getParamsForNestedTemplate(nt); -// ArrayList alim = (ArrayList)toscaTpl.get(IMPORTS); -// LinkedHashMap topologyTpl = -// (LinkedHashMap)toscaTpl.get(TOPOLOGY_TEMPLATE); -// TopologyTemplate topologyWithSubMapping = -// new TopologyTemplate(topologyTpl, -// //_getAllCustomDefs(null), -// _getAllCustomDefs(alim), -// relationshipTypes, -// parsedParams, -// nt); -// if(topologyWithSubMapping.getSubstitutionMappings() != null) { -// // Record nested topology templates in top level template -// nestedToscaTemplatesWithTopology.add(topologyWithSubMapping); -// // Set substitution mapping object for mapped node -// nt.setSubMappingToscaTemplate( -// topologyWithSubMapping.getSubstitutionMappings()); -// } -// } -// } -// } -// } - - private void _validateField() { - String sVersion = _tplVersion(); - if(sVersion == null) { - 
ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE245", String.format( - "MissingRequiredField: Template is missing required field \"%s\"",DEFINITION_VERSION))); - } - else { - _validateVersion(sVersion); - this.version = sVersion; - } - - for (String sKey : tpl.keySet()) { - boolean bFound = false; - for (String sSection: SECTIONS) { - if(sKey.equals(sSection)) { - bFound = true; - break; - } - } - // check ADDITIONAL_SECTIONS - if(!bFound) { - if(ADDITIONAL_SECTIONS.get(version) != null && - ADDITIONAL_SECTIONS.get(version).contains(sKey)) { - bFound = true; - } - } - if(!bFound) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE246", String.format( - "UnknownFieldError: Template contains unknown field \"%s\"", - sKey))); - } - } - } - - private void _validateVersion(String sVersion) { - boolean bFound = false; - for(String vtv: VALID_TEMPLATE_VERSIONS) { - if(sVersion.equals(vtv)) { - bFound = true; - break; - } - } - if(!bFound) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE247", String.format( - "InvalidTemplateVersion: \"%s\" is invalid. 
Valid versions are %s", - sVersion,VALID_TEMPLATE_VERSIONS.toString()))); - } - else if ((!sVersion.equals("tosca_simple_yaml_1_0") && !sVersion.equals("tosca_simple_yaml_1_1"))) { - EntityType.updateDefinitions(sVersion); - - } - } - - private String _getPath(String _path) throws JToscaException { - if (_path.toLowerCase().endsWith(".yaml") || _path.toLowerCase().endsWith(".yml")) { - return _path; - } - else if (_path.toLowerCase().endsWith(".zip") || _path.toLowerCase().endsWith(".csar")) { - // a CSAR archive - CSAR csar = new CSAR(_path, isFile); - if (csar.validate()) { - try { - csar.decompress(); - metaProperties = csar.getMetaProperties(); - } - catch (IOException e) { - log.error("ToscaTemplate - _getPath - IOException trying to decompress {}", _path); - return null; - } - isFile = true; // the file has been decompressed locally - csar.cleanup(); - csarTempDir = csar.getTempDir(); - return csar.getTempDir() + File.separator + csar.getMainTemplate(); - } - } - else { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE248", "ValueError: " + _path + " is not a valid file")); - return null; - } - return null; - } - - private void verifyTemplate() throws JToscaException { - //Criticals - int validationIssuesCaught = ThreadLocalsHolder.getCollector().validationIssuesCaught(); - if (validationIssuesCaught > 0) { - List validationIssueStrings = ThreadLocalsHolder.getCollector().getValidationIssueReport(); - log.trace("####################################################################################################"); - log.trace("ToscaTemplate - verifyTemplate - {} Parsing Critical{} occurred...", validationIssuesCaught, (validationIssuesCaught > 1 ? "s" : "")); - for (String s : validationIssueStrings) { - log.trace("{}. 
CSAR name - {}", s, inputPath); - } - log.trace("####################################################################################################"); - } - - } - - public String getPath() { - return path; - } - - public String getVersion() { - return version; - } - - public String getDescription() { - return description; - } - - public TopologyTemplate getTopologyTemplate() { - return topologyTemplate; - } - - public Metadata getMetaData() { - return metaData; - } - - public ArrayList getInputs() { - return inputs; - } - - public ArrayList getOutputs() { - return outputs; - } - - public ArrayList getPolicies() { - return policies; - } - - public ArrayList getNodeTemplates() { - return nodeTemplates; - } - - public LinkedHashMap getMetaProperties(String propertiesFile) { - return metaProperties.get(propertiesFile); - } - -// private boolean _isSubMappedNode(NodeTemplate nt,LinkedHashMap toscaTpl) { -// // Return True if the nodetemple is substituted -// if(nt != null && nt.getSubMappingToscaTemplate() == null && -// getSubMappingNodeType(toscaTpl).equals(nt.getType()) && -// nt.getInterfaces().size() < 1) { -// return true; -// } -// return false; -// } - - private boolean _isSubMappedNode(NodeTemplate nt, LinkedHashMap toscaTpl) { - // Return True if the nodetemple is substituted - if(nt != null && nt.getSubMappingToscaTemplate() == null && - getSubMappingNodeType(toscaTpl).equals(nt.getType()) && - nt.getInterfaces().size() < 1) { - return true; - } - return false; - } - - private LinkedHashMap _getParamsForNestedTemplate(NodeTemplate nt) { - // Return total params for nested_template - LinkedHashMap pparams; - if(parsedParams != null) { - pparams = parsedParams; - } - else { - pparams = new LinkedHashMap(); - } - if(nt != null) { - for(String pname: nt.getProperties().keySet()) { - pparams.put(pname,nt.getPropertyValue(pname)); - } - } - return pparams; - } - - private String getSubMappingNodeType(LinkedHashMap toscaTpl) { - // Return substitution mappings 
node type - if(toscaTpl != null) { - return TopologyTemplate.getSubMappingNodeType( - (LinkedHashMap)toscaTpl.get(TOPOLOGY_TEMPLATE)); - } - return null; - } - - private boolean _hasSubstitutionMapping() { - // Return True if the template has valid substitution mappings - return topologyTemplate != null && - topologyTemplate.getSubstitutionMappings() != null; - } - - public boolean hasNestedTemplates() { - // Return True if the tosca template has nested templates - return nestedToscaTemplatesWithTopology != null && - nestedToscaTemplatesWithTopology.size() >= 1; - - } - - public ArrayList getNestedTemplates() { - return nestedToscaTemplatesWithTopology; - } - - @Override - public String toString() { - return "ToscaTemplate{" + - "exttools=" + exttools + - ", VALID_TEMPLATE_VERSIONS=" + VALID_TEMPLATE_VERSIONS + - ", ADDITIONAL_SECTIONS=" + ADDITIONAL_SECTIONS + - ", isFile=" + isFile + - ", path='" + path + '\'' + - ", inputPath='" + inputPath + '\'' + - ", parsedParams=" + parsedParams + - ", tpl=" + tpl + - ", version='" + version + '\'' + - ", imports=" + imports + - ", relationshipTypes=" + relationshipTypes + - ", metaData=" + metaData + - ", description='" + description + '\'' + - ", topologyTemplate=" + topologyTemplate + - ", repositories=" + repositories + - ", inputs=" + inputs + - ", relationshipTemplates=" + relationshipTemplates + - ", nodeTemplates=" + nodeTemplates + - ", outputs=" + outputs + - ", policies=" + policies + - ", nestedToscaTplsWithTopology=" + nestedToscaTplsWithTopology + - ", nestedToscaTemplatesWithTopology=" + nestedToscaTemplatesWithTopology + - ", graph=" + graph + - ", csarTempDir='" + csarTempDir + '\'' + - ", nestingLoopCounter=" + nestingLoopCounter + - '}'; - } -} - -/*python - -import logging -import os - -from copy import deepcopy -from toscaparser.common.exception import ValidationIssueCollector.collector -from toscaparser.common.exception import InvalidTemplateVersion -from toscaparser.common.exception import 
MissingRequiredFieldError -from toscaparser.common.exception import UnknownFieldError -from toscaparser.common.exception import ValidationError -from toscaparser.elements.entity_type import update_definitions -from toscaparser.extensions.exttools import ExtTools -import org.openecomp.sdc.toscaparser.api.imports -from toscaparser.prereq.csar import CSAR -from toscaparser.repositories import Repository -from toscaparser.topology_template import TopologyTemplate -from toscaparser.tpl_relationship_graph import ToscaGraph -from toscaparser.utils.gettextutils import _ -import org.openecomp.sdc.toscaparser.api.utils.yamlparser - - -# TOSCA template key names -SECTIONS = (DEFINITION_VERSION, DEFAULT_NAMESPACE, TEMPLATE_NAME, - TOPOLOGY_TEMPLATE, TEMPLATE_AUTHOR, TEMPLATE_VERSION, - DESCRIPTION, IMPORTS, DSL_DEFINITIONS, NODE_TYPES, - RELATIONSHIP_TYPES, RELATIONSHIP_TEMPLATES, - CAPABILITY_TYPES, ARTIFACT_TYPES, DATA_TYPES, INTERFACE_TYPES, - POLICY_TYPES, GROUP_TYPES, REPOSITORIES) = \ - ('tosca_definitions_version', 'tosca_default_namespace', - 'template_name', 'topology_template', 'template_author', - 'template_version', 'description', 'imports', 'dsl_definitions', - 'node_types', 'relationship_types', 'relationship_templates', - 'capability_types', 'artifact_types', 'data_types', - 'interface_types', 'policy_types', 'group_types', 'repositories') -# Sections that are specific to individual template definitions -SPECIAL_SECTIONS = (METADATA) = ('metadata') - -log = logging.getLogger("tosca.model") - -YAML_LOADER = toscaparser.utils.yamlparser.load_yaml - - -class ToscaTemplate(object): - exttools = ExtTools() - - VALID_TEMPLATE_VERSIONS = ['tosca_simple_yaml_1_0'] - - VALID_TEMPLATE_VERSIONS.extend(exttools.get_versions()) - - ADDITIONAL_SECTIONS = {'tosca_simple_yaml_1_0': SPECIAL_SECTIONS} - - ADDITIONAL_SECTIONS.update(exttools.get_sections()) - - '''Load the template data.''' - def __init__(self, path=None, parsed_params=None, a_file=True, - yaml_dict_tpl=None): - - 
ValidationIssueCollector.collector.start() - self.a_file = a_file - self.input_path = None - self.path = None - self.tpl = None - self.nested_tosca_tpls_with_topology = {} - self.nested_tosca_templates_with_topology = [] - if path: - self.input_path = path - self.path = self._get_path(path) - if self.path: - self.tpl = YAML_LOADER(self.path, self.a_file) - if yaml_dict_tpl: - msg = (_('Both path and yaml_dict_tpl arguments were ' - 'provided. Using path and ignoring yaml_dict_tpl.')) - log.info(msg) - print(msg) - else: - if yaml_dict_tpl: - self.tpl = yaml_dict_tpl - else: - ValidationIssueCollector.collector.appendException( - ValueError(_('No path or yaml_dict_tpl was provided. ' - 'There is nothing to parse.'))) - - if self.tpl: - self.parsed_params = parsed_params - self._validate_field() - self.version = self._tpl_version() - self.relationship_types = self._tpl_relationship_types() - self.description = self._tpl_description() - self.topology_template = self._topology_template() - self.repositories = self._tpl_repositories() - if self.topology_template.tpl: - self.inputs = self._inputs() - self.relationship_templates = self._relationship_templates() - self.nodetemplates = self._nodetemplates() - self.outputs = self._outputs() - self._handle_nested_tosca_templates_with_topology() - self.graph = ToscaGraph(self.nodetemplates) - - ValidationIssueCollector.collector.stop() - self.verify_template() - - def _topology_template(self): - return TopologyTemplate(self._tpl_topology_template(), - self._get_all_custom_defs(), - self.relationship_types, - self.parsed_params, - None) - - def _inputs(self): - return self.topology_template.inputs - - def _nodetemplates(self): - return self.topology_template.nodetemplates - - def _relationship_templates(self): - return self.topology_template.relationship_templates - - def _outputs(self): - return self.topology_template.outputs - - def _tpl_version(self): - return self.tpl.get(DEFINITION_VERSION) - - def _tpl_description(self): 
- desc = self.tpl.get(DESCRIPTION) - if desc: - return desc.rstrip() - - def _tpl_imports(self): - return self.tpl.get(IMPORTS) - - def _tpl_repositories(self): - repositories = self.tpl.get(REPOSITORIES) - reposit = [] - if repositories: - for name, val in repositories.items(): - reposits = Repository(name, val) - reposit.append(reposits) - return reposit - - def _tpl_relationship_types(self): - return self._get_custom_types(RELATIONSHIP_TYPES) - - def _tpl_relationship_templates(self): - topology_template = self._tpl_topology_template() - return topology_template.get(RELATIONSHIP_TEMPLATES) - - def _tpl_topology_template(self): - return self.tpl.get(TOPOLOGY_TEMPLATE) - - def _get_all_custom_defs(self, imports=None): - types = [IMPORTS, NODE_TYPES, CAPABILITY_TYPES, RELATIONSHIP_TYPES, - DATA_TYPES, INTERFACE_TYPES, POLICY_TYPES, GROUP_TYPES] - custom_defs_final = {} - custom_defs = self._get_custom_types(types, imports) - if custom_defs: - custom_defs_final.update(custom_defs) - if custom_defs.get(IMPORTS): - import_defs = self._get_all_custom_defs( - custom_defs.get(IMPORTS)) - custom_defs_final.update(import_defs) - - # As imports are not custom_types, removing from the dict - custom_defs_final.pop(IMPORTS, None) - return custom_defs_final - - def _get_custom_types(self, type_definitions, imports=None): - """Handle custom types defined in imported template files - - This method loads the custom type definitions referenced in "imports" - section of the TOSCA YAML template. 
- """ - custom_defs = {} - type_defs = [] - if not isinstance(type_definitions, list): - type_defs.append(type_definitions) - else: - type_defs = type_definitions - - if not imports: - imports = self._tpl_imports() - - if imports: - custom_service = toscaparser.imports.\ - ImportsLoader(imports, self.path, - type_defs, self.tpl) - - nested_tosca_tpls = custom_service.get_nested_tosca_tpls() - self._update_nested_tosca_tpls_with_topology(nested_tosca_tpls) - - custom_defs = custom_service.get_custom_defs() - if not custom_defs: - return - - # Handle custom types defined in current template file - for type_def in type_defs: - if type_def != IMPORTS: - inner_custom_types = self.tpl.get(type_def) or {} - if inner_custom_types: - custom_defs.update(inner_custom_types) - return custom_defs - - def _update_nested_tosca_tpls_with_topology(self, nested_tosca_tpls): - for tpl in nested_tosca_tpls: - filename, tosca_tpl = list(tpl.items())[0] - if (tosca_tpl.get(TOPOLOGY_TEMPLATE) and - filename not in list( - self.nested_tosca_tpls_with_topology.keys())): - self.nested_tosca_tpls_with_topology.update(tpl) - - def _handle_nested_tosca_templates_with_topology(self): - for fname, tosca_tpl in self.nested_tosca_tpls_with_topology.items(): - for nodetemplate in self.nodetemplates: - if self._is_sub_mapped_node(nodetemplate, tosca_tpl): - parsed_params = self._get_params_for_nested_template( - nodetemplate) - topology_tpl = tosca_tpl.get(TOPOLOGY_TEMPLATE) - topology_with_sub_mapping = TopologyTemplate( - topology_tpl, - self._get_all_custom_defs(), - self.relationship_types, - parsed_params, - nodetemplate) - if topology_with_sub_mapping.substitution_mappings: - # Record nested topo templates in top level template - self.nested_tosca_templates_with_topology.\ - append(topology_with_sub_mapping) - # Set substitution mapping object for mapped node - nodetemplate.sub_mapping_tosca_template = \ - topology_with_sub_mapping.substitution_mappings - - def _validate_field(self): - version 
= self._tpl_version() - if not version: - ValidationIssueCollector.collector.appendException( - MissingRequiredFieldError(what='Template', - required=DEFINITION_VERSION)) - else: - self._validate_version(version) - self.version = version - - for name in self.tpl: - if (name not in SECTIONS and - name not in self.ADDITIONAL_SECTIONS.get(version, ())): - ValidationIssueCollector.collector.appendException( - UnknownFieldError(what='Template', field=name)) - - def _validate_version(self, version): - if version not in self.VALID_TEMPLATE_VERSIONS: - ValidationIssueCollector.collector.appendException( - InvalidTemplateVersion( - what=version, - valid_versions=', '. join(self.VALID_TEMPLATE_VERSIONS))) - else: - if version != 'tosca_simple_yaml_1_0': - update_definitions(version) - - def _get_path(self, path): - if path.lower().endswith(('.yaml','.yml')): - return path - elif path.lower().endswith(('.zip', '.csar')): - # a CSAR archive - csar = CSAR(path, self.a_file) - if csar.validate(): - csar.decompress() - self.a_file = True # the file has been decompressed locally - return os.path.join(csar.temp_dir, csar.get_main_template()) - else: - ValidationIssueCollector.collector.appendException( - ValueError(_('"%(path)s" is not a valid file.') - % {'path': path})) - - def verify_template(self): - if ValidationIssueCollector.collector.exceptionsCaught(): - if self.input_path: - raise ValidationError( - message=(_('\nThe input "%(path)s" failed validation with ' - 'the following error(s): \n\n\t') - % {'path': self.input_path}) + - '\n\t'.join(ValidationIssueCollector.collector.getExceptionsReport())) - else: - raise ValidationError( - message=_('\nThe pre-parsed input failed validation with ' - 'the following error(s): \n\n\t') + - '\n\t'.join(ValidationIssueCollector.collector.getExceptionsReport())) - else: - if self.input_path: - msg = (_('The input "%(path)s" successfully passed ' - 'validation.') % {'path': self.input_path}) - else: - msg = _('The pre-parsed input 
successfully passed validation.') - - log.info(msg) - - def _is_sub_mapped_node(self, nodetemplate, tosca_tpl): - """Return True if the nodetemple is substituted.""" - if (nodetemplate and not nodetemplate.sub_mapping_tosca_template and - self.get_sub_mapping_node_type(tosca_tpl) == nodetemplate.type - and len(nodetemplate.interfaces) < 1): - return True - else: - return False - - def _get_params_for_nested_template(self, nodetemplate): - """Return total params for nested_template.""" - parsed_params = deepcopy(self.parsed_params) \ - if self.parsed_params else {} - if nodetemplate: - for pname in nodetemplate.get_properties(): - parsed_params.update({pname: - nodetemplate.get_property_value(pname)}) - return parsed_params - - def get_sub_mapping_node_type(self, tosca_tpl): - """Return substitution mappings node type.""" - if tosca_tpl: - return TopologyTemplate.get_sub_mapping_node_type( - tosca_tpl.get(TOPOLOGY_TEMPLATE)) - - def _has_substitution_mappings(self): - """Return True if the template has valid substitution mappings.""" - return self.topology_template is not None and \ - self.topology_template.substitution_mappings is not None - - def has_nested_templates(self): - """Return True if the tosca template has nested templates.""" - return self.nested_tosca_templates_with_topology is not None and \ - len(self.nested_tosca_templates_with_topology) >= 1 -*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/Triggers.java b/src/main/java/org/openecomp/sdc/toscaparser/api/Triggers.java deleted file mode 100644 index 1e82a6c..0000000 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/Triggers.java +++ /dev/null @@ -1,184 +0,0 @@ -package org.openecomp.sdc.toscaparser.api; - -import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; - -import java.util.LinkedHashMap; - -import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; -import org.openecomp.sdc.toscaparser.api.utils.ValidateUtils; - -public 
class Triggers extends EntityTemplate { - - private static final String DESCRIPTION = "description"; - private static final String EVENT = "event_type"; - private static final String SCHEDULE = "schedule"; - private static final String TARGET_FILTER = "target_filter"; - private static final String CONDITION = "condition"; - private static final String ACTION = "action"; - - private static final String SECTIONS[] = { - DESCRIPTION, EVENT, SCHEDULE, TARGET_FILTER, CONDITION, ACTION - }; - - private static final String METER_NAME = "meter_name"; - private static final String CONSTRAINT = "constraint"; - private static final String PERIOD = "period"; - private static final String EVALUATIONS = "evaluations"; - private static final String METHOD = "method"; - private static final String THRESHOLD = "threshold"; - private static final String COMPARISON_OPERATOR = "comparison_operator"; - - private static final String CONDITION_KEYNAMES[] = { - METER_NAME, CONSTRAINT, PERIOD, EVALUATIONS, METHOD, THRESHOLD, COMPARISON_OPERATOR - }; - - private String name; - private LinkedHashMap triggerTpl; - - public Triggers(String _name,LinkedHashMap _triggerTpl) { - super(); // dummy. 
don't want super - name = _name; - triggerTpl = _triggerTpl; - _validateKeys(); - _validateCondition(); - _validateInput(); - } - - public String getDescription() { - return (String)triggerTpl.get("description"); - } - - public String getEvent() { - return (String)triggerTpl.get("event_type"); - } - - public LinkedHashMap getSchedule() { - return (LinkedHashMap)triggerTpl.get("schedule"); - } - - public LinkedHashMap getTargetFilter() { - return (LinkedHashMap)triggerTpl.get("target_filter"); - } - - public LinkedHashMap getCondition() { - return (LinkedHashMap)triggerTpl.get("condition"); - } - - public LinkedHashMap getAction() { - return (LinkedHashMap)triggerTpl.get("action"); - } - - private void _validateKeys() { - for(String key: triggerTpl.keySet()) { - boolean bFound = false; - for(int i=0; i validationIssues = new HashMap(); - public void appendValidationIssue(JToscaValidationIssue issue) { - - validationIssues.put(issue.getMessage(),issue); - - } - - public List getValidationIssueReport() { - List report = new ArrayList<>(); - if (!validationIssues.isEmpty()) { - for (JToscaValidationIssue exception : validationIssues.values()) { - report.add("["+exception.getCode()+"]: "+ exception.getMessage()); - } - } - - return report; - } - public Map getValidationIssues() { - return validationIssues; - } - - - public int validationIssuesCaught() { - return validationIssues.size(); - } - -} diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/ArtifactTypeDef.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/ArtifactTypeDef.java deleted file mode 100644 index 8a13d99..0000000 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/ArtifactTypeDef.java +++ /dev/null @@ -1,105 +0,0 @@ -package org.openecomp.sdc.toscaparser.api.elements; - -import java.util.LinkedHashMap; - -public class ArtifactTypeDef extends StatefulEntityType { - - private String type; - private LinkedHashMap customDef; - private LinkedHashMap properties; - 
private LinkedHashMap parentArtifacts; - - - - public ArtifactTypeDef(String atype,LinkedHashMap _customDef) { - super(atype,ARTIFACT_PREFIX,_customDef); - - type = atype; - customDef = _customDef; - properties = null; - if(defs != null) { - properties = (LinkedHashMap)defs.get(PROPERTIES); - } - parentArtifacts = _getParentArtifacts(); - } - - private LinkedHashMap _getParentArtifacts() { - LinkedHashMap artifacts = new LinkedHashMap<>(); - String parentArtif = null; - if(getParentType() != null) { - parentArtif = getParentType().getType(); - } - if(parentArtif != null && !parentArtif.isEmpty()) { - while(!parentArtif.equals("tosca.artifacts.Root")) { - Object ob = TOSCA_DEF.get(parentArtif); - artifacts.put(parentArtif,ob); - parentArtif = - (String)((LinkedHashMap)ob).get("derived_from"); - } - } - return artifacts; - } - - public ArtifactTypeDef getParentType() { - // Return a artifact entity from which this entity is derived - if(defs == null) { - return null; - } - String partifactEntity = derivedFrom(defs); - if(partifactEntity != null) { - return new ArtifactTypeDef(partifactEntity,customDef); - } - return null; - } - - public Object getArtifact(String name) { - // Return the definition of an artifact field by name - if(defs != null) { - return defs.get(name); - } - return null; - } - - public String getType() { - return type; - } - -} - -/*python -class ArtifactTypeDef(StatefulEntityType): - '''TOSCA built-in artifacts type.''' - - def __init__(self, atype, custom_def=None): - super(ArtifactTypeDef, self).__init__(atype, self.ARTIFACT_PREFIX, - custom_def) - self.type = atype - self.custom_def = custom_def - self.properties = None - if self.PROPERTIES in self.defs: - self.properties = self.defs[self.PROPERTIES] - self.parent_artifacts = self._get_parent_artifacts() - - def _get_parent_artifacts(self): - artifacts = {} - parent_artif = self.parent_type.type if self.parent_type else None - if parent_artif: - while parent_artif != 'tosca.artifacts.Root': - 
artifacts[parent_artif] = self.TOSCA_DEF[parent_artif] - parent_artif = artifacts[parent_artif]['derived_from'] - return artifacts - - @property - def parent_type(self): - '''Return a artifact entity from which this entity is derived.''' - if not hasattr(self, 'defs'): - return None - partifact_entity = self.derived_from(self.defs) - if partifact_entity: - return ArtifactTypeDef(partifact_entity, self.custom_def) - - def get_artifact(self, name): - '''Return the definition of an artifact field by name.''' - if name in self.defs: - return self.defs[name] -*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/AttributeDef.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/AttributeDef.java deleted file mode 100644 index 5551908..0000000 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/AttributeDef.java +++ /dev/null @@ -1,40 +0,0 @@ -package org.openecomp.sdc.toscaparser.api.elements; - -import java.util.LinkedHashMap; - -public class AttributeDef { - // TOSCA built-in Attribute type - - private String name; - private Object value; - private LinkedHashMap schema; - - public AttributeDef(String adName, Object adValue, LinkedHashMap adSchema) { - name = adName; - value = adValue; - schema = adSchema; - } - - public String getName() { - return name; - } - - public Object getValue() { - return value; - } - - public LinkedHashMap getSchema() { - return schema; - } -} - -/*python - -class AttributeDef(object): - '''TOSCA built-in Attribute type.''' - - def __init__(self, name, value=None, schema=None): - self.name = name - self.value = value - self.schema = schema -*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/CapabilityTypeDef.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/CapabilityTypeDef.java deleted file mode 100644 index 2994fa8..0000000 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/CapabilityTypeDef.java 
+++ /dev/null @@ -1,224 +0,0 @@ -package org.openecomp.sdc.toscaparser.api.elements; - -import java.util.ArrayList; -import java.util.LinkedHashMap; -import java.util.Map; - -import org.openecomp.sdc.toscaparser.api.elements.PropertyDef; - -public class CapabilityTypeDef extends StatefulEntityType { - // TOSCA built-in capabilities type - - private static final String TOSCA_TYPEURI_CAPABILITY_ROOT = "tosca.capabilities.Root"; - - private String name; - private String nodetype; - private LinkedHashMap customDef; - private LinkedHashMap properties; - private LinkedHashMap parentCapabilities; - - @SuppressWarnings("unchecked") - public CapabilityTypeDef(String cname,String ctype,String ntype,LinkedHashMap ccustomDef) { - super(ctype,CAPABILITY_PREFIX,ccustomDef); - - name = cname; - nodetype = ntype; - properties = null; - customDef = ccustomDef; - if(defs != null) { - properties = (LinkedHashMap)defs.get(PROPERTIES); - } - parentCapabilities = _getParentCapabilities(customDef); - } - - @SuppressWarnings("unchecked") - public ArrayList getPropertiesDefObjects () { - // Return a list of property definition objects - ArrayList propsdefs = new ArrayList<>(); - LinkedHashMap parentProperties = new LinkedHashMap<>(); - if(parentCapabilities != null) { - for(Map.Entry me: parentCapabilities.entrySet()) { - parentProperties.put(me.getKey(),((LinkedHashMap)me.getValue()).get("properties")); - } - } - if(properties != null) { - for(Map.Entry me: properties.entrySet()) { - propsdefs.add(new PropertyDef(me.getKey(),null,(LinkedHashMap)me.getValue())); - } - } - if(parentProperties != null) { - for(Map.Entry me: parentProperties.entrySet()) { - LinkedHashMap props = (LinkedHashMap)me.getValue(); - if (props != null) { - for(Map.Entry pe: props.entrySet()) { - String prop = pe.getKey(); - LinkedHashMap schema = (LinkedHashMap)pe.getValue(); - // add parent property if not overridden by children type - if(properties == null || properties.get(prop) == null) { - propsdefs.add(new 
PropertyDef(prop, null, schema)); - } - } - } - } - } - return propsdefs; - } - - public LinkedHashMap getPropertiesDef() { - LinkedHashMap pds = new LinkedHashMap<>(); - for(PropertyDef pd: getPropertiesDefObjects()) { - pds.put(pd.getName(),pd); - } - return pds; - } - - public PropertyDef getPropertyDefValue(String pdname) { - // Return the definition of a given property name - LinkedHashMap propsDef = getPropertiesDef(); - if(propsDef != null && propsDef.get(pdname) != null) { - return (PropertyDef)propsDef.get(pdname).getPDValue(); - } - return null; - } - - @SuppressWarnings("unchecked") - private LinkedHashMap _getParentCapabilities(LinkedHashMap customDef) { - LinkedHashMap capabilities = new LinkedHashMap<>(); - CapabilityTypeDef parentCap = getParentType(); - if(parentCap != null) { - String sParentCap = parentCap.getType(); - while(!sParentCap.equals(TOSCA_TYPEURI_CAPABILITY_ROOT)) { - if(TOSCA_DEF.get(sParentCap) != null) { - capabilities.put(sParentCap,TOSCA_DEF.get(sParentCap)); - } - else if(customDef != null && customDef.get(sParentCap) != null) { - capabilities.put(sParentCap,customDef.get(sParentCap)); - } - sParentCap = (String)((LinkedHashMap)capabilities.get(sParentCap)).get("derived_from"); - } - } - return capabilities; - } - - public CapabilityTypeDef getParentType() { - // Return a capability this capability is derived from - if(defs == null) { - return null; - } - String pnode = derivedFrom(defs); - if(pnode != null && !pnode.isEmpty()) { - return new CapabilityTypeDef(name, pnode, nodetype, customDef); - } - return null; - } - - public boolean inheritsFrom(ArrayList typeNames) { - // Check this capability is in type_names - - // Check if this capability or some of its parent types - // are in the list of types: type_names - if(typeNames.contains(getType())) { - return true; - } - else if(getParentType() != null) { - return getParentType().inheritsFrom(typeNames); - } - return false; - } - - // getters/setters - - public LinkedHashMap 
getProperties() { - return properties; - } - - public String getName() { - return name; - } -} - -/*python -from toscaparser.elements.property_definition import PropertyDef -from toscaparser.elements.statefulentitytype import StatefulEntityType - - -class CapabilityTypeDef(StatefulEntityType): - '''TOSCA built-in capabilities type.''' - TOSCA_TYPEURI_CAPABILITY_ROOT = 'tosca.capabilities.Root' - - def __init__(self, name, ctype, ntype, custom_def=None): - self.name = name - super(CapabilityTypeDef, self).__init__(ctype, self.CAPABILITY_PREFIX, - custom_def) - self.nodetype = ntype - self.properties = None - self.custom_def = custom_def - if self.PROPERTIES in self.defs: - self.properties = self.defs[self.PROPERTIES] - self.parent_capabilities = self._get_parent_capabilities(custom_def) - - def get_properties_def_objects(self): - '''Return a list of property definition objects.''' - properties = [] - parent_properties = {} - if self.parent_capabilities: - for type, value in self.parent_capabilities.items(): - parent_properties[type] = value.get('properties') - if self.properties: - for prop, schema in self.properties.items(): - properties.append(PropertyDef(prop, None, schema)) - if parent_properties: - for parent, props in parent_properties.items(): - for prop, schema in props.items(): - # add parent property if not overridden by children type - if not self.properties or \ - prop not in self.properties.keys(): - properties.append(PropertyDef(prop, None, schema)) - return properties - - def get_properties_def(self): - '''Return a dictionary of property definition name-object pairs.''' - return {prop.name: prop - for prop in self.get_properties_def_objects()} - - def get_property_def_value(self, name): - '''Return the definition of a given property name.''' - props_def = self.get_properties_def() - if props_def and name in props_def: - return props_def[name].value - - def _get_parent_capabilities(self, custom_def=None): - capabilities = {} - parent_cap = 
self.parent_type - if parent_cap: - parent_cap = parent_cap.type - while parent_cap != self.TOSCA_TYPEURI_CAPABILITY_ROOT: - if parent_cap in self.TOSCA_DEF.keys(): - capabilities[parent_cap] = self.TOSCA_DEF[parent_cap] - elif custom_def and parent_cap in custom_def.keys(): - capabilities[parent_cap] = custom_def[parent_cap] - parent_cap = capabilities[parent_cap]['derived_from'] - return capabilities - - @property - def parent_type(self): - '''Return a capability this capability is derived from.''' - if not hasattr(self, 'defs'): - return None - pnode = self.derived_from(self.defs) - if pnode: - return CapabilityTypeDef(self.name, pnode, - self.nodetype, self.custom_def) - - def inherits_from(self, type_names): - '''Check this capability is in type_names - - Check if this capability or some of its parent types - are in the list of types: type_names - ''' - if self.type in type_names: - return True - elif self.parent_type: - return self.parent_type.inherits_from(type_names) - else: - return False*/ diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/DataType.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/DataType.java deleted file mode 100644 index d5d770b..0000000 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/DataType.java +++ /dev/null @@ -1,116 +0,0 @@ -package org.openecomp.sdc.toscaparser.api.elements; - -import java.util.ArrayList; -import java.util.LinkedHashMap; - -public class DataType extends StatefulEntityType { - - LinkedHashMap customDef; - - public DataType(String _dataTypeName,LinkedHashMap _customDef) { - super(_dataTypeName,DATATYPE_NETWORK_PREFIX,_customDef); - - customDef = _customDef; - } - - public DataType getParentType() { - // Return a datatype this datatype is derived from - if(defs != null) { - String ptype = derivedFrom(defs); - if(ptype != null) { - return new DataType(ptype,customDef); - } - } - return null; - } - - public String getValueType() { - // Return 'type' section in the 
datatype schema - if(defs != null) { - return (String)entityValue(defs,"type"); - } - return null; - } - - public ArrayList getAllPropertiesObjects() { - //Return all properties objects defined in type and parent type - ArrayList propsDef = getPropertiesDefObjects(); - DataType ptype = getParentType(); - while(ptype != null) { - propsDef.addAll(ptype.getPropertiesDefObjects()); - ptype = ptype.getParentType(); - } - return propsDef; - } - - public LinkedHashMap getAllProperties() { - // Return a dictionary of all property definition name-object pairs - LinkedHashMap pno = new LinkedHashMap<>(); - for(PropertyDef pd: getAllPropertiesObjects()) { - pno.put(pd.getName(),pd); - } - return pno; - } - - public Object getAllPropertyValue(String name) { - // Return the value of a given property name - LinkedHashMap propsDef = getAllProperties(); - if(propsDef != null && propsDef.get(name) != null) { - return propsDef.get(name).getPDValue(); - } - return null; - } - - public LinkedHashMap getDefs() { - return defs; - } - -} - -/*python - -from toscaparser.elements.statefulentitytype import StatefulEntityType - - -class DataType(StatefulEntityType): - '''TOSCA built-in and user defined complex data type.''' - - def __init__(self, datatypename, custom_def=None): - super(DataType, self).__init__(datatypename, - self.DATATYPE_NETWORK_PREFIX, - custom_def) - self.custom_def = custom_def - - @property - def parent_type(self): - '''Return a datatype this datatype is derived from.''' - ptype = self.derived_from(self.defs) - if ptype: - return DataType(ptype, self.custom_def) - return None - - @property - def value_type(self): - '''Return 'type' section in the datatype schema.''' - return self.entity_value(self.defs, 'type') - - def get_all_properties_objects(self): - '''Return all properties objects defined in type and parent type.''' - props_def = self.get_properties_def_objects() - ptype = self.parent_type - while ptype: - props_def.extend(ptype.get_properties_def_objects()) - 
ptype = ptype.parent_type - return props_def - - def get_all_properties(self): - '''Return a dictionary of all property definition name-object pairs.''' - return {prop.name: prop - for prop in self.get_all_properties_objects()} - - def get_all_property_value(self, name): - '''Return the value of a given property name.''' - props_def = self.get_all_properties() - if props_def and name in props_def.key(): - return props_def[name].value -*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/EntityType.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/EntityType.java deleted file mode 100644 index 50ef715..0000000 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/EntityType.java +++ /dev/null @@ -1,418 +0,0 @@ -package org.openecomp.sdc.toscaparser.api.elements; - -import java.io.IOException; -import java.io.InputStream; -import java.util.ArrayList; -import java.util.LinkedHashMap; -import java.util.Map; - -import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; -import org.openecomp.sdc.toscaparser.api.extensions.ExtTools; -import org.openecomp.sdc.toscaparser.api.utils.CopyUtils; -import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.yaml.snakeyaml.Yaml; - -public class EntityType { - - private static Logger log = LoggerFactory.getLogger(EntityType.class.getName()); - - private static final String TOSCA_DEFINITION_1_0_YAML = "TOSCA_definition_1_0.yaml"; - protected static final String DERIVED_FROM = "derived_from"; - protected static final String PROPERTIES = "properties"; - protected static final String ATTRIBUTES = "attributes"; - protected static final String REQUIREMENTS = "requirements"; - protected static final String INTERFACES = "interfaces"; - protected static final String CAPABILITIES = "capabilities"; - protected static final String TYPE = "type"; - protected static final String 
ARTIFACTS = "artifacts"; - - @SuppressWarnings("unused") - private static final String SECTIONS[] = { - DERIVED_FROM, PROPERTIES, ATTRIBUTES, REQUIREMENTS, - INTERFACES, CAPABILITIES, TYPE, ARTIFACTS - }; - - public static final String TOSCA_DEF_SECTIONS[] = { - "node_types", "data_types", "artifact_types", - "group_types", "relationship_types", - "capability_types", "interface_types", - "policy_types"}; - - - // TOSCA definition file - //private final static String path = EntityType.class.getProtectionDomain().getCodeSource().getLocation().getPath(); - - //private final static String path = EntityType.class.getClassLoader().getResource("TOSCA_definition_1_0.yaml").getFile(); - //private final static String TOSCA_DEF_FILE = EntityType.class.getClassLoader().getResourceAsStream("TOSCA_definition_1_0.yaml"); - - private static LinkedHashMap TOSCA_DEF_LOAD_AS_IS = loadTdf(); - - //EntityType.class.getClassLoader().getResourceAsStream("TOSCA_definition_1_0.yaml"); - - @SuppressWarnings("unchecked") - private static LinkedHashMap loadTdf() { - String toscaDefLocation = EntityType.class.getClassLoader().getResource(TOSCA_DEFINITION_1_0_YAML).getFile(); - InputStream input = EntityType.class.getClassLoader().getResourceAsStream(TOSCA_DEFINITION_1_0_YAML); - if (input == null){ - log.error("EntityType - loadTdf - Couldn't load TOSCA_DEF_FILE {}", toscaDefLocation); - } - Yaml yaml = new Yaml(); - Object loaded = yaml.load(input); - //@SuppressWarnings("unchecked") - return (LinkedHashMap) loaded; - } - - // Map of definition with pre-loaded values of TOSCA_DEF_FILE_SECTIONS - public static LinkedHashMap TOSCA_DEF; - static { - TOSCA_DEF = new LinkedHashMap(); - for(String section: TOSCA_DEF_SECTIONS) { - @SuppressWarnings("unchecked") - LinkedHashMap value = (LinkedHashMap)TOSCA_DEF_LOAD_AS_IS.get(section); - if(value != null) { - for(String key: value.keySet()) { - TOSCA_DEF.put(key, value.get(key)); - } - } - } - } - - public static final String DEPENDSON = 
"tosca.relationships.DependsOn"; - public static final String HOSTEDON = "tosca.relationships.HostedOn"; - public static final String CONNECTSTO = "tosca.relationships.ConnectsTo"; - public static final String ATTACHESTO = "tosca.relationships.AttachesTo"; - public static final String LINKSTO = "tosca.relationships.network.LinksTo"; - public static final String BINDSTO = "tosca.relationships.network.BindsTo"; - - public static final String RELATIONSHIP_TYPE[] = { - "tosca.relationships.DependsOn", - "tosca.relationships.HostedOn", - "tosca.relationships.ConnectsTo", - "tosca.relationships.AttachesTo", - "tosca.relationships.network.LinksTo", - "tosca.relationships.network.BindsTo"}; - - public static final String NODE_PREFIX = "tosca.nodes."; - public static final String RELATIONSHIP_PREFIX = "tosca.relationships."; - public static final String CAPABILITY_PREFIX = "tosca.capabilities."; - public static final String INTERFACE_PREFIX = "tosca.interfaces."; - public static final String ARTIFACT_PREFIX = "tosca.artifacts."; - public static final String POLICY_PREFIX = "tosca.policies."; - public static final String GROUP_PREFIX = "tosca.groups."; - //currently the data types are defined only for network - // but may have changes in the future. - public static final String DATATYPE_PREFIX = "tosca.datatypes."; - public static final String DATATYPE_NETWORK_PREFIX = DATATYPE_PREFIX + "network."; - public static final String TOSCA = "tosca"; - - protected String type; - protected LinkedHashMap defs = null; - public Object getParentType() { return null; } - - public String derivedFrom(LinkedHashMap defs) { - // Return a type this type is derived from - return (String)entityValue(defs, "derived_from"); - } - - public boolean isDerivedFrom(String type_str) { - // Check if object inherits from the given type - // Returns true if this object is derived from 'type_str' - // False otherwise. 
- if(type == null || this.type.isEmpty()) { - return false; - } - else if(type == type_str) { - return true; - } - else if(getParentType() != null) { - return ((EntityType)getParentType()).isDerivedFrom(type_str); - } - else { - return false; - } - } - - public Object entityValue(LinkedHashMap defs, String key) { - if(defs != null) { - return defs.get(key); - } - return null; - } - - @SuppressWarnings("unchecked") - public Object getValue(String ndtype, LinkedHashMap _defs, boolean parent) { - Object value = null; - if(_defs == null) { - if(defs == null) { - return null; - } - _defs = this.defs; - } - Object defndt = _defs.get(ndtype); - if(defndt != null) { - // copy the value to avoid that next operations add items in the - // item definitions - //value = copy.copy(defs[ndtype]) - value = CopyUtils.copyLhmOrAl(defndt); - } - - if(parent) { - EntityType p = this; - if(p != null) { - while(p != null) { - if(p.defs != null && p.defs.get(ndtype) != null) { - // get the parent value - Object parentValue = p.defs.get(ndtype); - if(value != null) { - if(value instanceof LinkedHashMap) { - for(Map.Entry me: ((LinkedHashMap)parentValue).entrySet()) { - String k = me.getKey(); - if(((LinkedHashMap)value).get(k) == null) { - ((LinkedHashMap)value).put(k,me.getValue()); - } - } - } - if(value instanceof ArrayList) { - for(Object pValue: (ArrayList)parentValue) { - if(!((ArrayList)value).contains(pValue)) { - ((ArrayList)value).add(pValue); - } - } - } - } - else { - // value = copy.copy(parent_value) - value = CopyUtils.copyLhmOrAl(parentValue); - } - } - p = (EntityType)p.getParentType(); - } - } - } - - return value; - } - - @SuppressWarnings("unchecked") - public Object getDefinition(String ndtype) { - Object value = null; - LinkedHashMap _defs; - // no point in hasattr, because we have it, and it - // doesn't do anything except emit an exception anyway - //if not hasattr(self, 'defs'): - // defs = None - // ValidationIssueCollector.appendException( - // 
ValidationError(message="defs is " + str(defs))) - //else: - // defs = self.defs - _defs = this.defs; - - - if(_defs != null && _defs.get(ndtype) != null) { - value = _defs.get(ndtype); - } - - Object p = getParentType(); - if(p != null) { - Object inherited = ((EntityType)p).getDefinition(ndtype); - if(inherited != null) { - // inherited = dict(inherited) WTF?!? - if(value == null) { - value = inherited; - } - else { - //????? - //inherited.update(value) - //value.update(inherited) - for(Map.Entry me: ((LinkedHashMap)inherited).entrySet()) { - ((LinkedHashMap)value).put(me.getKey(),me.getValue()); - } - } - } - } - return value; - } - - public static void updateDefinitions(String version) { - ExtTools exttools = new ExtTools(); - String extensionDefsFile = exttools.getDefsFile(version); - - try (InputStream input = EntityType.class.getClassLoader().getResourceAsStream(extensionDefsFile);){ - Yaml yaml = new Yaml(); - LinkedHashMap nfvDefFile = (LinkedHashMap)yaml.load(input); - LinkedHashMap nfvDef = new LinkedHashMap<>(); - for(String section: TOSCA_DEF_SECTIONS) { - if(nfvDefFile.get(section) != null) { - LinkedHashMap value = - (LinkedHashMap)nfvDefFile.get(section); - for(String key: value.keySet()) { - nfvDef.put(key, value.get(key)); - } - } - } - TOSCA_DEF.putAll(nfvDef); - } - catch (IOException e) { - log.error("EntityType - updateDefinitions - Failed to update definitions from defs file {}",extensionDefsFile); - log.error("Exception:", e); - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE280", - String.format("Failed to update definitions from defs file \"%s\" ",extensionDefsFile))); - return; - } - } -} - -/*python - -from toscaparser.common.exception import ValidationIssueCollector -from toscaparser.common.exception import ValidationError -from toscaparser.extensions.exttools import ExtTools -import org.openecomp.sdc.toscaparser.api.utils.yamlparser - -log = logging.getLogger('tosca') - - -class EntityType(object): 
- '''Base class for TOSCA elements.''' - - SECTIONS = (DERIVED_FROM, PROPERTIES, ATTRIBUTES, REQUIREMENTS, - INTERFACES, CAPABILITIES, TYPE, ARTIFACTS) = \ - ('derived_from', 'properties', 'attributes', 'requirements', - 'interfaces', 'capabilities', 'type', 'artifacts') - - TOSCA_DEF_SECTIONS = ['node_types', 'data_types', 'artifact_types', - 'group_types', 'relationship_types', - 'capability_types', 'interface_types', - 'policy_types'] - - '''TOSCA definition file.''' - TOSCA_DEF_FILE = os.path.join( - os.path.dirname(os.path.abspath(__file__)), - "TOSCA_definition_1_0.yaml") - - loader = toscaparser.utils.yamlparser.load_yaml - - TOSCA_DEF_LOAD_AS_IS = loader(TOSCA_DEF_FILE) - - # Map of definition with pre-loaded values of TOSCA_DEF_FILE_SECTIONS - TOSCA_DEF = {} - for section in TOSCA_DEF_SECTIONS: - if section in TOSCA_DEF_LOAD_AS_IS.keys(): - value = TOSCA_DEF_LOAD_AS_IS[section] - for key in value.keys(): - TOSCA_DEF[key] = value[key] - - RELATIONSHIP_TYPE = (DEPENDSON, HOSTEDON, CONNECTSTO, ATTACHESTO, - LINKSTO, BINDSTO) = \ - ('tosca.relationships.DependsOn', - 'tosca.relationships.HostedOn', - 'tosca.relationships.ConnectsTo', - 'tosca.relationships.AttachesTo', - 'tosca.relationships.network.LinksTo', - 'tosca.relationships.network.BindsTo') - - NODE_PREFIX = 'tosca.nodes.' - RELATIONSHIP_PREFIX = 'tosca.relationships.' - CAPABILITY_PREFIX = 'tosca.capabilities.' - INTERFACE_PREFIX = 'tosca.interfaces.' - ARTIFACT_PREFIX = 'tosca.artifacts.' - POLICY_PREFIX = 'tosca.policies.' - GROUP_PREFIX = 'tosca.groups.' - # currently the data types are defined only for network - # but may have changes in the future. - DATATYPE_PREFIX = 'tosca.datatypes.' - DATATYPE_NETWORK_PREFIX = DATATYPE_PREFIX + 'network.' - TOSCA = 'tosca' - - def derived_from(self, defs): - '''Return a type this type is derived from.''' - return self.entity_value(defs, 'derived_from') - - def is_derived_from(self, type_str): - '''Check if object inherits from the given type. 
- - Returns true if this object is derived from 'type_str'. - False otherwise. - ''' - if not self.type: - return False - elif self.type == type_str: - return True - elif self.parent_type: - return self.parent_type.is_derived_from(type_str) - else: - return False - - def entity_value(self, defs, key): - if key in defs: - return defs[key] - - def get_value(self, ndtype, defs=None, parent=None): - value = None - if defs is None: - if not hasattr(self, 'defs'): - return None - defs = self.defs - if ndtype in defs: - # copy the value to avoid that next operations add items in the - # item definitions - value = copy.copy(defs[ndtype]) - if parent: - p = self - if p: - while p: - if ndtype in p.defs: - # get the parent value - parent_value = p.defs[ndtype] - if value: - if isinstance(value, dict): - for k, v in parent_value.items(): - if k not in value.keys(): - value[k] = v - if isinstance(value, list): - for p_value in parent_value: - if p_value not in value: - value.append(p_value) - else: - value = copy.copy(parent_value) - p = p.parent_type - return value - - def get_definition(self, ndtype): - value = None - if not hasattr(self, 'defs'): - defs = None - ValidationIssueCollector.appendException( - ValidationError(message="defs is " + str(defs))) - else: - defs = self.defs - if defs is not None and ndtype in defs: - value = defs[ndtype] - p = self.parent_type - if p: - inherited = p.get_definition(ndtype) - if inherited: - inherited = dict(inherited) - if not value: - value = inherited - else: - inherited.update(value) - value.update(inherited) - return value - - -def update_definitions(version): - exttools = ExtTools() - extension_defs_file = exttools.get_defs_file(version) - loader = toscaparser.utils.yamlparser.load_yaml - nfv_def_file = loader(extension_defs_file) - nfv_def = {} - for section in EntityType.TOSCA_DEF_SECTIONS: - if section in nfv_def_file.keys(): - value = nfv_def_file[section] - for key in value.keys(): - nfv_def[key] = value[key] - 
EntityType.TOSCA_DEF.update(nfv_def) -*/ diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/GroupType.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/GroupType.java deleted file mode 100644 index 0f6ae6d..0000000 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/GroupType.java +++ /dev/null @@ -1,216 +0,0 @@ -package org.openecomp.sdc.toscaparser.api.elements; - -import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; - -import java.util.LinkedHashMap; - -import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; - -public class GroupType extends StatefulEntityType { - - private static final String DERIVED_FROM = "derived_from"; - private static final String VERSION = "version"; - private static final String METADATA = "metadata"; - private static final String DESCRIPTION = "description"; - private static final String PROPERTIES = "properties"; - private static final String MEMBERS = "members"; - private static final String INTERFACES = "interfaces"; - - private static final String SECTIONS[] = { - DERIVED_FROM, VERSION, METADATA, DESCRIPTION, PROPERTIES, MEMBERS, INTERFACES}; - - private String groupType; - private LinkedHashMap customDef; - private String groupDescription; - private String groupVersion; - //private LinkedHashMap groupProperties; - //private ArrayList groupMembers; - private LinkedHashMap metaData; - - @SuppressWarnings("unchecked") - public GroupType(String _grouptype,LinkedHashMap _customDef) { - super(_grouptype,GROUP_PREFIX,_customDef); - - groupType = _grouptype; - customDef = _customDef; - _validateFields(); - if(defs != null) { - groupDescription = (String)defs.get(DESCRIPTION); - groupVersion = (String)defs.get(VERSION); - //groupProperties = (LinkedHashMap)defs.get(PROPERTIES); - //groupMembers = (ArrayList)defs.get(MEMBERS); - Object mdo = defs.get(METADATA); - if(mdo instanceof LinkedHashMap) { - metaData = (LinkedHashMap)mdo; - } - else { - metaData = null; - } - - 
if(metaData != null) { - _validateMetadata(metaData); - } - } - } - - public GroupType getParentType() { - // Return a group statefulentity of this entity is derived from. - if(defs == null) { - return null; - } - String pgroupEntity = derivedFrom(defs); - if(pgroupEntity != null) { - return new GroupType(pgroupEntity,customDef); - } - return null; - } - - public String getDescription() { - return groupDescription; - } - - public String getVersion() { - return groupVersion; - } - - @SuppressWarnings("unchecked") - public LinkedHashMap getInterfaces() { - Object ifo = getValue(INTERFACES,null,false); - if(ifo instanceof LinkedHashMap) { - return (LinkedHashMap)ifo; - } - return new LinkedHashMap(); - } - - private void _validateFields() { - if(defs != null) { - for(String name: defs.keySet()) { - boolean bFound = false; - for(String sect: SECTIONS) { - if(name.equals(sect)) { - bFound = true; - break; - } - } - if(!bFound) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE120", String.format( - "UnknownFieldError: Group Type \"%s\" contains unknown field \"%s\"", - groupType,name))); - } - } - } - } - - @SuppressWarnings("unchecked") - private void _validateMetadata(LinkedHashMap metadata) { - String mtt = (String) metadata.get("type"); - if(mtt != null && !mtt.equals("map") && !mtt.equals("tosca:map")) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE121", String.format( - "InvalidTypeError: \"%s\" defined in group for metadata is invalid", - mtt))); - } - for(String entrySchema: metadata.keySet()) { - Object estob = metadata.get(entrySchema); - if(estob instanceof LinkedHashMap) { - String est = (String)((LinkedHashMap)estob).get("type"); - if(!est.equals("string")) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE122", String.format( - "InvalidTypeError: \"%s\" defined in group for metadata \"%s\" is invalid", - est,entrySchema))); - } - } - } - 
} - - public String getType() { - return groupType; - } - - -} - -/*python - -from toscaparser.common.exception import ValidationIssueCollector -from toscaparser.common.exception import InvalidTypeError -from toscaparser.common.exception import UnknownFieldError -from toscaparser.elements.statefulentitytype import StatefulEntityType - - -class GroupType(StatefulEntityType): - '''TOSCA built-in group type.''' - - SECTIONS = (DERIVED_FROM, VERSION, METADATA, DESCRIPTION, PROPERTIES, - MEMBERS, INTERFACES) = \ - ("derived_from", "version", "metadata", "description", - "properties", "members", "interfaces") - - def __init__(self, grouptype, custom_def=None): - super(GroupType, self).__init__(grouptype, self.GROUP_PREFIX, - custom_def) - self.custom_def = custom_def - self.grouptype = grouptype - self._validate_fields() - self.group_description = None - if self.DESCRIPTION in self.defs: - self.group_description = self.defs[self.DESCRIPTION] - - self.group_version = None - if self.VERSION in self.defs: - self.group_version = self.defs[self.VERSION] - - self.group_properties = None - if self.PROPERTIES in self.defs: - self.group_properties = self.defs[self.PROPERTIES] - - self.group_members = None - if self.MEMBERS in self.defs: - self.group_members = self.defs[self.MEMBERS] - - if self.METADATA in self.defs: - self.meta_data = self.defs[self.METADATA] - self._validate_metadata(self.meta_data) - - @property - def parent_type(self): - '''Return a group statefulentity of this entity is derived from.''' - if not hasattr(self, 'defs'): - return None - pgroup_entity = self.derived_from(self.defs) - if pgroup_entity: - return GroupType(pgroup_entity, self.custom_def) - - @property - def description(self): - return self.group_description - - @property - def version(self): - return self.group_version - - @property - def interfaces(self): - return self.get_value(self.INTERFACES) - - def _validate_fields(self): - if self.defs: - for name in self.defs.keys(): - if name not in 
self.SECTIONS: - ValidationIssueCollector.appendException( - UnknownFieldError(what='Group Type %s' - % self.grouptype, field=name)) - - def _validate_metadata(self, meta_data): - if not meta_data.get('type') in ['map', 'tosca:map']: - ValidationIssueCollector.appendException( - InvalidTypeError(what='"%s" defined in group for ' - 'metadata' % (meta_data.get('type')))) - for entry_schema, entry_schema_type in meta_data.items(): - if isinstance(entry_schema_type, dict) and not \ - entry_schema_type.get('type') == 'string': - ValidationIssueCollector.appendException( - InvalidTypeError(what='"%s" defined in group for ' - 'metadata "%s"' - % (entry_schema_type.get('type'), - entry_schema))) -*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/InterfacesDef.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/InterfacesDef.java deleted file mode 100644 index 357ee23..0000000 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/InterfacesDef.java +++ /dev/null @@ -1,228 +0,0 @@ -package org.openecomp.sdc.toscaparser.api.elements; - -import org.openecomp.sdc.toscaparser.api.EntityTemplate; -import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; -import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; - -import java.util.ArrayList; -import java.util.LinkedHashMap; -import java.util.Map; - -public class InterfacesDef extends StatefulEntityType { - - public static final String LIFECYCLE = "tosca.interfaces.node.lifecycle.Standard"; - public static final String CONFIGURE = "tosca.interfaces.relationship.Configure"; - public static final String LIFECYCLE_SHORTNAME = "Standard"; - public static final String CONFIGURE_SHORTNAME = "Configure"; - - public static final String SECTIONS[] = { - LIFECYCLE, CONFIGURE, LIFECYCLE_SHORTNAME,CONFIGURE_SHORTNAME - }; - - public static final String IMPLEMENTATION = "implementation"; - public static final String INPUTS = "inputs"; - - public 
static final String INTERFACEVALUE[] = {IMPLEMENTATION, INPUTS}; - - public static final String INTERFACE_DEF_RESERVED_WORDS[] = { - "type", "inputs", "derived_from", "version", "description"}; - - private EntityType ntype; - private EntityTemplate nodeTemplate; - private String name; - private Object value; - private String implementation; - private LinkedHashMap inputs; - - - @SuppressWarnings("unchecked") - public InterfacesDef(EntityType inodeType, - String interfaceType, - EntityTemplate inodeTemplate, - String iname, - Object ivalue) { - // void - super(); - - ntype = inodeType; - nodeTemplate = inodeTemplate; - type = interfaceType; - name = iname; - value = ivalue; - implementation = null; - inputs = null; - defs = new LinkedHashMap(); - - if(interfaceType.equals(LIFECYCLE_SHORTNAME)) { - interfaceType = LIFECYCLE; - } - if(interfaceType.equals(CONFIGURE_SHORTNAME)) { - interfaceType = CONFIGURE; - } - - // only NodeType has getInterfaces "hasattr(ntype,interfaces)" - // while RelationshipType does not - if(ntype instanceof NodeType) { - if(((NodeType)ntype).getInterfaces() != null && - ((NodeType)ntype).getInterfaces().values().contains(interfaceType)) { - LinkedHashMap nii = (LinkedHashMap) - ((NodeType)ntype).getInterfaces().get(interfaceType); - interfaceType = (String)nii.get("type"); - } - } - if(inodeType != null) { - if(nodeTemplate != null && nodeTemplate.getCustomDef() != null && - nodeTemplate.getCustomDef().values().contains(interfaceType)) { - defs = (LinkedHashMap) - nodeTemplate.getCustomDef().get(interfaceType); - } - else { - defs = (LinkedHashMap)TOSCA_DEF.get(interfaceType); - } - } - - if(ivalue != null) { - if(ivalue instanceof LinkedHashMap) { - for(Map.Entry me: ((LinkedHashMap)ivalue).entrySet()) { - if(me.getKey().equals("implementation")) { - implementation = (String)me.getValue(); - } - else if(me.getKey().equals("inputs")) { - inputs = (LinkedHashMap)me.getValue(); - } - else { - 
ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE123", String.format( - "UnknownFieldError: \"interfaces\" of template \"%s\" contain unknown field \"%s\"", - nodeTemplate.getName(),me.getKey()))); - } - } - } - else { - implementation = (String)ivalue; - } - } - } - - public ArrayList getLifecycleOps() { - if(defs != null) { - if(type.equals(LIFECYCLE)) { - return _ops(); - } - } - return null; - } - - public ArrayList getConfigureOps() { - if(defs != null) { - if(type.equals(CONFIGURE)) { - return _ops(); - } - } - return null; - } - - private ArrayList _ops() { - return new ArrayList(defs.keySet()); - } - - // getters/setters - - public LinkedHashMap getInputs() { - return inputs; - } - - public void setInput(String name,Object value) { - inputs.put(name, value); - } -} - -/*python - -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from toscaparser.common.exception import ValidationIssueCollector -from toscaparser.common.exception import UnknownFieldError -from toscaparser.elements.statefulentitytype import StatefulEntityType - -SECTIONS = (LIFECYCLE, CONFIGURE, LIFECYCLE_SHORTNAME, - CONFIGURE_SHORTNAME) = \ - ('tosca.interfaces.node.lifecycle.Standard', - 'tosca.interfaces.relationship.Configure', - 'Standard', 'Configure') - -INTERFACEVALUE = (IMPLEMENTATION, INPUTS) = ('implementation', 'inputs') - -INTERFACE_DEF_RESERVED_WORDS = ['type', 'inputs', 'derived_from', 'version', - 'description'] - - -class InterfacesDef(StatefulEntityType): - '''TOSCA built-in interfaces type.''' - - def __init__(self, node_type, interfacetype, - node_template=None, name=None, value=None): - self.ntype = node_type - self.node_template = node_template - self.type = interfacetype - self.name = name - self.value = value - self.implementation = None - self.inputs = None - self.defs = {} - if interfacetype == LIFECYCLE_SHORTNAME: - interfacetype = LIFECYCLE - if interfacetype == CONFIGURE_SHORTNAME: - interfacetype = CONFIGURE - if hasattr(self.ntype, 'interfaces') \ - and self.ntype.interfaces \ - and interfacetype in self.ntype.interfaces: - interfacetype = self.ntype.interfaces[interfacetype]['type'] - if node_type: - if self.node_template and self.node_template.custom_def \ - and interfacetype in self.node_template.custom_def: - self.defs = self.node_template.custom_def[interfacetype] - else: - self.defs = self.TOSCA_DEF[interfacetype] - if value: - if isinstance(self.value, dict): - for i, j in self.value.items(): - if i == IMPLEMENTATION: - self.implementation = j - elif i == INPUTS: - self.inputs = j - else: - what = ('"interfaces" of template "%s"' % - self.node_template.name) - ValidationIssueCollector.appendException( - UnknownFieldError(what=what, field=i)) - else: - self.implementation = value - - @property - def lifecycle_ops(self): - if self.defs: - if self.type == LIFECYCLE: - return self._ops() 
- - @property - def configure_ops(self): - if self.defs: - if self.type == CONFIGURE: - return self._ops() - - def _ops(self): - ops = [] - for name in list(self.defs.keys()): - ops.append(name) - return ops -*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/Metadata.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/Metadata.java deleted file mode 100644 index b153876..0000000 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/Metadata.java +++ /dev/null @@ -1,41 +0,0 @@ -package org.openecomp.sdc.toscaparser.api.elements; - -import java.util.AbstractMap; -import java.util.HashMap; -import java.util.Map; -import java.util.stream.Collectors; - -public class Metadata { - - private final Map metadataMap; - - public Metadata(Map metadataMap) { - this.metadataMap = metadataMap != null ? metadataMap : new HashMap<>(); - } - - public String getValue(String key) { - - Object obj = this.metadataMap.get(key); - if (obj != null){ - return String.valueOf(obj); - } - return null; - } - - /** - * Get all properties of a Metadata object.
- * This object represents the "metadata" section of some entity. - * @return all properties of this Metadata, as a key-value. - */ - public Map getAllProperties() { - return metadataMap.entrySet().stream().map(e-> new AbstractMap.SimpleEntry(e.getKey(), String.valueOf(e.getValue()))).collect(Collectors.toMap(Map.Entry::getKey,Map.Entry::getValue)); - } - - @Override - public String toString() { - return "Metadata{" + - "metadataMap=" + metadataMap + - '}'; - } - -} diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/NodeType.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/NodeType.java deleted file mode 100644 index 48fbe59..0000000 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/NodeType.java +++ /dev/null @@ -1,525 +0,0 @@ -package org.openecomp.sdc.toscaparser.api.elements; - -import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; -import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; - -import java.util.ArrayList; -import java.util.LinkedHashMap; -import java.util.Map; - -public class NodeType extends StatefulEntityType { - // TOSCA built-in node type - - private static final String DERIVED_FROM = "derived_from"; - private static final String METADATA = "metadata"; - private static final String PROPERTIES = "properties"; - private static final String VERSION = "version"; - private static final String DESCRIPTION = "description"; - private static final String ATTRIBUTES = "attributes"; - private static final String REQUIREMENTS = "requirements"; - private static final String CAPABILITIES = "capabilities"; - private static final String INTERFACES = "interfaces"; - private static final String ARTIFACTS = "artifacts"; - - private static final String SECTIONS[] = { - DERIVED_FROM, METADATA, PROPERTIES, VERSION, DESCRIPTION, ATTRIBUTES, REQUIREMENTS, CAPABILITIES, INTERFACES, ARTIFACTS - }; - - private String ntype; - public LinkedHashMap customDef; - - public NodeType(String 
nttype,LinkedHashMap ntcustomDef) { - super(nttype,NODE_PREFIX, ntcustomDef); - ntype = nttype; - customDef = ntcustomDef; - _validateKeys(); - } - - public Object getParentType() { - // Return a node this node is derived from - if(defs == null) { - return null; - } - String pnode = derivedFrom(defs); - if(pnode != null && !pnode.isEmpty()) { - return new NodeType(pnode,customDef); - } - return null; - } - - @SuppressWarnings("unchecked") - public LinkedHashMap getRelationship() { - // Return a dictionary of relationships to other node types - - // This method returns a dictionary of named relationships that nodes - // of the current node type (self) can have to other nodes (of specific - // types) in a TOSCA template. - - LinkedHashMap relationship = new LinkedHashMap<>(); - ArrayList> requires; - Object treq = getAllRequirements(); - if(treq != null) { - // NOTE(sdmonov): Check if requires is a dict. - // If it is a dict convert it to a list of dicts. - // This is needed because currently the code below supports only - // lists as requirements definition. The following check will - // make sure if a map (dict) was provided it will be converted to - // a list before proceeding to the parsing. 
- if(treq instanceof LinkedHashMap) { - requires = new ArrayList<>(); - for(Map.Entry me: ((LinkedHashMap)treq).entrySet()) { - LinkedHashMap tl = new LinkedHashMap<>(); - tl.put(me.getKey(),me.getValue()); - requires.add(tl); - } - } - else { - requires = (ArrayList>)treq; - } - - String keyword = null; - String nodeType = null; - for(LinkedHashMap require: requires) { - String relation = null; - for(Map.Entry re: require.entrySet()) { - String key = re.getKey(); - LinkedHashMap req = (LinkedHashMap)re.getValue(); - if(req.get("relationship") != null) { - Object trelation = req.get("relationship"); - // trelation is a string or a dict with "type" mapped to the string we want - if(trelation instanceof String) { - relation = (String)trelation; - } - else { - if(((LinkedHashMap)trelation).get("type") != null) { - relation = (String)((LinkedHashMap)trelation).get("type"); - } - } - nodeType = (String)req.get("node"); - //BUG meaningless?? LinkedHashMap value = req; - if(nodeType != null) { - keyword = "node"; - } - else { - // If value is a dict and has a type key - // we need to lookup the node type using - // the capability type - String captype = (String)req.get("capability"); - String value = _getNodeTypeByCap(captype); - String getRelation = _getRelation(key,value); - if (getRelation != null) { - relation = getRelation; - } - keyword = key; - nodeType = value; - } - } - - } - RelationshipType rtype = new RelationshipType(relation, keyword, customDef); - NodeType relatednode = new NodeType(nodeType, customDef); - relationship.put(rtype, relatednode); - } - } - return relationship; - - } - - @SuppressWarnings("unchecked") - private String _getNodeTypeByCap(String cap) { - // Find the node type that has the provided capability - - // This method will lookup all node types if they have the - // provided capability. 
- // Filter the node types - ArrayList nodeTypes = new ArrayList<>(); - for(String nt: customDef.keySet()) { - if(nt.startsWith(NODE_PREFIX) || nt.startsWith("org.openecomp") && !nt.equals("tosca.nodes.Root")) { - nodeTypes.add(nt); - } - } - for(String nt: nodeTypes) { - LinkedHashMap nodeDef = (LinkedHashMap)customDef.get(nt); - if(nodeDef instanceof LinkedHashMap && nodeDef.get("capabilities") != null) { - LinkedHashMap nodeCaps = (LinkedHashMap)nodeDef.get("capabilities"); - if(nodeCaps != null) { - for(Object val: nodeCaps.values()) { - if(val instanceof LinkedHashMap) { - String tp = (String)((LinkedHashMap)val).get("type"); - if(tp != null && tp.equals(cap)) { - return nt; - } - } - } - } - } - } - return null; - } - - @SuppressWarnings("unchecked") - private String _getRelation(String key,String ndtype) { - String relation = null; - NodeType ntype = new NodeType(ndtype, customDef); - LinkedHashMap caps = ntype.getCapabilities(); - if(caps != null && caps.get(key) != null) { - CapabilityTypeDef c = caps.get(key); - for(int i=0; i< RELATIONSHIP_TYPE.length; i++) { - String r = RELATIONSHIP_TYPE[i]; - if(r != null) { - relation = r; - break; - } - LinkedHashMap rtypedef = (LinkedHashMap)customDef.get(r); - for(Object o: rtypedef.values()) { - LinkedHashMap properties = (LinkedHashMap)o; - if(properties.get(c.getType()) != null) { - relation = r; - break; - } - } - if(relation != null) { - break; - } - else { - for(Object o: rtypedef.values()) { - LinkedHashMap properties = (LinkedHashMap)o; - if(properties.get(c.getParentType()) != null) { - relation = r; - break; - } - } - } - } - } - return relation; - } - - @SuppressWarnings("unchecked") - public ArrayList getCapabilitiesObjects() { - // Return a list of capability objects - ArrayList typecapabilities = new ArrayList<>(); - LinkedHashMap caps = (LinkedHashMap)getValue(CAPABILITIES, null, true); - if(caps != null) { - // 'cname' is symbolic name of the capability - // 'cvalue' is a dict { 'type': } - 
for(Map.Entry me: caps.entrySet()) { - String cname = me.getKey(); - LinkedHashMap cvalue = (LinkedHashMap)me.getValue(); - String ctype = cvalue.get("type"); - CapabilityTypeDef cap = new CapabilityTypeDef(cname,ctype,type,customDef); - typecapabilities.add(cap); - } - } - return typecapabilities; - } - - public LinkedHashMap getCapabilities() { - // Return a dictionary of capability name-objects pairs - LinkedHashMap caps = new LinkedHashMap<>(); - for(CapabilityTypeDef ctd: getCapabilitiesObjects()) { - caps.put(ctd.getName(),ctd); - } - return caps; - } - - @SuppressWarnings("unchecked") - public ArrayList getRequirements() { - return (ArrayList)getValue(REQUIREMENTS,null,true); - } - - public ArrayList getAllRequirements() { - return getRequirements(); - } - - @SuppressWarnings("unchecked") - public LinkedHashMap getInterfaces() { - return (LinkedHashMap)getValue(INTERFACES,null,false); - } - - - @SuppressWarnings("unchecked") - public ArrayList getLifecycleInputs() - { - // Return inputs to life cycle operations if found - ArrayList inputs = new ArrayList<>(); - LinkedHashMap interfaces = getInterfaces(); - if(interfaces != null) { - for(Map.Entry me: interfaces.entrySet()) { - String iname = me.getKey(); - LinkedHashMap ivalue = (LinkedHashMap)me.getValue(); - if(iname.equals(InterfacesDef.LIFECYCLE)) { - for(Map.Entry ie: ivalue.entrySet()) { - if(ie.getKey().equals("input")) { - LinkedHashMap y = (LinkedHashMap)ie.getValue(); - for(String i: y.keySet()) { - inputs.add(i); - } - } - } - } - } - } - return inputs; - } - - public ArrayList getLifecycleOperations() { - // Return available life cycle operations if found - ArrayList ops = null; - LinkedHashMap interfaces = getInterfaces(); - if(interfaces != null) { - InterfacesDef i = new InterfacesDef(this,InterfacesDef.LIFECYCLE,null,null,null); - ops = i.getLifecycleOps(); - } - return ops; - } - - public CapabilityTypeDef getCapability(String name) { - //BUG?? 
the python code has to be wrong - // it refers to a bad attribute 'value'... - LinkedHashMap caps = getCapabilities(); - if(caps != null) { - return caps.get(name); - } - return null; - /* - def get_capability(self, name): - caps = self.get_capabilities() - if caps and name in caps.keys(): - return caps[name].value - */ - } - - public String getCapabilityType(String name) { - //BUG?? the python code has to be wrong - // it refers to a bad attribute 'value'... - CapabilityTypeDef captype = getCapability(name); - if(captype != null) { - return captype.getType(); - } - return null; - /* - def get_capability_type(self, name): - captype = self.get_capability(name) - if captype and name in captype.keys(): - return captype[name].value - */ - } - - private void _validateKeys() { - if(defs != null) { - for(String key: defs.keySet()) { - boolean bFound = false; - for(int i=0; i< SECTIONS.length; i++) { - if(key.equals(SECTIONS[i])) { - bFound = true; - break; - } - } - if(!bFound) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE124", String.format( - "UnknownFieldError: Nodetype \"%s\" has unknown field \"%s\"",ntype,key))); - } - } - } - } - -} - -/*python - -from toscaparser.common.exception import ValidationIssueCollector -from toscaparser.common.exception import UnknownFieldError -from toscaparser.elements.capabilitytype import CapabilityTypeDef -import org.openecomp.sdc.toscaparser.api.elements.interfaces as ifaces -from toscaparser.elements.interfaces import InterfacesDef -from toscaparser.elements.relationshiptype import RelationshipType -from toscaparser.elements.statefulentitytype import StatefulEntityType - - -class NodeType(StatefulEntityType): - '''TOSCA built-in node type.''' - SECTIONS = (DERIVED_FROM, METADATA, PROPERTIES, VERSION, DESCRIPTION, ATTRIBUTES, REQUIREMENTS, CAPABILITIES, INTERFACES, ARTIFACTS) = \ - ('derived_from', 'metadata', 'properties', 'version', - 'description', 'attributes', 'requirements', 
'capabilities', - 'interfaces', 'artifacts') - - def __init__(self, ntype, custom_def=None): - super(NodeType, self).__init__(ntype, self.NODE_PREFIX, custom_def) - self.ntype = ntype - self.custom_def = custom_def - self._validate_keys() - - @property - def parent_type(self): - '''Return a node this node is derived from.''' - if not hasattr(self, 'defs'): - return None - pnode = self.derived_from(self.defs) - if pnode: - return NodeType(pnode, self.custom_def) - - @property - def relationship(self): - '''Return a dictionary of relationships to other node types. - - This method returns a dictionary of named relationships that nodes - of the current node type (self) can have to other nodes (of specific - types) in a TOSCA template. - - ''' - relationship = {} - requires = self.get_all_requirements() - if requires: - # NOTE(sdmonov): Check if requires is a dict. - # If it is a dict convert it to a list of dicts. - # This is needed because currently the code below supports only - # lists as requirements definition. The following check will - # make sure if a map (dict) was provided it will be converted to - # a list before proceeding to the parsing. - if isinstance(requires, dict): - requires = [{key: value} for key, value in requires.items()] - - keyword = None - node_type = None - for require in requires: - for key, req in require.items(): - if 'relationship' in req: - relation = req.get('relationship') - if 'type' in relation: - relation = relation.get('type') - node_type = req.get('node') - value = req - if node_type: - keyword = 'node' - else: - # If value is a dict and has a type key - # we need to lookup the node type using - # the capability type - value = req - if isinstance(value, dict): - captype = value['capability'] - value = (self. 
- _get_node_type_by_cap(key, captype)) - relation = self._get_relation(key, value) - keyword = key - node_type = value - rtype = RelationshipType(relation, keyword, self.custom_def) - relatednode = NodeType(node_type, self.custom_def) - relationship[rtype] = relatednode - return relationship - - def _get_node_type_by_cap(self, key, cap): - '''Find the node type that has the provided capability - - This method will lookup all node types if they have the - provided capability. - ''' - - # Filter the node types - node_types = [node_type for node_type in self.TOSCA_DEF.keys() - if node_type.startswith(self.NODE_PREFIX) and - node_type != 'tosca.nodes.Root'] - - for node_type in node_types: - node_def = self.TOSCA_DEF[node_type] - if isinstance(node_def, dict) and 'capabilities' in node_def: - node_caps = node_def['capabilities'] - for value in node_caps.values(): - if isinstance(value, dict) and \ - 'type' in value and value['type'] == cap: - return node_type - - def _get_relation(self, key, ndtype): - relation = None - ntype = NodeType(ndtype) - caps = ntype.get_capabilities() - if caps and key in caps.keys(): - c = caps[key] - for r in self.RELATIONSHIP_TYPE: - rtypedef = ntype.TOSCA_DEF[r] - for properties in rtypedef.values(): - if c.type in properties: - relation = r - break - if relation: - break - else: - for properties in rtypedef.values(): - if c.parent_type in properties: - relation = r - break - return relation - - def get_capabilities_objects(self): - '''Return a list of capability objects.''' - typecapabilities = [] - caps = self.get_value(self.CAPABILITIES, None, True) - if caps: - # 'name' is symbolic name of the capability - # 'value' is a dict { 'type': } - for name, value in caps.items(): - ctype = value.get('type') - cap = CapabilityTypeDef(name, ctype, self.type, - self.custom_def) - typecapabilities.append(cap) - return typecapabilities - - def get_capabilities(self): - '''Return a dictionary of capability name-objects pairs.''' - return {cap.name: 
cap - for cap in self.get_capabilities_objects()} - - @property - def requirements(self): - return self.get_value(self.REQUIREMENTS, None, True) - - def get_all_requirements(self): - return self.requirements - - @property - def interfaces(self): - return self.get_value(self.INTERFACES) - - @property - def lifecycle_inputs(self): - '''Return inputs to life cycle operations if found.''' - inputs = [] - interfaces = self.interfaces - if interfaces: - for name, value in interfaces.items(): - if name == ifaces.LIFECYCLE: - for x, y in value.items(): - if x == 'inputs': - for i in y.iterkeys(): - inputs.append(i) - return inputs - - @property - def lifecycle_operations(self): - '''Return available life cycle operations if found.''' - ops = None - interfaces = self.interfaces - if interfaces: - i = InterfacesDef(self.type, ifaces.LIFECYCLE) - ops = i.lifecycle_ops - return ops - - def get_capability(self, name): - caps = self.get_capabilities() - if caps and name in caps.keys(): - return caps[name].value - - def get_capability_type(self, name): - captype = self.get_capability(name) - if captype and name in captype.keys(): - return captype[name].value - - def _validate_keys(self): - if self.defs: - for key in self.defs.keys(): - if key not in self.SECTIONS: - ValidationIssueCollector.appendException( - UnknownFieldError(what='Nodetype"%s"' % self.ntype, - field=key)) -*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/PolicyType.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/PolicyType.java deleted file mode 100644 index b046a48..0000000 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/PolicyType.java +++ /dev/null @@ -1,291 +0,0 @@ -package org.openecomp.sdc.toscaparser.api.elements; - -import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; - -import java.util.ArrayList; -import java.util.LinkedHashMap; - -import org.openecomp.sdc.toscaparser.api.utils.TOSCAVersionProperty; 
-import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; - -public class PolicyType extends StatefulEntityType { - - private static final String DERIVED_FROM = "derived_from"; - private static final String METADATA = "metadata"; - private static final String PROPERTIES = "properties"; - private static final String VERSION = "version"; - private static final String DESCRIPTION = "description"; - private static final String TARGETS = "targets"; - private static final String TRIGGERS = "triggers"; - private static final String TYPE = "type"; - - private static final String SECTIONS[] = { - DERIVED_FROM, METADATA, PROPERTIES, VERSION, DESCRIPTION, TARGETS, TRIGGERS, TYPE - }; - - private LinkedHashMap customDef; - private String policyDescription; - private Object policyVersion; - private LinkedHashMap properties; - private LinkedHashMap parentPolicies; - private LinkedHashMap metaData; - private ArrayList targetsList; - - - public PolicyType(String _type, LinkedHashMap _customDef) { - super(_type,POLICY_PREFIX,_customDef); - - type = _type; - customDef = _customDef; - _validateKeys(); - - metaData = null; - if(defs != null && defs.get(METADATA) != null) { - metaData = (LinkedHashMap)defs.get(METADATA); - _validateMetadata(metaData); - } - - properties = null; - if(defs != null && defs.get(PROPERTIES) != null) { - properties = (LinkedHashMap)defs.get(PROPERTIES); - } - parentPolicies = _getParentPolicies(); - - policyVersion = null; - if(defs != null && defs.get(VERSION) != null) { - policyVersion = (new TOSCAVersionProperty( - defs.get(VERSION))).getVersion(); - } - - policyDescription = null; - if(defs != null && defs.get(DESCRIPTION) != null) { - policyDescription = (String)defs.get(DESCRIPTION); - } - - targetsList = null; - if(defs != null && defs.get(TARGETS) != null) { - targetsList = (ArrayList)defs.get(TARGETS); - _validateTargets(targetsList,customDef); - } - - } - - private LinkedHashMap _getParentPolicies() { - LinkedHashMap policies = new 
LinkedHashMap<>(); - String parentPolicy; - if(getParentType() != null) { - parentPolicy = getParentType().getType(); - } - else { - parentPolicy = null; - } - if(parentPolicy != null) { - while(parentPolicy != null && !parentPolicy.equals("tosca.policies.Root")) { - policies.put(parentPolicy, TOSCA_DEF.get(parentPolicy)); - parentPolicy = (String) - ((LinkedHashMap)policies.get(parentPolicy)).get("derived_from);"); - } - } - return policies; - } - - public String getType() { - return type; - } - - public PolicyType getParentType() { - // Return a policy statefulentity of this node is derived from - if(defs == null) { - return null; - } - String ppolicyEntity = derivedFrom(defs); - if(ppolicyEntity != null) { - return new PolicyType(ppolicyEntity,customDef); - } - return null; - } - - public Object getPolicy(String name) { - // Return the definition of a policy field by name - if(defs != null && defs.get(name) != null) { - return defs.get(name); - } - return null; - } - - public ArrayList getTargets() { - // Return targets - return targetsList; - } - - public String getDescription() { - return policyDescription; - } - - public Object getVersion() { - return policyVersion; - } - - private void _validateKeys() { - for(String key: defs.keySet()) { - boolean bFound = false; - for(String sect: SECTIONS) { - if(key.equals(sect)) { - bFound = true; - break; - } - } - if(!bFound) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE125", String.format( - "UnknownFieldError: Policy \"%s\" contains unknown field \"%s\"", - type,key))); - } - } - } - - private void _validateTargets(ArrayList _targetsList, - LinkedHashMap _customDef) { - for(String nodetype: _targetsList) { - if(_customDef.get(nodetype) == null) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE126", String.format( - "InvalidTypeError: \"%s\" defined in targets for policy \"%s\"", - nodetype,type))); - - } - } - } - - private void 
_validateMetadata(LinkedHashMap _metaData) { - String mtype = (String)_metaData.get("type"); - if(mtype != null && !mtype.equals("map") && !mtype.equals("tosca:map")) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE127", String.format( - "InvalidTypeError: \"%s\" defined in policy for metadata", - mtype))); - } - for(String entrySchema: metaData.keySet()) { - Object estob = metaData.get(entrySchema); - if(estob instanceof LinkedHashMap) { - String est = (String) - ((LinkedHashMap)estob).get("type"); - if(!est.equals("string")) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE128", String.format( - "InvalidTypeError: \"%s\" defined in policy for metadata \"%s\"", - est,entrySchema))); - } - } - } - } - -} - -/*python - -from toscaparser.common.exception import ValidationIssueCollector -from toscaparser.common.exception import InvalidTypeError -from toscaparser.common.exception import UnknownFieldError -from toscaparser.elements.statefulentitytype import StatefulEntityType -from toscaparser.utils.validateutils import TOSCAVersionProperty - - -class PolicyType(StatefulEntityType): - - '''TOSCA built-in policies type.''' - SECTIONS = (DERIVED_FROM, METADATA, PROPERTIES, VERSION, DESCRIPTION, TARGETS) = \ - ('derived_from', 'metadata', 'properties', 'version', - 'description', 'targets') - - def __init__(self, ptype, custom_def=None): - super(PolicyType, self).__init__(ptype, self.POLICY_PREFIX, - custom_def) - self.type = ptype - self.custom_def = custom_def - self._validate_keys() - - self.meta_data = None - if self.METADATA in self.defs: - self.meta_data = self.defs[self.METADATA] - self._validate_metadata(self.meta_data) - - self.properties = None - if self.PROPERTIES in self.defs: - self.properties = self.defs[self.PROPERTIES] - self.parent_policies = self._get_parent_policies() - - self.policy_version = None - if self.VERSION in self.defs: - self.policy_version = TOSCAVersionProperty( 
- self.defs[self.VERSION]).get_version() - - self.policy_description = self.defs[self.DESCRIPTION] \ - if self.DESCRIPTION in self.defs else None - - self.targets_list = None - if self.TARGETS in self.defs: - self.targets_list = self.defs[self.TARGETS] - self._validate_targets(self.targets_list, custom_def) - - def _get_parent_policies(self): - policies = {} - parent_policy = self.parent_type.type if self.parent_type else None - if parent_policy: - while parent_policy != 'tosca.policies.Root': - policies[parent_policy] = self.TOSCA_DEF[parent_policy] - parent_policy = policies[parent_policy]['derived_from'] - return policies - - @property - def parent_type(self): - '''Return a policy statefulentity of this node is derived from.''' - if not hasattr(self, 'defs'): - return None - ppolicy_entity = self.derived_from(self.defs) - if ppolicy_entity: - return PolicyType(ppolicy_entity, self.custom_def) - - def get_policy(self, name): - '''Return the definition of a policy field by name.''' - if name in self.defs: - return self.defs[name] - - @property - def targets(self): - '''Return targets.''' - return self.targets_list - - @property - def description(self): - return self.policy_description - - @property - def version(self): - return self.policy_version - - def _validate_keys(self): - for key in self.defs.keys(): - if key not in self.SECTIONS: - ValidationIssueCollector.appendException( - UnknownFieldError(what='Policy "%s"' % self.type, - field=key)) - - def _validate_targets(self, targets_list, custom_def): - for nodetype in targets_list: - if nodetype not in custom_def: - ValidationIssueCollector.appendException( - InvalidTypeError(what='"%s" defined in targets for ' - 'policy "%s"' % (nodetype, self.type))) - - def _validate_metadata(self, meta_data): - if not meta_data.get('type') in ['map', 'tosca:map']: - ValidationIssueCollector.appendException( - InvalidTypeError(what='"%s" defined in policy for ' - 'metadata' % (meta_data.get('type')))) - - for entry_schema, 
entry_schema_type in meta_data.items(): - if isinstance(entry_schema_type, dict) and not \ - entry_schema_type.get('type') == 'string': - ValidationIssueCollector.appendException( - InvalidTypeError(what='"%s" defined in policy for ' - 'metadata "%s"' - % (entry_schema_type.get('type'), - entry_schema))) -*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/PortSpec.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/PortSpec.java deleted file mode 100644 index 1a6745c..0000000 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/PortSpec.java +++ /dev/null @@ -1,161 +0,0 @@ -package org.openecomp.sdc.toscaparser.api.elements; - -import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; - -import java.util.LinkedHashMap; - -import org.openecomp.sdc.toscaparser.api.DataEntity; -import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; -import org.openecomp.sdc.toscaparser.api.utils.ValidateUtils; - -public class PortSpec { - // Parent class for tosca.datatypes.network.PortSpec type - - private static final String SHORTNAME = "PortSpec"; - private static final String TYPE_URI = "tosca.datatypes.network." + SHORTNAME; - - private static final String PROTOCOL = "protocol"; - private static final String SOURCE = "source"; - private static final String SOURCE_RANGE = "source_range"; - private static final String TARGET = "target"; - private static final String TARGET_RANGE = "target_range"; - - private static final String PROPERTY_NAMES[] = { - PROTOCOL, SOURCE, SOURCE_RANGE, - TARGET, TARGET_RANGE - }; - - // todo(TBD) May want to make this a subclass of DataType - // and change init method to set PortSpec's properties - public PortSpec() { - - } - - // The following additional requirements MUST be tested: - // 1) A valid PortSpec MUST have at least one of the following properties: - // target, target_range, source or source_range. 
- // 2) A valid PortSpec MUST have a value for the source property that - // is within the numeric range specified by the property source_range - // when source_range is specified. - // 3) A valid PortSpec MUST have a value for the target property that is - // within the numeric range specified by the property target_range - // when target_range is specified. - public static void validateAdditionalReq(Object _properties, - String propName, - LinkedHashMap custom_def) { - - try { - LinkedHashMap properties = (LinkedHashMap)_properties; - Object source = properties.get(PortSpec.SOURCE); - Object sourceRange = properties.get(PortSpec.SOURCE_RANGE); - Object target = properties.get(PortSpec.TARGET); - Object targetRange = properties.get(PortSpec.TARGET_RANGE); - - // verify one of the specified values is set - if(source == null && sourceRange == null && - target == null && targetRange == null) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE129", String.format( - "InvalidTypeAdditionalRequirementsError: Additional requirements for type \"%s\" not met", - TYPE_URI))); - } - // Validate source value is in specified range - if(source != null && sourceRange != null) { - ValidateUtils.validateValueInRange(source,sourceRange,SOURCE); - } - else { - DataEntity portdef = new DataEntity("PortDef", source, null, SOURCE); - portdef.validate(); - } - // Validate target value is in specified range - if(target != null && targetRange != null) { - ValidateUtils.validateValueInRange(target,targetRange,SOURCE); - } - else { - DataEntity portdef = new DataEntity("PortDef", source, null, TARGET); - portdef.validate(); - } - } - catch(Exception e) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE130", String.format( - "ValueError: \"%s\" do not meet requirements for type \"%s\"", - _properties.toString(),SHORTNAME))); - } - } - -} - -/*python - -from toscaparser.common.exception import ValidationIssueCollector 
-from toscaparser.common.exception import InvalidTypeAdditionalRequirementsError -from toscaparser.utils.gettextutils import _ -import org.openecomp.sdc.toscaparser.api.utils.validateutils as validateutils - -log = logging.getLogger('tosca') - - -class PortSpec(object): - '''Parent class for tosca.datatypes.network.PortSpec type.''' - - SHORTNAME = 'PortSpec' - TYPE_URI = 'tosca.datatypes.network.' + SHORTNAME - - PROPERTY_NAMES = ( - PROTOCOL, SOURCE, SOURCE_RANGE, - TARGET, TARGET_RANGE - ) = ( - 'protocol', 'source', 'source_range', - 'target', 'target_range' - ) - - # TODO(TBD) May want to make this a subclass of DataType - # and change init method to set PortSpec's properties - def __init__(self): - pass - - # The following additional requirements MUST be tested: - # 1) A valid PortSpec MUST have at least one of the following properties: - # target, target_range, source or source_range. - # 2) A valid PortSpec MUST have a value for the source property that - # is within the numeric range specified by the property source_range - # when source_range is specified. - # 3) A valid PortSpec MUST have a value for the target property that is - # within the numeric range specified by the property target_range - # when target_range is specified. 
- @staticmethod - def validate_additional_req(properties, prop_name, custom_def=None, ): - try: - source = properties.get(PortSpec.SOURCE) - source_range = properties.get(PortSpec.SOURCE_RANGE) - target = properties.get(PortSpec.TARGET) - target_range = properties.get(PortSpec.TARGET_RANGE) - - # verify one of the specified values is set - if source is None and source_range is None and \ - target is None and target_range is None: - ValidationIssueCollector.appendException( - InvalidTypeAdditionalRequirementsError( - type=PortSpec.TYPE_URI)) - # Validate source value is in specified range - if source and source_range: - validateutils.validate_value_in_range(source, source_range, - PortSpec.SOURCE) - else: - from toscaparser.dataentity import DataEntity - portdef = DataEntity('PortDef', source, None, PortSpec.SOURCE) - portdef.validate() - # Validate target value is in specified range - if target and target_range: - validateutils.validate_value_in_range(target, target_range, - PortSpec.TARGET) - else: - from toscaparser.dataentity import DataEntity - portdef = DataEntity('PortDef', source, None, PortSpec.TARGET) - portdef.validate() - except Exception: - msg = _('"%(value)s" do not meet requirements ' - 'for type "%(type)s".') \ - % {'value': properties, 'type': PortSpec.SHORTNAME} - ValidationIssueCollector.appendException( - ValueError(msg)) -*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/PropertyDef.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/PropertyDef.java deleted file mode 100644 index bdd376b..0000000 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/PropertyDef.java +++ /dev/null @@ -1,231 +0,0 @@ -package org.openecomp.sdc.toscaparser.api.elements; - -import java.util.LinkedHashMap; -import java.util.Map; - -import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; -import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; - -public class PropertyDef 
{ - - private static final String PROPERTY_KEYNAME_DEFAULT = "default"; - private static final String PROPERTY_KEYNAME_REQUIRED = "required"; - private static final String PROPERTY_KEYNAME_STATUS = "status"; - private static final String VALID_PROPERTY_KEYNAMES[] = { - PROPERTY_KEYNAME_DEFAULT, - PROPERTY_KEYNAME_REQUIRED, - PROPERTY_KEYNAME_STATUS}; - - private static final boolean PROPERTY_REQUIRED_DEFAULT = true; - - private static final String VALID_REQUIRED_VALUES[] = {"true", "false"}; - - private static final String PROPERTY_STATUS_SUPPORTED = "supported"; - private static final String PROPERTY_STATUS_EXPERIMENTAL = "experimental"; - private static final String VALID_STATUS_VALUES[] = { - PROPERTY_STATUS_SUPPORTED, PROPERTY_STATUS_EXPERIMENTAL}; - - private static final String PROPERTY_STATUS_DEFAULT = PROPERTY_STATUS_SUPPORTED; - - private String name; - private Object value; - private LinkedHashMap schema; - private String _status; - private boolean _required; - - public PropertyDef(String pdName, Object pdValue, - LinkedHashMap pdSchema) { - name = pdName; - value = pdValue; - schema = pdSchema; - _status = PROPERTY_STATUS_DEFAULT; - _required = PROPERTY_REQUIRED_DEFAULT; - - if(schema != null) { - // Validate required 'type' property exists - if(schema.get("type") == null) { - //msg = (_('Schema definition of "%(pname)s" must have a "type" ' - // 'attribute.') % dict(pname=self.name)) - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE131", String.format( - "InvalidSchemaError: Schema definition of \"%s\" must have a \"type\" attribute",name))); - } - _loadRequiredAttrFromSchema(); - _loadStatusAttrFromSchema(); - } - } - - public Object getDefault() { - if(schema != null) { - for(Map.Entry me: schema.entrySet()) { - if(me.getKey().equals(PROPERTY_KEYNAME_DEFAULT)) { - return me.getValue(); - } - } - } - return null; - } - - public boolean isRequired() { - return _required; - } - - private void 
_loadRequiredAttrFromSchema() { - // IF 'required' keyname exists verify it's a boolean, - // if so override default - Object val = schema.get(PROPERTY_KEYNAME_REQUIRED); - if(val != null) { - if(val instanceof Boolean) { - _required = (boolean)val; - } - else { - //valid_values = ', '.join(self.VALID_REQUIRED_VALUES) - //attr = self.PROPERTY_KEYNAME_REQUIRED - //TOSCAException.generate_inv_schema_property_error(self, - // attr, - // value, - // valid_values) - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE132", String.format( - "Schema definition of \"%s\" has \"required\" attribute with an invalid value", - name))); - } - } - } - - public String getStatus() { - return _status; - } - - private void _loadStatusAttrFromSchema() { - // IF 'status' keyname exists verify it's a boolean, - // if so override default - String sts = (String)schema.get(PROPERTY_KEYNAME_STATUS); - if(sts != null) { - boolean bFound = false; - for(String vsv: VALID_STATUS_VALUES) { - if(vsv.equals(sts)) { - bFound = true; - break; - } - } - if(bFound) { - _status = sts; - } - else { - //valid_values = ', '.join(self.VALID_STATUS_VALUES) - //attr = self.PROPERTY_KEYNAME_STATUS - //TOSCAException.generate_inv_schema_property_error(self, - // attr, - // value, - // valid_values) - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE006", String.format( - "Schema definition of \"%s\" has \"status\" attribute with an invalid value", - name))); - } - } - } - - public String getName() { - return name; - } - - public LinkedHashMap getSchema() { - return schema; - } - - public Object getPDValue() { - // there's getValue in EntityType... 
- return value; - } - -} -/*python - -from toscaparser.common.exception import ValidationIssueCollector -from toscaparser.common.exception import InvalidSchemaError -from toscaparser.common.exception import TOSCAException -from toscaparser.utils.gettextutils import _ - - -class PropertyDef(object): - '''TOSCA built-in Property type.''' - - VALID_PROPERTY_KEYNAMES = (PROPERTY_KEYNAME_DEFAULT, - PROPERTY_KEYNAME_REQUIRED, - PROPERTY_KEYNAME_STATUS) = \ - ('default', 'required', 'status') - - PROPERTY_REQUIRED_DEFAULT = True - - VALID_REQUIRED_VALUES = ['true', 'false'] - VALID_STATUS_VALUES = (PROPERTY_STATUS_SUPPORTED, - PROPERTY_STATUS_EXPERIMENTAL) = \ - ('supported', 'experimental') - - PROPERTY_STATUS_DEFAULT = PROPERTY_STATUS_SUPPORTED - - def __init__(self, name, value=None, schema=None): - self.name = name - self.value = value - self.schema = schema - self._status = self.PROPERTY_STATUS_DEFAULT - self._required = self.PROPERTY_REQUIRED_DEFAULT - - # Validate required 'type' property exists - try: - self.schema['type'] - except KeyError: - msg = (_('Schema definition of "%(pname)s" must have a "type" ' - 'attribute.') % dict(pname=self.name)) - ValidationIssueCollector.appendException( - InvalidSchemaError(message=msg)) - - if self.schema: - self._load_required_attr_from_schema() - self._load_status_attr_from_schema() - - @property - def default(self): - if self.schema: - for prop_key, prop_value in self.schema.items(): - if prop_key == self.PROPERTY_KEYNAME_DEFAULT: - return prop_value - return None - - @property - def required(self): - return self._required - - def _load_required_attr_from_schema(self): - # IF 'required' keyname exists verify it's a boolean, - # if so override default - if self.PROPERTY_KEYNAME_REQUIRED in self.schema: - value = self.schema[self.PROPERTY_KEYNAME_REQUIRED] - if isinstance(value, bool): - self._required = value - else: - valid_values = ', '.join(self.VALID_REQUIRED_VALUES) - attr = self.PROPERTY_KEYNAME_REQUIRED - 
TOSCAException.generate_inv_schema_property_error(self, - attr, - value, - valid_values) - - @property - def status(self): - return self._status - - def _load_status_attr_from_schema(self): - # IF 'status' keyname exists verify it's a valid value, - # if so override default - if self.PROPERTY_KEYNAME_STATUS in self.schema: - value = self.schema[self.PROPERTY_KEYNAME_STATUS] - if value in self.VALID_STATUS_VALUES: - self._status = value - else: - valid_values = ', '.join(self.VALID_STATUS_VALUES) - attr = self.PROPERTY_KEYNAME_STATUS - TOSCAException.generate_inv_schema_property_error(self, - attr, - value, - valid_values) -*/ diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/RelationshipType.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/RelationshipType.java deleted file mode 100644 index 3a56840..0000000 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/RelationshipType.java +++ /dev/null @@ -1,102 +0,0 @@ -package org.openecomp.sdc.toscaparser.api.elements; - -import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; - -import java.util.LinkedHashMap; - -import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; - -public class RelationshipType extends StatefulEntityType { - - private static final String DERIVED_FROM = "derived_from"; - private static final String VALID_TARGET_TYPES = "valid_target_types"; - private static final String INTERFACES = "interfaces"; - private static final String ATTRIBUTES = "attributes"; - private static final String PROPERTIES = "properties"; - private static final String DESCRIPTION = "description"; - private static final String VERSION = "version"; - private static final String CREDENTIAL = "credential"; - - private static final String SECTIONS[] = { - DERIVED_FROM, VALID_TARGET_TYPES, INTERFACES, - ATTRIBUTES, PROPERTIES, DESCRIPTION, VERSION, CREDENTIAL}; - - private String capabilityName; - private LinkedHashMap customDef; - - public 
RelationshipType(String _type, String _capabilityName, LinkedHashMap _customDef) { - super(_type,RELATIONSHIP_PREFIX,_customDef); - capabilityName = _capabilityName; - customDef = _customDef; - } - - public RelationshipType getParentType() { - // Return a relationship this reletionship is derived from.''' - String prel = derivedFrom(defs); - if(prel != null) { - return new RelationshipType(prel,null,customDef); - } - return null; - } - - public Object getValidTargetTypes() { - return entityValue(defs,"valid_target_types"); - } - - private void _validateKeys() { - for(String key: defs.keySet()) { - boolean bFound = false; - for(int i=0; i< SECTIONS.length; i++) { - if(key.equals(SECTIONS[i])) { - bFound = true; - break; - } - } - if(!bFound) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE133", String.format( - "UnknownFieldError: Relationshiptype \"%s\" has unknown field \"%s\"",type,key))); - } - } - } -} - -/*python - -from toscaparser.common.exception import ValidationIssueCollector -from toscaparser.common.exception import UnknownFieldError -from toscaparser.elements.statefulentitytype import StatefulEntityType - - -class RelationshipType(StatefulEntityType): - '''TOSCA built-in relationship type.''' - SECTIONS = (DERIVED_FROM, VALID_TARGET_TYPES, INTERFACES, - ATTRIBUTES, PROPERTIES, DESCRIPTION, VERSION, - CREDENTIAL) = ('derived_from', 'valid_target_types', - 'interfaces', 'attributes', 'properties', - 'description', 'version', 'credential') - - def __init__(self, type, capability_name=None, custom_def=None): - super(RelationshipType, self).__init__(type, self.RELATIONSHIP_PREFIX, - custom_def) - self.capability_name = capability_name - self.custom_def = custom_def - self._validate_keys() - - @property - def parent_type(self): - '''Return a relationship this reletionship is derived from.''' - prel = self.derived_from(self.defs) - if prel: - return RelationshipType(prel, self.custom_def) - - @property - def 
valid_target_types(self): - return self.entity_value(self.defs, 'valid_target_types') - - def _validate_keys(self): - for key in self.defs.keys(): - if key not in self.SECTIONS: - ValidationIssueCollector.appendException( - UnknownFieldError(what='Relationshiptype "%s"' % self.type, - field=key)) -*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/ScalarUnit.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/ScalarUnit.java deleted file mode 100644 index 77cd4c2..0000000 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/ScalarUnit.java +++ /dev/null @@ -1,262 +0,0 @@ -package org.openecomp.sdc.toscaparser.api.elements; - -import java.util.HashMap; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; -import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; -import org.openecomp.sdc.toscaparser.api.utils.ValidateUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public abstract class ScalarUnit { - - private static Logger log = LoggerFactory.getLogger(ScalarUnit.class.getName()); - - private static final String SCALAR_UNIT_SIZE = "scalar-unit.size"; - private static final String SCALAR_UNIT_FREQUENCY = "scalar-unit.frequency"; - private static final String SCALAR_UNIT_TIME = "scalar-unit.time"; - - public static final String SCALAR_UNIT_TYPES[] = { - SCALAR_UNIT_SIZE, SCALAR_UNIT_FREQUENCY, SCALAR_UNIT_TIME - }; - - private Object value; - protected HashMap SCALAR_UNIT_DICT; - protected String SCALAR_UNIT_DEFAULT; - - public ScalarUnit(Object _value) { - value = _value; - SCALAR_UNIT_DICT = new HashMap<>(); - SCALAR_UNIT_DEFAULT = ""; - } - - - private String _checkUnitInScalarStandardUnits(String inputUnit) { - // Check whether the input unit is following specified standard - - // If unit is not following specified standard, convert it to standard - // unit after displaying a 
warning message. - - if(SCALAR_UNIT_DICT.get(inputUnit) != null) { - return inputUnit; - } - else { - for(String key: SCALAR_UNIT_DICT.keySet()) { - if(key.toUpperCase().equals(inputUnit.toUpperCase())) { - log.debug("ScalarUnit - _checkUnitInScalarStandardUnits - \n" + - "The unit {} does not follow scalar unit standards\n" + - "using {} instead", - inputUnit, key); - return key; - } - } - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE007", String.format( - "'The unit \"%s\" is not valid. Valid units are \n%s", - inputUnit,SCALAR_UNIT_DICT.keySet().toString()))); - return inputUnit; - } - } - - public Object validateScalarUnit() { - Pattern pattern = Pattern.compile("([0-9.]+)\\s*(\\w+)"); - Matcher matcher = pattern.matcher(value.toString()); - if(matcher.find()) { - ValidateUtils.strToNum(matcher.group(1)); - String scalarUnit = _checkUnitInScalarStandardUnits(matcher.group(2)); - value = matcher.group(1) + " " + scalarUnit; - } - else { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE134", String.format( - "ValueError: \"%s\" is not a valid scalar-unit",value.toString()))); - } - return value; - } - - public double getNumFromScalarUnit(String unit) { - if(unit != null) { - unit = _checkUnitInScalarStandardUnits(unit); - } - else { - unit = SCALAR_UNIT_DEFAULT; - } - Pattern pattern = Pattern.compile("([0-9.]+)\\s*(\\w+)"); - Matcher matcher = pattern.matcher(value.toString()); - if(matcher.find()) { - ValidateUtils.strToNum(matcher.group(1)); - String scalarUnit = _checkUnitInScalarStandardUnits(matcher.group(2)); - value = matcher.group(1) + " " + scalarUnit; - Object on1 = ValidateUtils.strToNum(matcher.group(1)) != null ? ValidateUtils.strToNum(matcher.group(1)) : 0; - Object on2 = SCALAR_UNIT_DICT.get(matcher.group(2)) != null ? SCALAR_UNIT_DICT.get(matcher.group(2)) : 0; - Object on3 = SCALAR_UNIT_DICT.get(unit) != null ? 
SCALAR_UNIT_DICT.get(unit) : 0; - - Double n1 = new Double(on1.toString()); - Double n2 = new Double(on2.toString()); - Double n3 = new Double(on3.toString()); - double converted = n1 * n2 / n3; - if(Math.abs(converted - Math.round(converted)) < 0.0000000000001 ) { - converted = Math.round(converted); - } - return converted; - } - return 0l; //??? - } - - protected static HashMap scalarunitMapping = _getScalarunitMappings(); - - private static HashMap _getScalarunitMappings() { - HashMap map = new HashMap<>(); - map.put(SCALAR_UNIT_FREQUENCY,"ScalarUnitFrequency"); - map.put(SCALAR_UNIT_SIZE, "ScalarUnitSize"); - map.put(SCALAR_UNIT_TIME, "ScalarUnit_Time"); - return map; - } - - public static ScalarUnit getScalarunitClass(String type,Object val) { - if(type.equals(SCALAR_UNIT_SIZE)) { - return new ScalarUnitSize(val); - } - else if(type.equals(SCALAR_UNIT_TIME)) { - return new ScalarUnitTime(val); - } - else if(type.equals(SCALAR_UNIT_FREQUENCY)) { - return new ScalarUnitFrequency(val); - } - return null; - } - - public static double getScalarunitValue(String type, Object value, String unit) { - if(type.equals(SCALAR_UNIT_SIZE)) { - return (new ScalarUnitSize(value)).getNumFromScalarUnit(unit); - } - if(type.equals(SCALAR_UNIT_TIME)) { - return (new ScalarUnitTime(value)).getNumFromScalarUnit(unit); - } - if(type.equals(SCALAR_UNIT_FREQUENCY)) { - return (new ScalarUnitFrequency(value)).getNumFromScalarUnit(unit); - } - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE135", String.format( - "TypeError: \"%s\" is not a valid scalar-unit type",type))); - return 0.0; - } - -} - -/*python - -from toscaparser.common.exception import ValidationIssueCollector -from toscaparser.utils.gettextutils import _ -from toscaparser.utils import validateutils - -log = logging.getLogger('tosca') - - -class ScalarUnit(object): - '''Parent class for scalar-unit type.''' - - SCALAR_UNIT_TYPES = ( - SCALAR_UNIT_SIZE, SCALAR_UNIT_FREQUENCY, 
SCALAR_UNIT_TIME - ) = ( - 'scalar-unit.size', 'scalar-unit.frequency', 'scalar-unit.time' - ) - - def __init__(self, value): - self.value = value - - def _check_unit_in_scalar_standard_units(self, input_unit): - """Check whether the input unit is following specified standard - - If unit is not following specified standard, convert it to standard - unit after displaying a warning message. - """ - if input_unit in self.SCALAR_UNIT_DICT.keys(): - return input_unit - else: - for key in self.SCALAR_UNIT_DICT.keys(): - if key.upper() == input_unit.upper(): - log.warning(_('The unit "%(unit)s" does not follow ' - 'scalar unit standards; using "%(key)s" ' - 'instead.') % {'unit': input_unit, - 'key': key}) - return key - msg = (_('The unit "%(unit)s" is not valid. Valid units are ' - '"%(valid_units)s".') % - {'unit': input_unit, - 'valid_units': sorted(self.SCALAR_UNIT_DICT.keys())}) - ValidationIssueCollector.appendException(ValueError(msg)) - - def validate_scalar_unit(self): - regex = re.compile('([0-9.]+)\s*(\w+)') - try: - result = regex.match(str(self.value)).groups() - validateutils.str_to_num(result[0]) - scalar_unit = self._check_unit_in_scalar_standard_units(result[1]) - self.value = ' '.join([result[0], scalar_unit]) - return self.value - - except Exception: - ValidationIssueCollector.appendException( - ValueError(_('"%s" is not a valid scalar-unit.') - % self.value)) - - def get_num_from_scalar_unit(self, unit=None): - if unit: - unit = self._check_unit_in_scalar_standard_units(unit) - else: - unit = self.SCALAR_UNIT_DEFAULT - self.validate_scalar_unit() - - regex = re.compile('([0-9.]+)\s*(\w+)') - result = regex.match(str(self.value)).groups() - converted = (float(validateutils.str_to_num(result[0])) - * self.SCALAR_UNIT_DICT[result[1]] - / self.SCALAR_UNIT_DICT[unit]) - if converted - int(converted) < 0.0000000000001: - converted = int(converted) - return converted - - -class ScalarUnit_Size(ScalarUnit): - - SCALAR_UNIT_DEFAULT = 'B' - SCALAR_UNIT_DICT = 
{'B': 1, 'kB': 1000, 'KiB': 1024, 'MB': 1000000, - 'MiB': 1048576, 'GB': 1000000000, - 'GiB': 1073741824, 'TB': 1000000000000, - 'TiB': 1099511627776} - - -class ScalarUnit_Time(ScalarUnit): - - SCALAR_UNIT_DEFAULT = 'ms' - SCALAR_UNIT_DICT = {'d': 86400, 'h': 3600, 'm': 60, 's': 1, - 'ms': 0.001, 'us': 0.000001, 'ns': 0.000000001} - - -class ScalarUnit_Frequency(ScalarUnit): - - SCALAR_UNIT_DEFAULT = 'GHz' - SCALAR_UNIT_DICT = {'Hz': 1, 'kHz': 1000, - 'MHz': 1000000, 'GHz': 1000000000} - - -scalarunit_mapping = { - ScalarUnit.SCALAR_UNIT_FREQUENCY: ScalarUnit_Frequency, - ScalarUnit.SCALAR_UNIT_SIZE: ScalarUnit_Size, - ScalarUnit.SCALAR_UNIT_TIME: ScalarUnit_Time, - } - - -def get_scalarunit_class(type): - return scalarunit_mapping.get(type) - - -def get_scalarunit_value(type, value, unit=None): - if type in ScalarUnit.SCALAR_UNIT_TYPES: - ScalarUnit_Class = get_scalarunit_class(type) - return (ScalarUnit_Class(value). - get_num_from_scalar_unit(unit)) - else: - ValidationIssueCollector.appendException( - TypeError(_('"%s" is not a valid scalar-unit type.') % type)) -*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/ScalarUnitFrequency.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/ScalarUnitFrequency.java deleted file mode 100644 index 57a111e..0000000 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/ScalarUnitFrequency.java +++ /dev/null @@ -1,14 +0,0 @@ -package org.openecomp.sdc.toscaparser.api.elements; - -public class ScalarUnitFrequency extends ScalarUnit { - - public ScalarUnitFrequency(Object value) { - super(value); - SCALAR_UNIT_DEFAULT = "GHz"; - SCALAR_UNIT_DICT.put("Hz",1L); - SCALAR_UNIT_DICT.put("kHz",1000L); - SCALAR_UNIT_DICT.put("MHz",1000000L); - SCALAR_UNIT_DICT.put("GHz",1000000000L); - } - -} diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/ScalarUnitSize.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/ScalarUnitSize.java 
deleted file mode 100644 index 72e7c33..0000000 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/ScalarUnitSize.java +++ /dev/null @@ -1,19 +0,0 @@ -package org.openecomp.sdc.toscaparser.api.elements; - -public class ScalarUnitSize extends ScalarUnit { - - public ScalarUnitSize(Object value) { - super(value); - - SCALAR_UNIT_DEFAULT = "B"; - SCALAR_UNIT_DICT.put("B",1L); - SCALAR_UNIT_DICT.put("kB",1000L); - SCALAR_UNIT_DICT.put("kiB",1024L); - SCALAR_UNIT_DICT.put("MB",1000000L); - SCALAR_UNIT_DICT.put("MiB",1048576L); - SCALAR_UNIT_DICT.put("GB",1000000000L); - SCALAR_UNIT_DICT.put("GiB",1073741824L); - SCALAR_UNIT_DICT.put("TB",1000000000000L); - SCALAR_UNIT_DICT.put("TiB",1099511627776L); - } -} diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/ScalarUnitTime.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/ScalarUnitTime.java deleted file mode 100644 index 5cde10a..0000000 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/ScalarUnitTime.java +++ /dev/null @@ -1,17 +0,0 @@ -package org.openecomp.sdc.toscaparser.api.elements; - -public class ScalarUnitTime extends ScalarUnit { - - public ScalarUnitTime(Object value) { - super(value); - SCALAR_UNIT_DEFAULT = "ms"; - SCALAR_UNIT_DICT.put("d",86400L); - SCALAR_UNIT_DICT.put("h",3600L); - SCALAR_UNIT_DICT.put("m",60L); - SCALAR_UNIT_DICT.put("s",1L); - SCALAR_UNIT_DICT.put("ms",0.001); - SCALAR_UNIT_DICT.put("us",0.000001); - SCALAR_UNIT_DICT.put("ns",0.000000001); - } - -} diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/StatefulEntityType.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/StatefulEntityType.java deleted file mode 100644 index 0a83cb1..0000000 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/StatefulEntityType.java +++ /dev/null @@ -1,218 +0,0 @@ -package org.openecomp.sdc.toscaparser.api.elements; - -import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; - -import 
java.util.ArrayList; -import java.util.LinkedHashMap; -import java.util.Map; - -import org.openecomp.sdc.toscaparser.api.UnsupportedType; -import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; - - -public class StatefulEntityType extends EntityType { - // Class representing TOSCA states - - public static final String interfacesNodeLifecycleOperations[] = { - "create", "configure", "start", "stop", "delete"}; - - public static final String interfacesRelationshipConfigureOperations[] = { - "post_configure_source", "post_configure_target", "add_target", "remove_target"}; - - public StatefulEntityType() { - // void constructor for subclasses that don't want super - } - - @SuppressWarnings("unchecked") - public StatefulEntityType(String entityType, String prefix, LinkedHashMap customDef) { - - String entireEntityType = entityType; - if(UnsupportedType.validateType(entireEntityType)) { - defs = null; - } - else { - if(entityType.startsWith(TOSCA + ":")) { - entityType = entityType.substring(TOSCA.length()+1); - entireEntityType = prefix + entityType; - } - if(!entityType.startsWith(TOSCA)) { - entireEntityType = prefix + entityType; - } - if(TOSCA_DEF.get(entireEntityType) != null) { - defs = (LinkedHashMap )TOSCA_DEF.get(entireEntityType); - entityType = entireEntityType; - } - else if(customDef != null && customDef.get(entityType) != null) { - defs = (LinkedHashMap )customDef.get(entityType); - } - else{ - defs = null; - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE136", String.format( - "InvalidTypeError: \"%s\" is not a valid type",entityType))); - } - } - type = entityType; - } - - @SuppressWarnings("unchecked") - public ArrayList getPropertiesDefObjects() { - // Return a list of property definition objects - ArrayList properties = new ArrayList(); - LinkedHashMap props = (LinkedHashMap)getDefinition(PROPERTIES); - if(props != null) { - for(Map.Entry me: props.entrySet()) { - String pdname = me.getKey(); - Object 
to = me.getValue(); - if(to == null || !(to instanceof LinkedHashMap)) { - String s = to == null ? "null" : to.getClass().getSimpleName(); - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE137", String.format( - "Unexpected type error: property \"%s\" has type \"%s\" (expected dict)",pdname,s))); - continue; - } - LinkedHashMap pdschema = (LinkedHashMap)to; - properties.add(new PropertyDef(pdname,null,pdschema)); - } - } - return properties; - } - - public LinkedHashMap getPropertiesDef() { - LinkedHashMap pds = new LinkedHashMap(); - for(PropertyDef pd: getPropertiesDefObjects()) { - pds.put(pd.getName(),pd); - } - return pds; - } - - public PropertyDef getPropertyDefValue(String name) { - // Return the property definition associated with a given name - PropertyDef pd = null; - LinkedHashMap propsDef = getPropertiesDef(); - if(propsDef != null) { - pd = propsDef.get(name); - } - return pd; - } - - public ArrayList getAttributesDefObjects() { - // Return a list of attribute definition objects - @SuppressWarnings("unchecked") - LinkedHashMap attrs = (LinkedHashMap)getValue(ATTRIBUTES,null,true); - ArrayList ads = new ArrayList<>(); - if(attrs != null) { - for(Map.Entry me: attrs.entrySet()) { - String attr = me.getKey(); - @SuppressWarnings("unchecked") - LinkedHashMap adschema = (LinkedHashMap)me.getValue(); - ads.add(new AttributeDef(attr,null,adschema)); - } - } - return ads; - } - - public LinkedHashMap getAttributesDef() { - // Return a dictionary of attribute definition name-object pairs - - LinkedHashMap ads = new LinkedHashMap<>(); - for(AttributeDef ado: getAttributesDefObjects()) { - ads.put(((AttributeDef)ado).getName(),ado); - } - return ads; - } - - public AttributeDef getAttributeDefValue(String name) { - // Return the attribute definition associated with a given name - AttributeDef ad = null; - LinkedHashMap attrsDef = getAttributesDef(); - if(attrsDef != null) { - ad = attrsDef.get(name); - } - return ad; - } - - 
public String getType() { - return type; - } - } - -/*python - -from toscaparser.common.exception import InvalidTypeError -from toscaparser.elements.attribute_definition import AttributeDef -from toscaparser.elements.entity_type import EntityType -from toscaparser.elements.property_definition import PropertyDef -from toscaparser.unsupportedtype import UnsupportedType - - -class StatefulEntityType(EntityType): - '''Class representing TOSCA states.''' - - interfaces_node_lifecycle_operations = ['create', - 'configure', 'start', - 'stop', 'delete'] - - interfaces_relationship_configure_operations = ['post_configure_source', - 'post_configure_target', - 'add_target', - 'remove_target'] - - def __init__(self, entitytype, prefix, custom_def=None): - entire_entitytype = entitytype - if UnsupportedType.validate_type(entire_entitytype): - self.defs = None - else: - if entitytype.startswith(self.TOSCA + ":"): - entitytype = entitytype[(len(self.TOSCA) + 1):] - entire_entitytype = prefix + entitytype - if not entitytype.startswith(self.TOSCA): - entire_entitytype = prefix + entitytype - if entire_entitytype in list(self.TOSCA_DEF.keys()): - self.defs = self.TOSCA_DEF[entire_entitytype] - entitytype = entire_entitytype - elif custom_def and entitytype in list(custom_def.keys()): - self.defs = custom_def[entitytype] - else: - self.defs = None - ValidationIssueCollector.appendException( - InvalidTypeError(what=entitytype)) - self.type = entitytype - - def get_properties_def_objects(self): - '''Return a list of property definition objects.''' - properties = [] - props = self.get_definition(self.PROPERTIES) - if props: - for prop, schema in props.items(): - properties.append(PropertyDef(prop, None, schema)) - return properties - - def get_properties_def(self): - '''Return a dictionary of property definition name-object pairs.''' - return {prop.name: prop - for prop in self.get_properties_def_objects()} - - def get_property_def_value(self, name): - '''Return the property definition 
associated with a given name.''' - props_def = self.get_properties_def() - if props_def and name in props_def.keys(): - return props_def[name].value - - def get_attributes_def_objects(self): - '''Return a list of attribute definition objects.''' - attrs = self.get_value(self.ATTRIBUTES, parent=True) - if attrs: - return [AttributeDef(attr, None, schema) - for attr, schema in attrs.items()] - return [] - - def get_attributes_def(self): - '''Return a dictionary of attribute definition name-object pairs.''' - return {attr.name: attr - for attr in self.get_attributes_def_objects()} - - def get_attribute_def_value(self, name): - '''Return the attribute definition associated with a given name.''' - attrs_def = self.get_attributes_def() - if attrs_def and name in attrs_def.keys(): - return attrs_def[name].value -*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/TypeValidation.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/TypeValidation.java deleted file mode 100644 index 7bfe333..0000000 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/TypeValidation.java +++ /dev/null @@ -1,153 +0,0 @@ -package org.openecomp.sdc.toscaparser.api.elements; - -import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; - -import java.util.ArrayList; -import java.util.LinkedHashMap; - -import org.openecomp.sdc.toscaparser.api.extensions.ExtTools; -import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; - -public class TypeValidation { - - private static final String DEFINITION_VERSION = "tosca_definitions_version"; - private static final String DESCRIPTION = "description"; - private static final String IMPORTS = "imports"; - private static final String DSL_DEFINITIONS = "dsl_definitions"; - private static final String NODE_TYPES = "node_types"; - private static final String REPOSITORIES = "repositories"; - private static final String DATA_TYPES = "data_types"; - private static final 
String ARTIFACT_TYPES = "artifact_types"; - private static final String GROUP_TYPES = "group_types"; - private static final String RELATIONSHIP_TYPES = "relationship_types"; - private static final String CAPABILITY_TYPES = "capability_types"; - private static final String INTERFACE_TYPES = "interface_types"; - private static final String POLICY_TYPES = "policy_types"; - private static final String TOPOLOGY_TEMPLATE = "topology_template"; - //Pavel - private static final String METADATA = "metadata"; - - private String ALLOWED_TYPE_SECTIONS[] = { - DEFINITION_VERSION, DESCRIPTION, IMPORTS, - DSL_DEFINITIONS, NODE_TYPES, REPOSITORIES, - DATA_TYPES, ARTIFACT_TYPES, GROUP_TYPES, - RELATIONSHIP_TYPES, CAPABILITY_TYPES, - INTERFACE_TYPES, POLICY_TYPES, - TOPOLOGY_TEMPLATE, METADATA - }; - - private static ArrayList VALID_TEMPLATE_VERSIONS = _getVTV(); - - private static ArrayList _getVTV() { - ArrayList vtv = new ArrayList<>(); - vtv.add("tosca_simple_yaml_1_0"); - vtv.add("tosca_simple_yaml_1_1"); - ExtTools exttools = new ExtTools(); - vtv.addAll(exttools.getVersions()); - return vtv; - } - - //private LinkedHashMap customTypes; - private Object importDef; - //private String version; - - public TypeValidation(LinkedHashMap _customTypes, - Object _importDef) { - importDef = _importDef; - _validateTypeKeys(_customTypes); - } - - private void _validateTypeKeys(LinkedHashMap customTypes) { - - String sVersion = (String)customTypes.get(DEFINITION_VERSION); - if(sVersion != null) { - _validateTypeVersion(sVersion); - //version = sVersion; - } - for(String name: customTypes.keySet()) { - boolean bFound = false; - for(String ats: ALLOWED_TYPE_SECTIONS) { - if(name.equals(ats)) { - bFound = true; - break; - } - } - if(!bFound) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE138", String.format( - "UnknownFieldError: Template \"%s\" contains unknown field \"%s\"", - importDef.toString(),name))); - } - } - } - - private void 
_validateTypeVersion(String sVersion) { - boolean bFound = false; - String allowed = ""; - for(String atv: VALID_TEMPLATE_VERSIONS) { - allowed += "\"" + atv + "\" "; - if(sVersion.equals(atv)) { - bFound = true; - break; - } - } - if(!bFound) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE139", String.format( - "InvalidTemplateVersion: version \"%s\" in \"%s\" is not supported\n" + - "Allowed versions: [%s]", - sVersion,importDef.toString(),allowed))); - } - } -} - -/*python - -from toscaparser.common.exception import ValidationIssueCollector -from toscaparser.common.exception import InvalidTemplateVersion -from toscaparser.common.exception import UnknownFieldError -from toscaparser.extensions.exttools import ExtTools - - -class TypeValidation(object): - - ALLOWED_TYPE_SECTIONS = (DEFINITION_VERSION, DESCRIPTION, IMPORTS, - DSL_DEFINITIONS, NODE_TYPES, REPOSITORIES, - DATA_TYPES, ARTIFACT_TYPES, GROUP_TYPES, - RELATIONSHIP_TYPES, CAPABILITY_TYPES, - INTERFACE_TYPES, POLICY_TYPES, - TOPOLOGY_TEMPLATE) = \ - ('tosca_definitions_version', 'description', 'imports', - 'dsl_definitions', 'node_types', 'repositories', - 'data_types', 'artifact_types', 'group_types', - 'relationship_types', 'capability_types', - 'interface_types', 'policy_types', 'topology_template') - VALID_TEMPLATE_VERSIONS = ['tosca_simple_yaml_1_0'] - exttools = ExtTools() - VALID_TEMPLATE_VERSIONS.extend(exttools.get_versions()) - - def __init__(self, custom_types, import_def): - self.import_def = import_def - self._validate_type_keys(custom_types) - - def _validate_type_keys(self, custom_type): - version = custom_type[self.DEFINITION_VERSION] \ - if self.DEFINITION_VERSION in custom_type \ - else None - if version: - self._validate_type_version(version) - self.version = version - - for name in custom_type: - if name not in self.ALLOWED_TYPE_SECTIONS: - ValidationIssueCollector.appendException( -# UnknownFieldError(what='Template ' + (self.import_def), - 
UnknownFieldError(what= (self.import_def), - field=name)) - - def _validate_type_version(self, version): - if version not in self.VALID_TEMPLATE_VERSIONS: - ValidationIssueCollector.appendException( - InvalidTemplateVersion( -# what=version + ' in ' + self.import_def, - what=self.import_def, - valid_versions=', '. join(self.VALID_TEMPLATE_VERSIONS))) -*/ diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Constraint.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Constraint.java deleted file mode 100644 index 5cf7444..0000000 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Constraint.java +++ /dev/null @@ -1,243 +0,0 @@ -package org.openecomp.sdc.toscaparser.api.elements.constraints; - -import java.util.ArrayList; -import java.util.LinkedHashMap; - -import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; -import org.openecomp.sdc.toscaparser.api.elements.ScalarUnit; -import org.openecomp.sdc.toscaparser.api.functions.Function; -import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; - -public abstract class Constraint { - - // Parent class for constraints for a Property or Input - - protected static final String EQUAL = "equal"; - protected static final String GREATER_THAN = "greater_than"; - protected static final String GREATER_OR_EQUAL = "greater_or_equal"; - protected static final String LESS_THAN = "less_than"; - protected static final String LESS_OR_EQUAL = "less_or_equal"; - protected static final String IN_RANGE = "in_range"; - protected static final String VALID_VALUES = "valid_values"; - protected static final String LENGTH = "length"; - protected static final String MIN_LENGTH = "min_length"; - protected static final String MAX_LENGTH = "max_length"; - protected static final String PATTERN = "pattern"; - - protected static final String CONSTRAINTS[] = { - EQUAL, GREATER_THAN,GREATER_OR_EQUAL, LESS_THAN, LESS_OR_EQUAL, - IN_RANGE, VALID_VALUES, 
LENGTH, MIN_LENGTH, MAX_LENGTH, PATTERN}; - - @SuppressWarnings("unchecked") - public static Constraint factory(String constraintClass,String propname,String proptype,Object constraint) { - - // a factory for the different Constraint classes - // replaces Python's __new__() usage - - if(!(constraint instanceof LinkedHashMap) || - ((LinkedHashMap)constraint).size() != 1) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE101", - "InvalidSchemaError: Invalid constraint schema " + constraint.toString())); - } - - if(constraintClass.equals(EQUAL)) { - return new Equal(propname,proptype,constraint); - } - else if(constraintClass.equals(GREATER_THAN)) { - return new GreaterThan(propname,proptype,constraint); - } - else if(constraintClass.equals(GREATER_OR_EQUAL)) { - return new GreaterOrEqual(propname,proptype,constraint); - } - else if(constraintClass.equals(LESS_THAN)) { - return new LessThan(propname,proptype,constraint); - } - else if(constraintClass.equals(LESS_OR_EQUAL)) { - return new LessOrEqual(propname,proptype,constraint); - } - else if(constraintClass.equals(IN_RANGE)) { - return new InRange(propname,proptype,constraint); - } - else if(constraintClass.equals(VALID_VALUES)) { - return new ValidValues(propname,proptype,constraint); - } - else if(constraintClass.equals(LENGTH)) { - return new Length(propname,proptype,constraint); - } - else if(constraintClass.equals(MIN_LENGTH)) { - return new MinLength(propname,proptype,constraint); - } - else if(constraintClass.equals(MAX_LENGTH)) { - return new MaxLength(propname,proptype,constraint); - } - else if(constraintClass.equals(PATTERN)) { - return new Pattern(propname,proptype,constraint); - } - else { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE102", String.format( - "InvalidSchemaError: Invalid property \"%s\"",constraintClass))); - return null; - } - } - - protected String constraintKey = "TBD"; - protected ArrayList validTypes = 
new ArrayList<>(); - protected ArrayList validPropTypes = new ArrayList<>(); - - protected String propertyName; - protected String propertyType; - protected Object constraintValue; - protected Object constraintValueMsg; - protected Object valueMsg; - - @SuppressWarnings("unchecked") - public Constraint(String propname,String proptype,Object constraint) { - - _setValues(); - - propertyName = propname; - propertyType = proptype; - constraintValue = ((LinkedHashMap)constraint).get(constraintKey); - constraintValueMsg = constraintValue; - boolean bFound = false; - for(String s: ScalarUnit.SCALAR_UNIT_TYPES) { - if(s.equals(propertyType)) { - bFound = true; - break; - } - } - if(bFound) { - constraintValue = _getScalarUnitConstraintValue(); - } - // check if constraint is valid for property type - bFound = false; - for(String s: validPropTypes) { - if(s.equals(propertyType)) { - bFound = true; - break; - } - } - if(!bFound) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE103", String.format( - "InvalidSchemaError: Property \"%s\" is not valid for data type \"%s\"", - constraintKey,propertyType))); - } - } - - @SuppressWarnings("unchecked") - private Object _getScalarUnitConstraintValue() { - // code differs from Python because of class creation - if(constraintValue instanceof ArrayList) { - ArrayList ret = new ArrayList<>(); - for(Object v: (ArrayList)constraintValue) { - ScalarUnit su = ScalarUnit.getScalarunitClass(propertyType,v); - ret.add(su.getNumFromScalarUnit(null)); - } - return ret; - } - else { - ScalarUnit su = ScalarUnit.getScalarunitClass(propertyType,constraintValue); - return su.getNumFromScalarUnit(null); - } - } - - public void validate(Object value) { - if (Function.isFunction(value)){ - //skipping constraints check for functions - return; - } - - valueMsg = value; - boolean bFound = false; - for(String s: ScalarUnit.SCALAR_UNIT_TYPES) { - if(s.equals(propertyType)) { - bFound = true; - break; - } - } - 
if(bFound) { - value = ScalarUnit.getScalarunitValue(propertyType,value,null); - } - if(!_isValid(value)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE008", "ValidationError: " + _errMsg(value))); - } - } - - protected abstract boolean _isValid(Object value); - - protected abstract void _setValues(); - - protected abstract String _errMsg(Object value); - -} - -/*python - -class Constraint(object): - '''Parent class for constraints for a Property or Input.''' - - CONSTRAINTS = (EQUAL, GREATER_THAN, - GREATER_OR_EQUAL, LESS_THAN, LESS_OR_EQUAL, IN_RANGE, - VALID_VALUES, LENGTH, MIN_LENGTH, MAX_LENGTH, PATTERN) = \ - ('equal', 'greater_than', 'greater_or_equal', 'less_than', - 'less_or_equal', 'in_range', 'valid_values', 'length', - 'min_length', 'max_length', 'pattern') - - def __new__(cls, property_name, property_type, constraint): - if cls is not Constraint: - return super(Constraint, cls).__new__(cls) - - if(not isinstance(constraint, collections.Mapping) or - len(constraint) != 1): - ValidationIssueCollector.appendException( - InvalidSchemaError(message=_('Invalid constraint schema.'))) - - for type in constraint.keys(): - ConstraintClass = get_constraint_class(type) - if not ConstraintClass: - msg = _('Invalid property "%s".') % type - ValidationIssueCollector.appendException( - InvalidSchemaError(message=msg)) - - return ConstraintClass(property_name, property_type, constraint) - - def __init__(self, property_name, property_type, constraint): - self.property_name = property_name - self.property_type = property_type - self.constraint_value = constraint[self.constraint_key] - self.constraint_value_msg = self.constraint_value - if self.property_type in scalarunit.ScalarUnit.SCALAR_UNIT_TYPES: - self.constraint_value = self._get_scalarunit_constraint_value() - # check if constraint is valid for property type - if property_type not in self.valid_prop_types: - msg = _('Property "%(ctype)s" is not valid for data type ' - 
'"%(dtype)s".') % dict( - ctype=self.constraint_key, - dtype=property_type) - ValidationIssueCollector.appendException(InvalidSchemaError(message=msg)) - - def _get_scalarunit_constraint_value(self): - if self.property_type in scalarunit.ScalarUnit.SCALAR_UNIT_TYPES: - ScalarUnit_Class = (scalarunit. - get_scalarunit_class(self.property_type)) - if isinstance(self.constraint_value, list): - return [ScalarUnit_Class(v).get_num_from_scalar_unit() - for v in self.constraint_value] - else: - return (ScalarUnit_Class(self.constraint_value). - get_num_from_scalar_unit()) - - def _err_msg(self, value): - return _('Property "%s" could not be validated.') % self.property_name - - def validate(self, value): - self.value_msg = value - if self.property_type in scalarunit.ScalarUnit.SCALAR_UNIT_TYPES: - value = scalarunit.get_scalarunit_value(self.property_type, value) - if not self._is_valid(value): - err_msg = self._err_msg(value) - ValidationIssueCollector.appendException( - ValidationError(message=err_msg)) - - -*/ diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Equal.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Equal.java deleted file mode 100644 index e16cac3..0000000 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Equal.java +++ /dev/null @@ -1,61 +0,0 @@ -package org.openecomp.sdc.toscaparser.api.elements.constraints; - -public class Equal extends Constraint { - - protected void _setValues() { - - constraintKey = EQUAL; - - for(String s: Schema.PROPERTY_TYPES) { - validPropTypes.add(s); - } - - } - - public Equal(String name,String type,Object c) { - super(name,type,c); - - } - - protected boolean _isValid(Object val) { - // equality of objects is tricky so we're comparing - // the toString() representation - if(val.toString().equals(constraintValue.toString())) { - return true; - } - return false; - } - - protected String _errMsg(Object value) { - return String.format("The value 
\"%s\" of property \"%s\" is not equal to \"%s\"", - valueMsg,propertyName,constraintValueMsg); - } - -} - -/*python - -class Equal(Constraint): -"""Constraint class for "equal" - -Constrains a property or parameter to a value equal to ('=') -the value declared. -""" - -constraint_key = Constraint.EQUAL - -valid_prop_types = Schema.PROPERTY_TYPES - -def _is_valid(self, value): - if value == self.constraint_value: - return True - - return False - -def _err_msg(self, value): - return (_('The value "%(pvalue)s" of property "%(pname)s" is not ' - 'equal to "%(cvalue)s".') % - dict(pname=self.property_name, - pvalue=self.value_msg, - cvalue=self.constraint_value_msg)) -*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/GreaterOrEqual.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/GreaterOrEqual.java deleted file mode 100644 index ad6183e..0000000 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/GreaterOrEqual.java +++ /dev/null @@ -1,114 +0,0 @@ -package org.openecomp.sdc.toscaparser.api.elements.constraints; - -import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; - -import java.util.Date; - -import org.openecomp.sdc.toscaparser.api.functions.Function; -import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; - -public class GreaterOrEqual extends Constraint { - // Constraint class for "greater_or_equal" - - // Constrains a property or parameter to a value greater than or equal - // to ('>=') the value declared. 
- - protected void _setValues() { - - constraintKey = GREATER_OR_EQUAL; - - validTypes.add("Integer"); - validTypes.add("Double"); - validTypes.add("Float"); - // timestamps are loaded as Date objects - validTypes.add("Date"); - //validTypes.add("datetime.date"); - //validTypes.add("datetime.time"); - //validTypes.add("datetime.datetime"); - - validPropTypes.add(Schema.INTEGER); - validPropTypes.add(Schema.FLOAT); - validPropTypes.add(Schema.TIMESTAMP); - validPropTypes.add(Schema.SCALAR_UNIT_SIZE); - validPropTypes.add(Schema.SCALAR_UNIT_FREQUENCY); - validPropTypes.add(Schema.SCALAR_UNIT_TIME); - - } - - public GreaterOrEqual(String name,String type,Object c) { - super(name,type,c); - - if(!validTypes.contains(constraintValue.getClass().getSimpleName())) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE104", "InvalidSchemaError: The property \"greater_or_equal\" expects comparable values")); - } - } - - - - @Override - protected boolean _isValid(Object value) { - if(Function.isFunction(value)) { - return true; - } - - // timestamps - if(value instanceof Date) { - if(constraintValue instanceof Date) { - return !((Date)value).before((Date)constraintValue); - } - return false; - } - // all others - Double n1 = new Double(value.toString()); - Double n2 = new Double(constraintValue.toString()); - return n1 >= n2; - } - - protected String _errMsg(Object value) { - return String.format("The value \"%s\" of property \"%s\" must be greater or equal to \"%s\"", - valueMsg,propertyName,constraintValueMsg); - } -} - -/*python - -class GreaterOrEqual(Constraint): -"""Constraint class for "greater_or_equal" - -Constrains a property or parameter to a value greater than or equal -to ('>=') the value declared. 
-""" - -constraint_key = Constraint.GREATER_OR_EQUAL - -valid_types = (int, float, datetime.date, - datetime.time, datetime.datetime) - -valid_prop_types = (Schema.INTEGER, Schema.FLOAT, Schema.TIMESTAMP, - Schema.SCALAR_UNIT_SIZE, Schema.SCALAR_UNIT_FREQUENCY, - Schema.SCALAR_UNIT_TIME) - -def __init__(self, property_name, property_type, constraint): - super(GreaterOrEqual, self).__init__(property_name, property_type, - constraint) - if not isinstance(self.constraint_value, self.valid_types): - ThreadLocalsHolder.getCollector().appendException( - InvalidSchemaError(message=_('The property ' - '"greater_or_equal" expects ' - 'comparable values.'))) - -def _is_valid(self, value): - if toscaparser.functions.is_function(value) or \ - value >= self.constraint_value: - return True - return False - -def _err_msg(self, value): - return (_('The value "%(pvalue)s" of property "%(pname)s" must be ' - 'greater than or equal to "%(cvalue)s".') % - dict(pname=self.property_name, - pvalue=self.value_msg, - cvalue=self.constraint_value_msg)) - - -*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/GreaterThan.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/GreaterThan.java deleted file mode 100644 index b9e06b3..0000000 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/GreaterThan.java +++ /dev/null @@ -1,103 +0,0 @@ -package org.openecomp.sdc.toscaparser.api.elements.constraints; - -import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; - -import java.util.Date; - -import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; - -public class GreaterThan extends Constraint { - - @Override - protected void _setValues() { - - constraintKey = GREATER_THAN; - - validTypes.add("Integer"); - validTypes.add("Double"); - validTypes.add("Float"); - // timestamps are loaded as Date objects - validTypes.add("Date"); - //validTypes.add("datetime.date"); - 
//validTypes.add("datetime.time"); - //validTypes.add("datetime.datetime"); - - - validPropTypes.add(Schema.INTEGER); - validPropTypes.add(Schema.FLOAT); - validPropTypes.add(Schema.TIMESTAMP); - validPropTypes.add(Schema.SCALAR_UNIT_SIZE); - validPropTypes.add(Schema.SCALAR_UNIT_FREQUENCY); - validPropTypes.add(Schema.SCALAR_UNIT_TIME); - - } - - public GreaterThan(String name,String type,Object c) { - super(name,type,c); - - if(!validTypes.contains(constraintValue.getClass().getSimpleName())) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE105", "InvalidSchemaError: The property \"greater_than\" expects comparable values")); - } - } - - @Override - protected boolean _isValid(Object value) { - - // timestamps - if(value instanceof Date) { - if(constraintValue instanceof Date) { - return ((Date)value).after((Date)constraintValue); - } - return false; - } - - Double n1 = new Double(value.toString()); - Double n2 = new Double(constraintValue.toString()); - return n1 > n2; - } - - protected String _errMsg(Object value) { - return String.format("The value \"%s\" of property \"%s\" must be greater than \"%s\"", - valueMsg,propertyName,constraintValueMsg); - } - -} - -/* -class GreaterThan(Constraint): - """Constraint class for "greater_than" - - Constrains a property or parameter to a value greater than ('>') - the value declared. 
- """ - - constraint_key = Constraint.GREATER_THAN - - valid_types = (int, float, datetime.date, - datetime.time, datetime.datetime) - - valid_prop_types = (Schema.INTEGER, Schema.FLOAT, Schema.TIMESTAMP, - Schema.SCALAR_UNIT_SIZE, Schema.SCALAR_UNIT_FREQUENCY, - Schema.SCALAR_UNIT_TIME) - - def __init__(self, property_name, property_type, constraint): - super(GreaterThan, self).__init__(property_name, property_type, - constraint) - if not isinstance(constraint[self.GREATER_THAN], self.valid_types): - ValidationIsshueCollector.appendException( - InvalidSchemaError(message=_('The property "greater_than" ' - 'expects comparable values.'))) - - def _is_valid(self, value): - if value > self.constraint_value: - return True - - return False - - def _err_msg(self, value): - return (_('The value "%(pvalue)s" of property "%(pname)s" must be ' - 'greater than "%(cvalue)s".') % - dict(pname=self.property_name, - pvalue=self.value_msg, - cvalue=self.constraint_value_msg)) -*/ diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/InRange.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/InRange.java deleted file mode 100644 index 7d0d654..0000000 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/InRange.java +++ /dev/null @@ -1,172 +0,0 @@ -package org.openecomp.sdc.toscaparser.api.elements.constraints; - -import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; - -import java.util.Date; - -import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; - -import java.util.ArrayList; - -public class InRange extends Constraint { - // Constraint class for "in_range" - - //Constrains a property or parameter to a value in range of (inclusive) - //the two values declared. 
- - private static final String UNBOUNDED = "UNBOUNDED"; - - private Object min,max; - - protected void _setValues() { - - constraintKey = IN_RANGE; - - validTypes.add("Integer"); - validTypes.add("Double"); - validTypes.add("Float"); - validTypes.add("String"); - // timestamps are loaded as Date objects - validTypes.add("Date"); - //validTypes.add("datetime.date"); - //validTypes.add("datetime.time"); - //validTypes.add("datetime.datetime"); - - validPropTypes.add(Schema.INTEGER); - validPropTypes.add(Schema.FLOAT); - validPropTypes.add(Schema.TIMESTAMP); - validPropTypes.add(Schema.SCALAR_UNIT_SIZE); - validPropTypes.add(Schema.SCALAR_UNIT_FREQUENCY); - validPropTypes.add(Schema.SCALAR_UNIT_TIME); - validPropTypes.add(Schema.RANGE); - - } - - @SuppressWarnings("unchecked") - public InRange(String name,String type,Object c) { - super(name,type,c); - - if(!(constraintValue instanceof ArrayList) || ((ArrayList)constraintValue).size() != 2) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE106", "InvalidSchemaError: The property \"in_range\" expects a list")); - - } - - ArrayList alcv = (ArrayList)constraintValue; - String msg = "The property \"in_range\" expects comparable values"; - for(Object vo: alcv) { - if(!validTypes.contains(vo.getClass().getSimpleName())) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE107", "InvalidSchemaError: " + msg)); - } - // The only string we allow for range is the special value 'UNBOUNDED' - if((vo instanceof String) && !((String)vo).equals(UNBOUNDED)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE108", "InvalidSchemaError: " + msg)); - } - } - min = alcv.get(0); - max = alcv.get(1); - - } - - @Override - protected boolean _isValid(Object value) { - - // timestamps - if(value instanceof Date) { - if(min instanceof Date && max instanceof Date) { - return !((Date)value).before((Date)min) && - 
!((Date)value).after((Date)max); - } - return false; - } - - Double dvalue = new Double(value.toString()); - if(!(min instanceof String)) { - if(dvalue < new Double(min.toString())) { - return false; - } - } - else if(!((String)min).equals(UNBOUNDED)) { - return false; - } - if(!(max instanceof String)) { - if(dvalue > new Double(max.toString())) { - return false; - } - } - else if(!((String)max).equals(UNBOUNDED)) { - return false; - } - return true; - } - - @Override - protected String _errMsg(Object value) { - return String.format("The value \"%s\" of property \"%s\" is out of range \"(min:%s, max:%s)\"", - valueMsg,propertyName,min.toString(),max.toString()); - } - -} - -/*python - -class InRange(Constraint): - """Constraint class for "in_range" - - Constrains a property or parameter to a value in range of (inclusive) - the two values declared. - """ - UNBOUNDED = 'UNBOUNDED' - - constraint_key = Constraint.IN_RANGE - - valid_types = (int, float, datetime.date, - datetime.time, datetime.datetime, str) - - valid_prop_types = (Schema.INTEGER, Schema.FLOAT, Schema.TIMESTAMP, - Schema.SCALAR_UNIT_SIZE, Schema.SCALAR_UNIT_FREQUENCY, - Schema.SCALAR_UNIT_TIME, Schema.RANGE) - - def __init__(self, property_name, property_type, constraint): - super(InRange, self).__init__(property_name, property_type, constraint) - if(not isinstance(self.constraint_value, collections.Sequence) or - (len(constraint[self.IN_RANGE]) != 2)): - ValidationIssueCollector.appendException( - InvalidSchemaError(message=_('The property "in_range" ' - 'expects a list.'))) - - msg = _('The property "in_range" expects comparable values.') - for value in self.constraint_value: - if not isinstance(value, self.valid_types): - ValidationIssueCollector.appendException( - InvalidSchemaError(message=msg)) - # The only string we allow for range is the special value - # 'UNBOUNDED' - if(isinstance(value, str) and value != self.UNBOUNDED): - ValidationIssueCollector.appendException( - 
InvalidSchemaError(message=msg)) - - self.min = self.constraint_value[0] - self.max = self.constraint_value[1] - - def _is_valid(self, value): - if not isinstance(self.min, str): - if value < self.min: - return False - elif self.min != self.UNBOUNDED: - return False - if not isinstance(self.max, str): - if value > self.max: - return False - elif self.max != self.UNBOUNDED: - return False - return True - - def _err_msg(self, value): - return (_('The value "%(pvalue)s" of property "%(pname)s" is out of ' - 'range "(min:%(vmin)s, max:%(vmax)s)".') % - dict(pname=self.property_name, - pvalue=self.value_msg, - vmin=self.constraint_value_msg[0], - vmax=self.constraint_value_msg[1])) - -*/ diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Length.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Length.java deleted file mode 100644 index c94cda5..0000000 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Length.java +++ /dev/null @@ -1,80 +0,0 @@ -package org.openecomp.sdc.toscaparser.api.elements.constraints; - -import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; - -import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; - -public class Length extends Constraint { - // Constraint class for "length" - - // Constrains the property or parameter to a value of a given length. 
- - @Override - protected void _setValues() { - - constraintKey = LENGTH; - - validTypes.add("Integer"); - - validPropTypes.add(Schema.STRING); - - } - - public Length(String name,String type,Object c) { - super(name,type,c); - - if(!validTypes.contains(constraintValue.getClass().getSimpleName())) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE109", "InvalidSchemaError: The property \"length\" expects an integer")); - } - } - - @Override - protected boolean _isValid(Object value) { - if(value instanceof String && constraintValue instanceof Integer && - ((String)value).length() == (Integer)constraintValue) { - return true; - } - return false; - } - - @Override - protected String _errMsg(Object value) { - return String.format("Length of value \"%s\" of property \"%s\" must be equal to \"%s\"", - value.toString(),propertyName,constraintValue.toString()); - } - -} - -/*python - class Length(Constraint): - """Constraint class for "length" - - Constrains the property or parameter to a value of a given length. 
- """ - - constraint_key = Constraint.LENGTH - - valid_types = (int, ) - - valid_prop_types = (Schema.STRING, ) - - def __init__(self, property_name, property_type, constraint): - super(Length, self).__init__(property_name, property_type, constraint) - if not isinstance(self.constraint_value, self.valid_types): - ValidationIsshueCollector.appendException( - InvalidSchemaError(message=_('The property "length" expects ' - 'an integer.'))) - - def _is_valid(self, value): - if isinstance(value, str) and len(value) == self.constraint_value: - return True - - return False - - def _err_msg(self, value): - return (_('Length of value "%(pvalue)s" of property "%(pname)s" ' - 'must be equal to "%(cvalue)s".') % - dict(pname=self.property_name, - pvalue=value, - cvalue=self.constraint_value)) -*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/LessOrEqual.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/LessOrEqual.java deleted file mode 100644 index 1601e27..0000000 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/LessOrEqual.java +++ /dev/null @@ -1,107 +0,0 @@ -package org.openecomp.sdc.toscaparser.api.elements.constraints; - -import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; - -import java.util.Date; - -import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; - -public class LessOrEqual extends Constraint { - // Constraint class for "less_or_equal" - - // Constrains a property or parameter to a value less than or equal - // to ('<=') the value declared. 
- - protected void _setValues() { - - constraintKey = LESS_OR_EQUAL; - - validTypes.add("Integer"); - validTypes.add("Double"); - validTypes.add("Float"); - // timestamps are loaded as Date objects - validTypes.add("Date"); - //validTypes.add("datetime.date"); - //validTypes.add("datetime.time"); - //validTypes.add("datetime.datetime"); - - validPropTypes.add(Schema.INTEGER); - validPropTypes.add(Schema.FLOAT); - validPropTypes.add(Schema.TIMESTAMP); - validPropTypes.add(Schema.SCALAR_UNIT_SIZE); - validPropTypes.add(Schema.SCALAR_UNIT_FREQUENCY); - validPropTypes.add(Schema.SCALAR_UNIT_TIME); - - } - - public LessOrEqual(String name,String type,Object c) { - super(name,type,c); - - if(!validTypes.contains(constraintValue.getClass().getSimpleName())) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE110", "InvalidSchemaError: The property \"less_or_equal\" expects comparable values")); - } - } - - @Override - protected boolean _isValid(Object value) { - - // timestamps - if(value instanceof Date) { - if(constraintValue instanceof Date) { - return !((Date)value).after((Date)constraintValue); - } - return false; - } - - Double n1 = new Double(value.toString()); - Double n2 = new Double(constraintValue.toString()); - return n1 <= n2; - } - - @Override - protected String _errMsg(Object value) { - return String.format("The value \"%s\" of property \"%s\" must be less or equal to \"%s\"", - valueMsg,propertyName,constraintValueMsg); - } - -} - -/*python - -class LessOrEqual(Constraint): - """Constraint class for "less_or_equal" - - Constrains a property or parameter to a value less than or equal - to ('<=') the value declared. 
- """ - - constraint_key = Constraint.LESS_OR_EQUAL - - valid_types = (int, float, datetime.date, - datetime.time, datetime.datetime) - - valid_prop_types = (Schema.INTEGER, Schema.FLOAT, Schema.TIMESTAMP, - Schema.SCALAR_UNIT_SIZE, Schema.SCALAR_UNIT_FREQUENCY, - Schema.SCALAR_UNIT_TIME) - - def __init__(self, property_name, property_type, constraint): - super(LessOrEqual, self).__init__(property_name, property_type, - constraint) - if not isinstance(self.constraint_value, self.valid_types): - ValidationIsshueCollector.appendException( - InvalidSchemaError(message=_('The property "less_or_equal" ' - 'expects comparable values.'))) - - def _is_valid(self, value): - if value <= self.constraint_value: - return True - - return False - - def _err_msg(self, value): - return (_('The value "%(pvalue)s" of property "%(pname)s" must be ' - 'less than or equal to "%(cvalue)s".') % - dict(pname=self.property_name, - pvalue=self.value_msg, - cvalue=self.constraint_value_msg)) -*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/LessThan.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/LessThan.java deleted file mode 100644 index b867fa7..0000000 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/LessThan.java +++ /dev/null @@ -1,105 +0,0 @@ -package org.openecomp.sdc.toscaparser.api.elements.constraints; - -import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; - -import java.util.Date; - -import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; - -public class LessThan extends Constraint { - - @Override - protected void _setValues() { - - constraintKey = LESS_THAN; - - validTypes.add("Integer"); - validTypes.add("Double"); - validTypes.add("Float"); - // timestamps are loaded as Date objects - validTypes.add("Date"); - //validTypes.add("datetime.date"); - //validTypes.add("datetime.time"); - //validTypes.add("datetime.datetime"); - - - 
validPropTypes.add(Schema.INTEGER); - validPropTypes.add(Schema.FLOAT); - validPropTypes.add(Schema.TIMESTAMP); - validPropTypes.add(Schema.SCALAR_UNIT_SIZE); - validPropTypes.add(Schema.SCALAR_UNIT_FREQUENCY); - validPropTypes.add(Schema.SCALAR_UNIT_TIME); - - } - - public LessThan(String name,String type,Object c) { - super(name,type,c); - - if(!validTypes.contains(constraintValue.getClass().getSimpleName())) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE111", "InvalidSchemaError: The property \"less_than\" expects comparable values")); - } - } - - @Override - protected boolean _isValid(Object value) { - - // timestamps - if(value instanceof Date) { - if(constraintValue instanceof Date) { - return ((Date)value).before((Date)constraintValue); - } - return false; - } - - Double n1 = new Double(value.toString()); - Double n2 = new Double(constraintValue.toString()); - return n1 < n2; - } - - @Override - protected String _errMsg(Object value) { - return String.format("The value \"%s\" of property \"%s\" must be less than \"%s\"", - valueMsg,propertyName,constraintValueMsg); - } - -} - -/*python - -class LessThan(Constraint): -"""Constraint class for "less_than" - -Constrains a property or parameter to a value less than ('<') -the value declared. 
-""" - -constraint_key = Constraint.LESS_THAN - -valid_types = (int, float, datetime.date, - datetime.time, datetime.datetime) - -valid_prop_types = (Schema.INTEGER, Schema.FLOAT, Schema.TIMESTAMP, - Schema.SCALAR_UNIT_SIZE, Schema.SCALAR_UNIT_FREQUENCY, - Schema.SCALAR_UNIT_TIME) - -def __init__(self, property_name, property_type, constraint): - super(LessThan, self).__init__(property_name, property_type, - constraint) - if not isinstance(self.constraint_value, self.valid_types): - ValidationIsshueCollector.appendException( - InvalidSchemaError(message=_('The property "less_than" ' - 'expects comparable values.'))) - -def _is_valid(self, value): - if value < self.constraint_value: - return True - - return False - -def _err_msg(self, value): - return (_('The value "%(pvalue)s" of property "%(pname)s" must be ' - 'less than "%(cvalue)s".') % - dict(pname=self.property_name, - pvalue=self.value_msg, - cvalue=self.constraint_value_msg)) -*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/MaxLength.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/MaxLength.java deleted file mode 100644 index 48ac349..0000000 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/MaxLength.java +++ /dev/null @@ -1,91 +0,0 @@ -package org.openecomp.sdc.toscaparser.api.elements.constraints; - -import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; - -import java.util.LinkedHashMap; - -import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; - -public class MaxLength extends Constraint { - // Constraint class for "min_length" - - // Constrains the property or parameter to a value of a maximum length. 
- - @Override - protected void _setValues() { - - constraintKey = MAX_LENGTH; - - validTypes.add("Integer"); - - validPropTypes.add(Schema.STRING); - validPropTypes.add(Schema.MAP); - - } - - public MaxLength(String name,String type,Object c) { - super(name,type,c); - - if(!validTypes.contains(constraintValue.getClass().getSimpleName())) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE112", "InvalidSchemaError: The property \"max_length\" expects an integer")); - } - } - - @SuppressWarnings("unchecked") - @Override - protected boolean _isValid(Object value) { - if(value instanceof String && constraintValue instanceof Integer && - ((String)value).length() <= (Integer)constraintValue) { - return true; - } - else if(value instanceof LinkedHashMap && constraintValue instanceof Integer && - ((LinkedHashMap)value).size() <= (Integer)constraintValue) { - return true; - } - return false; - } - - @Override - protected String _errMsg(Object value) { - return String.format("Length of value \"%s\" of property \"%s\" must be no greater than \"%s\"", - value.toString(),propertyName,constraintValue.toString()); - } - -} - -/*python - -class MaxLength(Constraint): - """Constraint class for "max_length" - - Constrains the property or parameter to a value to a maximum length. 
- """ - - constraint_key = Constraint.MAX_LENGTH - - valid_types = (int, ) - - valid_prop_types = (Schema.STRING, Schema.MAP) - - def __init__(self, property_name, property_type, constraint): - super(MaxLength, self).__init__(property_name, property_type, - constraint) - if not isinstance(self.constraint_value, self.valid_types): - ValidationIsshueCollector.appendException( - InvalidSchemaError(message=_('The property "max_length" ' - 'expects an integer.'))) - - def _is_valid(self, value): - if ((isinstance(value, str) or isinstance(value, dict)) and - len(value) <= self.constraint_value): - return True - - return False - - def _err_msg(self, value): - return (_('Length of value "%(pvalue)s" of property "%(pname)s" ' - 'must be no greater than "%(cvalue)s".') % - dict(pname=self.property_name, - pvalue=value, - cvalue=self.constraint_value)) -*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/MinLength.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/MinLength.java deleted file mode 100644 index 0203484..0000000 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/MinLength.java +++ /dev/null @@ -1,91 +0,0 @@ -package org.openecomp.sdc.toscaparser.api.elements.constraints; - -import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; - -import java.util.LinkedHashMap; - -import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; - -public class MinLength extends Constraint { - // Constraint class for "min_length" - - // Constrains the property or parameter to a value of a minimum length. 
- - @Override - protected void _setValues() { - - constraintKey = MIN_LENGTH; - - validTypes.add("Integer"); - - validPropTypes.add(Schema.STRING); - validPropTypes.add(Schema.MAP); - - } - - public MinLength(String name,String type,Object c) { - super(name,type,c); - - if(!validTypes.contains(constraintValue.getClass().getSimpleName())) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE113", "InvalidSchemaError: The property \"min_length\" expects an integer")); - } - } - - @SuppressWarnings("unchecked") - @Override - protected boolean _isValid(Object value) { - if(value instanceof String && constraintValue instanceof Integer && - ((String)value).length() >= (Integer)constraintValue) { - return true; - } - else if(value instanceof LinkedHashMap && constraintValue instanceof Integer && - ((LinkedHashMap)value).size() >= (Integer)constraintValue) { - return true; - } - return false; - } - - @Override - protected String _errMsg(Object value) { - return String.format("Length of value \"%s\" of property \"%s\" must be at least \"%s\"", - value.toString(),propertyName,constraintValue.toString()); - } - -} - -/*python - -class MinLength(Constraint): - """Constraint class for "min_length" - - Constrains the property or parameter to a value to a minimum length. 
- """ - - constraint_key = Constraint.MIN_LENGTH - - valid_types = (int, ) - - valid_prop_types = (Schema.STRING, Schema.MAP) - - def __init__(self, property_name, property_type, constraint): - super(MinLength, self).__init__(property_name, property_type, - constraint) - if not isinstance(self.constraint_value, self.valid_types): - ValidationIsshueCollector.appendException( - InvalidSchemaError(message=_('The property "min_length" ' - 'expects an integer.'))) - - def _is_valid(self, value): - if ((isinstance(value, str) or isinstance(value, dict)) and - len(value) >= self.constraint_value): - return True - - return False - - def _err_msg(self, value): - return (_('Length of value "%(pvalue)s" of property "%(pname)s" ' - 'must be at least "%(cvalue)s".') % - dict(pname=self.property_name, - pvalue=value, - cvalue=self.constraint_value)) -*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Pattern.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Pattern.java deleted file mode 100644 index a29bac6..0000000 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Pattern.java +++ /dev/null @@ -1,97 +0,0 @@ -package org.openecomp.sdc.toscaparser.api.elements.constraints; - -import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; - -import java.util.regex.Matcher; -import java.util.regex.PatternSyntaxException; - -import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; - -public class Pattern extends Constraint { - - @Override - protected void _setValues() { - - constraintKey = PATTERN; - - validTypes.add("String"); - - validPropTypes.add(Schema.STRING); - - } - - - public Pattern(String name,String type,Object c) { - super(name,type,c); - - if(!validTypes.contains(constraintValue.getClass().getSimpleName())) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE114", "InvalidSchemaError: The property 
\"pattern\" expects a string")); - } - } - - @Override - protected boolean _isValid(Object value) { - try { - if(!(value instanceof String)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE115", String.format("ValueError: Input value \"%s\" to \"pattern\" property \"%s\" must be a string", - value.toString(),propertyName))); - return false; - } - String strp = constraintValue.toString(); - String strm = value.toString(); - java.util.regex.Pattern pattern = java.util.regex.Pattern.compile(strp); - Matcher matcher = pattern.matcher(strm); - if(matcher.find() && matcher.end() == strm.length()) { - return true; - } - return false; - } - catch(PatternSyntaxException pse) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE116", String.format("ValueError: Invalid regex \"%s\" in \"pattern\" property \"%s\"", - constraintValue.toString(),propertyName))); - return false; - } - } - - @Override - protected String _errMsg(Object value) { - return String.format("The value \"%s\" of property \"%s\" does not match the pattern \"%s\"", - value.toString(),propertyName,constraintValue.toString()); - } - -} - -/*python - -class Pattern(Constraint): - """Constraint class for "pattern" - - Constrains the property or parameter to a value that is allowed by - the provided regular expression. 
- """ - - constraint_key = Constraint.PATTERN - - valid_types = (str, ) - - valid_prop_types = (Schema.STRING, ) - - def __init__(self, property_name, property_type, constraint): - super(Pattern, self).__init__(property_name, property_type, constraint) - if not isinstance(self.constraint_value, self.valid_types): - ValidationIsshueCollector.appendException( - InvalidSchemaError(message=_('The property "pattern" ' - 'expects a string.'))) - self.match = re.compile(self.constraint_value).match - - def _is_valid(self, value): - match = self.match(value) - return match is not None and match.end() == len(value) - - def _err_msg(self, value): - return (_('The value "%(pvalue)s" of property "%(pname)s" does not ' - 'match pattern "%(cvalue)s".') % - dict(pname=self.property_name, - pvalue=value, - cvalue=self.constraint_value)) -*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Schema.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Schema.java deleted file mode 100644 index d0ee118..0000000 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Schema.java +++ /dev/null @@ -1,279 +0,0 @@ -package org.openecomp.sdc.toscaparser.api.elements.constraints; - -import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; - -import java.util.ArrayList; -import java.util.HashMap; -import java.util.LinkedHashMap; -import java.util.Map; - -import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; - - -public class Schema { - - private static final String TYPE = "type"; - private static final String REQUIRED = "required"; - private static final String DESCRIPTION = "description"; - private static final String DEFAULT = "default"; - private static final String CONSTRAINTS = "constraints"; - private static final String STATUS = "status"; - private static final String ENTRYSCHEMA = "entry_schema"; - private static final String KEYS[] = { - TYPE, REQUIRED, 
DESCRIPTION,DEFAULT, CONSTRAINTS, ENTRYSCHEMA, STATUS}; - - public static final String INTEGER = "integer"; - public static final String STRING = "string"; - public static final String BOOLEAN = "boolean"; - public static final String FLOAT = "float"; - public static final String RANGE = "range"; - public static final String NUMBER = "number"; - public static final String TIMESTAMP = "timestamp"; - public static final String LIST = "list"; - public static final String MAP = "map"; - public static final String SCALAR_UNIT_SIZE = "scalar-unit.size"; - public static final String SCALAR_UNIT_FREQUENCY = "scalar-unit.frequency"; - public static final String SCALAR_UNIT_TIME = "scalar-unit.time"; - public static final String VERSION = "version"; - public static final String PORTDEF = "PortDef"; - public static final String PORTSPEC = "PortSpec"; //??? PortSpec.SHORTNAME - public static final String JSON = "json"; - - public static final String PROPERTY_TYPES[] = { - INTEGER, STRING, BOOLEAN, FLOAT, RANGE,NUMBER, TIMESTAMP, LIST, MAP, - SCALAR_UNIT_SIZE, SCALAR_UNIT_FREQUENCY, SCALAR_UNIT_TIME, - VERSION, PORTDEF, PORTSPEC, JSON}; - - @SuppressWarnings("unused") - private static final String SCALAR_UNIT_SIZE_DEFAULT = "B"; - - private static Map SCALAR_UNIT_SIZE_DICT = new HashMap<>(); - static { - SCALAR_UNIT_SIZE_DICT.put("B", 1L); - SCALAR_UNIT_SIZE_DICT.put("KB", 1000L); - SCALAR_UNIT_SIZE_DICT.put("KIB", 1024L); - SCALAR_UNIT_SIZE_DICT.put("MB", 1000000L); - SCALAR_UNIT_SIZE_DICT.put("MIB", 1048576L); - SCALAR_UNIT_SIZE_DICT.put("GB", 1000000000L); - SCALAR_UNIT_SIZE_DICT.put("GIB", 1073741824L); - SCALAR_UNIT_SIZE_DICT.put("TB", 1000000000000L); - SCALAR_UNIT_SIZE_DICT.put("TIB", 1099511627776L); - } - - private String name; - private LinkedHashMap schema; - private int _len; - private ArrayList constraintsList; - - - public Schema(String _name,LinkedHashMap _schemaDict) { - name = _name; - - if(!(_schemaDict instanceof LinkedHashMap)) { - //msg = (_('Schema 
definition of "%(pname)s" must be a dict.') - // % dict(pname=name)) - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE117", String.format( - "InvalidSchemaError: Schema definition of \"%s\" must be a dict",name))); - } - - if(_schemaDict.get("type") == null) { - //msg = (_('Schema definition of "%(pname)s" must have a "type" ' - // 'attribute.') % dict(pname=name)) - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE118", String.format( - "InvalidSchemaError: Schema definition of \"%s\" must have a \"type\" attribute",name))); - } - - schema = _schemaDict; - _len = 0; //??? None - constraintsList = new ArrayList<>(); - } - - public String getType() { - return (String)schema.get(TYPE); - } - - public boolean isRequired() { - return (boolean)schema.getOrDefault(REQUIRED, true); - } - - public String getDescription() { - return (String)schema.getOrDefault(DESCRIPTION,""); - } - - public Object getDefault() { - return schema.get(DEFAULT); - } - - public String getStatus() { - return (String)schema.getOrDefault(STATUS,""); - } - - @SuppressWarnings("unchecked") - public ArrayList getConstraints() { - if(constraintsList.size() == 0) { - Object cob = schema.get(CONSTRAINTS); - if(cob instanceof ArrayList) { - ArrayList constraintSchemata = (ArrayList)cob; - for(Object ob: constraintSchemata) { - if(ob instanceof LinkedHashMap) { - for(String cClass: ((LinkedHashMap)ob).keySet()) { - Constraint c = Constraint.factory(cClass,name,getType(),ob); - if(c != null) { - constraintsList.add(c); - } - else { - // error - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE119", String.format( - "UnknownFieldError: Constraint type \"%s\" for property \"%s\" is not supported", - cClass,name))); - } - break; - } - } - } - } - } - return constraintsList; - } - - @SuppressWarnings("unchecked") - public LinkedHashMap getEntrySchema() { - return (LinkedHashMap)schema.get(ENTRYSCHEMA); 
- } - - // Python intrinsic methods... - - // substitute for __getitem__ (aka self[key]) - public Object getItem(String key) { - return schema.get(key); - } - - /* - def __iter__(self): - for k in self.KEYS: - try: - self.schema[k] - except KeyError: - pass - else: - yield k - */ - - // substitute for __len__ (aka self.len()) - public int getLen() { - int len = 0; - for(String k: KEYS) { - if(schema.get(k) != null) { - len++; - } - _len = len; - } - return _len; - } - // getter - public LinkedHashMap getSchema() { - return schema; - } - -} - -/*python - -class Schema(collections.Mapping): - -KEYS = ( - TYPE, REQUIRED, DESCRIPTION, - DEFAULT, CONSTRAINTS, ENTRYSCHEMA, STATUS -) = ( - 'type', 'required', 'description', - 'default', 'constraints', 'entry_schema', 'status' -) - -PROPERTY_TYPES = ( - INTEGER, STRING, BOOLEAN, FLOAT, RANGE, - NUMBER, TIMESTAMP, LIST, MAP, - SCALAR_UNIT_SIZE, SCALAR_UNIT_FREQUENCY, SCALAR_UNIT_TIME, - VERSION, PORTDEF, PORTSPEC -) = ( - 'integer', 'string', 'boolean', 'float', 'range', - 'number', 'timestamp', 'list', 'map', - 'scalar-unit.size', 'scalar-unit.frequency', 'scalar-unit.time', - 'version', 'PortDef', PortSpec.SHORTNAME -) - -SCALAR_UNIT_SIZE_DEFAULT = 'B' -SCALAR_UNIT_SIZE_DICT = {'B': 1, 'KB': 1000, 'KIB': 1024, 'MB': 1000000, - 'MIB': 1048576, 'GB': 1000000000, - 'GIB': 1073741824, 'TB': 1000000000000, - 'TIB': 1099511627776} - -def __init__(self, name, schema_dict): - self.name = name - if not isinstance(schema_dict, collections.Mapping): - msg = (_('Schema definition of "%(pname)s" must be a dict.') - % dict(pname=name)) - ValidationIssueCollector.appendException(InvalidSchemaError(message=msg)) - - try: - schema_dict['type'] - except KeyError: - msg = (_('Schema definition of "%(pname)s" must have a "type" ' - 'attribute.') % dict(pname=name)) - ValidationIssueCollector.appendException(InvalidSchemaError(message=msg)) - - self.schema = schema_dict - self._len = None - self.constraints_list = [] - -@property -def 
type(self): - return self.schema[self.TYPE] - -@property -def required(self): - return self.schema.get(self.REQUIRED, True) - -@property -def description(self): - return self.schema.get(self.DESCRIPTION, '') - -@property -def default(self): - return self.schema.get(self.DEFAULT) - -@property -def status(self): - return self.schema.get(self.STATUS, '') - -@property -def constraints(self): - if not self.constraints_list: - constraint_schemata = self.schema.get(self.CONSTRAINTS) - if constraint_schemata: - self.constraints_list = [Constraint(self.name, - self.type, - cschema) - for cschema in constraint_schemata] - return self.constraints_list - -@property -def entry_schema(self): - return self.schema.get(self.ENTRYSCHEMA) - -def __getitem__(self, key): - return self.schema[key] - -def __iter__(self): - for k in self.KEYS: - try: - self.schema[k] - except KeyError: - pass - else: - yield k - -def __len__(self): - if self._len is None: - self._len = len(list(iter(self))) - return self._len -*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Schema.java.orig b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Schema.java.orig deleted file mode 100644 index 355f505..0000000 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Schema.java.orig +++ /dev/null @@ -1,281 +0,0 @@ -package org.openecomp.sdc.toscaparser.api.elements.constraints; - -import java.util.ArrayList; -import java.util.HashMap; -import java.util.LinkedHashMap; -import java.util.Map; - -import org.openecomp.sdc.toscaparser.api.common.ValidationIssueCollector; - - -public class Schema { - - private static final String TYPE = "type"; - private static final String REQUIRED = "required"; - private static final String DESCRIPTION = "description"; - private static final String DEFAULT = "default"; - private static final String CONSTRAINTS = "constraints"; - private static final String STATUS = "status"; - 
private static final String ENTRYSCHEMA = "entry_schema"; - private static final String KEYS[] = { - TYPE, REQUIRED, DESCRIPTION,DEFAULT, CONSTRAINTS, ENTRYSCHEMA, STATUS}; - - public static final String INTEGER = "integer"; - public static final String STRING = "string"; - public static final String BOOLEAN = "boolean"; - public static final String FLOAT = "float"; - public static final String RANGE = "range"; - public static final String NUMBER = "number"; - public static final String TIMESTAMP = "timestamp"; - public static final String LIST = "list"; - public static final String MAP = "map"; - public static final String SCALAR_UNIT_SIZE = "scalar-unit.size"; - public static final String SCALAR_UNIT_FREQUENCY = "scalar-unit.frequency"; - public static final String SCALAR_UNIT_TIME = "scalar-unit.time"; - public static final String VERSION = "version"; - public static final String PORTDEF = "PortDef"; - public static final String PORTSPEC = "PortSpec"; //??? PortSpec.SHORTNAME -<<<<<<< HEAD - public static final String JSON = "json"; -======= - public static final String JSON = "json"; ->>>>>>> master - - public static final String PROPERTY_TYPES[] = { - INTEGER, STRING, BOOLEAN, FLOAT, RANGE,NUMBER, TIMESTAMP, LIST, MAP, - SCALAR_UNIT_SIZE, SCALAR_UNIT_FREQUENCY, SCALAR_UNIT_TIME, - VERSION, PORTDEF, PORTSPEC, JSON}; - - @SuppressWarnings("unused") - private static final String SCALAR_UNIT_SIZE_DEFAULT = "B"; - - private static Map SCALAR_UNIT_SIZE_DICT = new HashMap<>(); - static { - SCALAR_UNIT_SIZE_DICT.put("B", 1L); - SCALAR_UNIT_SIZE_DICT.put("KB", 1000L); - SCALAR_UNIT_SIZE_DICT.put("KIB", 1024L); - SCALAR_UNIT_SIZE_DICT.put("MB", 1000000L); - SCALAR_UNIT_SIZE_DICT.put("MIB", 1048576L); - SCALAR_UNIT_SIZE_DICT.put("GB", 1000000000L); - SCALAR_UNIT_SIZE_DICT.put("GIB", 1073741824L); - SCALAR_UNIT_SIZE_DICT.put("TB", 1000000000000L); - SCALAR_UNIT_SIZE_DICT.put("TIB", 1099511627776L); - } - - private String name; - private LinkedHashMap schema; - private int 
_len; - private ArrayList constraintsList; - - - public Schema(String _name,LinkedHashMap _schemaDict) { - name = _name; - - if(!(_schemaDict instanceof LinkedHashMap)) { - //msg = (_('Schema definition of "%(pname)s" must be a dict.') - // % dict(pname=name)) - ExceptionCollector.appendException(String.format( - "InvalidSchemaError: Schema definition of \"%s\" must be a dict",name)); - } - - if(_schemaDict.get("type") == null) { - //msg = (_('Schema definition of "%(pname)s" must have a "type" ' - // 'attribute.') % dict(pname=name)) - ExceptionCollector.appendException(String.format( - "InvalidSchemaError: Schema definition of \"%s\" must have a \"type\" attribute",name)); - } - - schema = _schemaDict; - _len = 0; //??? None - constraintsList = new ArrayList<>(); - } - - public String getType() { - return (String)schema.get(TYPE); - } - - public boolean isRequired() { - return (boolean)schema.getOrDefault(REQUIRED, true); - } - - public String getDescription() { - return (String)schema.getOrDefault(DESCRIPTION,""); - } - - public Object getDefault() { - return schema.get(DEFAULT); - } - - public String getStatus() { - return (String)schema.getOrDefault(STATUS,""); - } - - @SuppressWarnings("unchecked") - public ArrayList getConstraints() { - if(constraintsList.size() == 0) { - Object cob = schema.get(CONSTRAINTS); - if(cob instanceof ArrayList) { - ArrayList constraintSchemata = (ArrayList)cob; - for(Object ob: constraintSchemata) { - if(ob instanceof LinkedHashMap) { - for(String cClass: ((LinkedHashMap)ob).keySet()) { - Constraint c = Constraint.factory(cClass,name,getType(),ob); - if(c != null) { - constraintsList.add(c); - } - else { - // error - ExceptionCollector.appendException(String.format( - "UnknownFieldError: Constraint type \"%s\" for property \"%s\" is not supported", - cClass,name)); - } - break; - } - } - } - } - } - return constraintsList; - } - - @SuppressWarnings("unchecked") - public LinkedHashMap getEntrySchema() { - return 
(LinkedHashMap)schema.get(ENTRYSCHEMA); - } - - // Python intrinsic methods... - - // substitute for __getitem__ (aka self[key]) - public Object getItem(String key) { - return schema.get(key); - } - - /* - def __iter__(self): - for k in self.KEYS: - try: - self.schema[k] - except KeyError: - pass - else: - yield k - */ - - // substitute for __len__ (aka self.len()) - public int getLen() { - int len = 0; - for(String k: KEYS) { - if(schema.get(k) != null) { - len++; - } - _len = len; - } - return _len; - } - // getter - public LinkedHashMap getSchema() { - return schema; - } - -} - -/*python - -class Schema(collections.Mapping): - -KEYS = ( - TYPE, REQUIRED, DESCRIPTION, - DEFAULT, CONSTRAINTS, ENTRYSCHEMA, STATUS -) = ( - 'type', 'required', 'description', - 'default', 'constraints', 'entry_schema', 'status' -) - -PROPERTY_TYPES = ( - INTEGER, STRING, BOOLEAN, FLOAT, RANGE, - NUMBER, TIMESTAMP, LIST, MAP, - SCALAR_UNIT_SIZE, SCALAR_UNIT_FREQUENCY, SCALAR_UNIT_TIME, - VERSION, PORTDEF, PORTSPEC -) = ( - 'integer', 'string', 'boolean', 'float', 'range', - 'number', 'timestamp', 'list', 'map', - 'scalar-unit.size', 'scalar-unit.frequency', 'scalar-unit.time', - 'version', 'PortDef', PortSpec.SHORTNAME -) - -SCALAR_UNIT_SIZE_DEFAULT = 'B' -SCALAR_UNIT_SIZE_DICT = {'B': 1, 'KB': 1000, 'KIB': 1024, 'MB': 1000000, - 'MIB': 1048576, 'GB': 1000000000, - 'GIB': 1073741824, 'TB': 1000000000000, - 'TIB': 1099511627776} - -def __init__(self, name, schema_dict): - self.name = name - if not isinstance(schema_dict, collections.Mapping): - msg = (_('Schema definition of "%(pname)s" must be a dict.') - % dict(pname=name)) - ExceptionCollector.appendException(InvalidSchemaError(message=msg)) - - try: - schema_dict['type'] - except KeyError: - msg = (_('Schema definition of "%(pname)s" must have a "type" ' - 'attribute.') % dict(pname=name)) - ExceptionCollector.appendException(InvalidSchemaError(message=msg)) - - self.schema = schema_dict - self._len = None - self.constraints_list = 
[] - -@property -def type(self): - return self.schema[self.TYPE] - -@property -def required(self): - return self.schema.get(self.REQUIRED, True) - -@property -def description(self): - return self.schema.get(self.DESCRIPTION, '') - -@property -def default(self): - return self.schema.get(self.DEFAULT) - -@property -def status(self): - return self.schema.get(self.STATUS, '') - -@property -def constraints(self): - if not self.constraints_list: - constraint_schemata = self.schema.get(self.CONSTRAINTS) - if constraint_schemata: - self.constraints_list = [Constraint(self.name, - self.type, - cschema) - for cschema in constraint_schemata] - return self.constraints_list - -@property -def entry_schema(self): - return self.schema.get(self.ENTRYSCHEMA) - -def __getitem__(self, key): - return self.schema[key] - -def __iter__(self): - for k in self.KEYS: - try: - self.schema[k] - except KeyError: - pass - else: - yield k - -def __len__(self): - if self._len is None: - self._len = len(list(iter(self))) - return self._len -*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/ValidValues.java b/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/ValidValues.java deleted file mode 100644 index 60b6be2..0000000 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/ValidValues.java +++ /dev/null @@ -1,84 +0,0 @@ -package org.openecomp.sdc.toscaparser.api.elements.constraints; - -import java.util.ArrayList; - -public class ValidValues extends Constraint { - - - protected void _setValues() { - - constraintKey = VALID_VALUES; - - for(String s: Schema.PROPERTY_TYPES) { - validPropTypes.add(s); - } - - } - - - public ValidValues(String name,String type,Object c) { - super(name,type,c); - - } - - @SuppressWarnings("unchecked") - protected boolean _isValid(Object val) { - if(!(constraintValue instanceof ArrayList)) { - return false; - } - if(val instanceof ArrayList) { - boolean bAll = true; - 
for(Object v: (ArrayList)val) { - if(!((ArrayList)constraintValue).contains(v)) { - bAll = false; - break; - }; - } - return bAll; - } - return ((ArrayList)constraintValue).contains(val); - } - - protected String _errMsg(Object value) { - return String.format("The value \"%s\" of property \"%s\" is not valid. Expected a value from \"%s\"", - value.toString(),propertyName,constraintValue.toString()); - } - -} - -/*python - -class ValidValues(Constraint): -"""Constraint class for "valid_values" - -Constrains a property or parameter to a value that is in the list of -declared values. -""" -constraint_key = Constraint.VALID_VALUES - -valid_prop_types = Schema.PROPERTY_TYPES - -def __init__(self, property_name, property_type, constraint): - super(ValidValues, self).__init__(property_name, property_type, - constraint) - if not isinstance(self.constraint_value, collections.Sequence): - ValidationIsshueCollector.appendException( - InvalidSchemaError(message=_('The property "valid_values" ' - 'expects a list.'))) - -def _is_valid(self, value): - print '*** payton parser validating ',value,' in ',self.constraint_value#GGG - if isinstance(value, list): - return all(v in self.constraint_value for v in value) - return value in self.constraint_value - -def _err_msg(self, value): - allowed = '[%s]' % ', '.join(str(a) for a in self.constraint_value) - return (_('The value "%(pvalue)s" of property "%(pname)s" is not ' - 'valid. 
Expected a value from "%(cvalue)s".') % - dict(pname=self.property_name, - pvalue=value, - cvalue=allowed)) - - -*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/extensions/ExtTools.java b/src/main/java/org/openecomp/sdc/toscaparser/api/extensions/ExtTools.java deleted file mode 100644 index f0e0afa..0000000 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/extensions/ExtTools.java +++ /dev/null @@ -1,192 +0,0 @@ -package org.openecomp.sdc.toscaparser.api.extensions; - -import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; -import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; -import org.reflections.Reflections; -import org.reflections.scanners.ResourcesScanner; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.BufferedReader; -import java.io.InputStream; -import java.io.InputStreamReader; -import java.nio.charset.Charset; -import java.util.ArrayList; -import java.util.LinkedHashMap; -import java.util.Set; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -public class ExtTools { - - private static Logger log = LoggerFactory.getLogger(ExtTools.class.getName()); - - private static LinkedHashMap EXTENSION_INFO = new LinkedHashMap<>(); - - public ExtTools() { - - EXTENSION_INFO = _loadExtensions(); - } - - private LinkedHashMap _loadExtensions() { - - LinkedHashMap extensions = new LinkedHashMap<>(); - - Reflections reflections = new Reflections("extensions", new ResourcesScanner()); - Set resourcePaths = reflections.getResources(Pattern.compile(".*\\.py$")); - - for(String resourcePath : resourcePaths) { - try (InputStream is = ExtTools.class.getClassLoader().getResourceAsStream(resourcePath); - InputStreamReader isr = new InputStreamReader(is, Charset.forName("UTF-8")); - BufferedReader br = new BufferedReader(isr);){ - String version = null; - ArrayList sections = null; - String defsFile = null; - String line; - - Pattern pattern = 
Pattern.compile("^([^#]\\S+)\\s*=\\s*(\\S.*)$"); - while ((line = br.readLine()) != null) { - line = line.replace("'", "\""); - Matcher matcher = pattern.matcher(line.toString()); - if (matcher.find()) { - if (matcher.group(1).equals("VERSION")) { - version = matcher.group(2); - if (version.startsWith("'") || version.startsWith("\"")) { - version = version.substring(1, version.length() - 1); - } - } - else if (matcher.group(1).equals("DEFS_FILE")) { - String fn = matcher.group(2); - if (fn.startsWith("'") || fn.startsWith("\"")) { - fn = fn.substring(1, fn.length() - 1); - } - defsFile = resourcePath.replaceFirst("\\w*.py$", fn); - } - else if (matcher.group(1).equals("SECTIONS")) { - sections = new ArrayList<>(); - Pattern secpat = Pattern.compile("\"([^\"]+)\""); - Matcher secmat = secpat.matcher(matcher.group(2)); - while (secmat.find()) { - sections.add(secmat.group(1)); - } - } - } - } - - if (version != null && defsFile != null) { - LinkedHashMap ext = new LinkedHashMap<>(); - ext.put("defs_file", defsFile); - if (sections != null) { - ext.put("sections", sections); - } - extensions.put(version, ext); - } - else { - // error - } - } - catch (Exception e) { - log.error("ExtTools - _loadExtensions - {}", e); - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue - ("JE281", "Failed to load extensions" + e.getMessage())); - // ... 
- } - } - return extensions; - } - - public ArrayList getVersions() { - return new ArrayList(EXTENSION_INFO.keySet()); - } - - public LinkedHashMap> getSections() { - LinkedHashMap> sections = new LinkedHashMap<>(); - for(String version: EXTENSION_INFO.keySet()) { - LinkedHashMap eiv = (LinkedHashMap)EXTENSION_INFO.get(version); - sections.put(version,(ArrayList)eiv.get("sections")); - } - return sections; - } - - public String getDefsFile(String version) { - LinkedHashMap eiv = (LinkedHashMap)EXTENSION_INFO.get(version); - return (String)eiv.get("defs_file"); - } - -} - -/*python - -from toscaparser.common.exception import ToscaExtAttributeError -from toscaparser.common.exception import ToscaExtImportError - -log = logging.getLogger("tosca.model") - -REQUIRED_ATTRIBUTES = ['VERSION', 'DEFS_FILE'] - - -class ExtTools(object): - def __init__(self): - self.EXTENSION_INFO = self._load_extensions() - - def _load_extensions(self): - '''Dynamically load all the extensions .''' - extensions = {} - - # Use the absolute path of the class path - abs_path = os.path.dirname(os.path.abspath(__file__)) - - extdirs = [e for e in os.listdir(abs_path) if - not e.startswith('tests') and - os.path.isdir(os.path.join(abs_path, e))] - - for e in extdirs: - log.info(e) - extpath = abs_path + '/' + e - # Grab all the extension files in the given path - ext_files = [f for f in os.listdir(extpath) if f.endswith('.py') - and not f.startswith('__init__')] - - # For each module, pick out the target translation class - for f in ext_files: - log.info(f) - ext_name = 'toscaparser/extensions/' + e + '/' + f.strip('.py') - ext_name = ext_name.replace('/', '.') - try: - extinfo = importlib.import_module(ext_name) - version = getattr(extinfo, 'VERSION') - defs_file = extpath + '/' + getattr(extinfo, 'DEFS_FILE') - - # Sections is an optional attribute - sections = getattr(extinfo, 'SECTIONS', ()) - - extensions[version] = {'sections': sections, - 'defs_file': defs_file} - except ImportError: - raise 
ToscaExtImportError(ext_name=ext_name) - except AttributeError: - attrs = ', '.join(REQUIRED_ATTRIBUTES) - raise ToscaExtAttributeError(ext_name=ext_name, - attrs=attrs) - - print 'Extensions ',extensions#GGG - return extensions - - def get_versions(self): - return self.EXTENSION_INFO.keys() - - def get_sections(self): - sections = {} - for version in self.EXTENSION_INFO.keys(): - sections[version] = self.EXTENSION_INFO[version]['sections'] - - return sections - - def get_defs_file(self, version): - versiondata = self.EXTENSION_INFO.get(version) - - if versiondata: - return versiondata.get('defs_file') - else: - return None -*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/functions/Concat.java b/src/main/java/org/openecomp/sdc/toscaparser/api/functions/Concat.java deleted file mode 100644 index 84afbc9..0000000 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/functions/Concat.java +++ /dev/null @@ -1,78 +0,0 @@ -package org.openecomp.sdc.toscaparser.api.functions; - -import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; - -import java.util.ArrayList; - -import org.openecomp.sdc.toscaparser.api.TopologyTemplate; -import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; - -public class Concat extends Function { - // Validate the function and provide an instance of the function - - // Concatenation of values are supposed to be produced at runtime and - // therefore its the responsibility of the TOSCA engine to implement the - // evaluation of Concat functions. 
- - // Arguments: - - // * List of strings that needs to be concatenated - - // Example: - - // [ 'http://', - // get_attribute: [ server, public_address ], - // ':' , - // get_attribute: [ server, port ] ] - - - public Concat(TopologyTemplate ttpl,Object context,String name,ArrayList args) { - super(ttpl,context,name,args); - } - - @Override - public Object result() { - return this; - } - - @Override - void validate() { - if(args.size() < 1) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE145", - "ValueError: Invalid arguments for function \"concat\". " + - "Expected at least one argument")); - } - } - -} - -/*python - -class Concat(Function): -"""Validate the function and provide an instance of the function - -Concatenation of values are supposed to be produced at runtime and -therefore its the responsibility of the TOSCA engine to implement the -evaluation of Concat functions. - -Arguments: - -* List of strings that needs to be concatenated - -Example: - - [ 'http://', - get_attribute: [ server, public_address ], - ':' , - get_attribute: [ server, port ] ] -""" - -def validate(self): - if len(self.args) < 1: - ValidationIsshueCollector.appendException( - ValueError(_('Invalid arguments for function "{0}". 
Expected ' - 'at least one arguments.').format(CONCAT))) - -def result(self): - return self -*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/functions/Function.java b/src/main/java/org/openecomp/sdc/toscaparser/api/functions/Function.java deleted file mode 100644 index 3437735..0000000 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/functions/Function.java +++ /dev/null @@ -1,236 +0,0 @@ -package org.openecomp.sdc.toscaparser.api.functions; - -import java.util.*; - -import org.openecomp.sdc.toscaparser.api.TopologyTemplate; -import org.openecomp.sdc.toscaparser.api.ToscaTemplate; - -public abstract class Function { - - protected static final String GET_PROPERTY = "get_property"; - protected static final String GET_ATTRIBUTE = "get_attribute"; - protected static final String GET_INPUT = "get_input"; - protected static final String GET_OPERATION_OUTPUT = "get_operation_output"; - protected static final String CONCAT = "concat"; - protected static final String TOKEN = "token"; - - protected static final String SELF = "SELF"; - protected static final String HOST = "HOST"; - protected static final String TARGET = "TARGET"; - protected static final String SOURCE = "SOURCE"; - - protected static final String HOSTED_ON = "tosca.relationships.HostedOn"; - - protected static HashMap functionMappings = _getFunctionMappings(); - - private static HashMap _getFunctionMappings() { - HashMap map = new HashMap<>(); - map.put(GET_PROPERTY,"GetProperty"); - map.put(GET_INPUT, "GetInput"); - map.put(GET_ATTRIBUTE, "GetAttribute"); - map.put(GET_OPERATION_OUTPUT, "GetOperationOutput"); - map.put(CONCAT, "Concat"); - map.put(TOKEN, "Token"); - return map; - } - - protected TopologyTemplate toscaTpl; - protected Object context; - protected String name; - protected ArrayList args; - - - public Function(TopologyTemplate _toscaTpl,Object _context,String _name,ArrayList _args) { - toscaTpl = _toscaTpl; - context = _context; - name = _name; - 
args = _args; - validate(); - - } - - abstract Object result(); - - abstract void validate(); - - @SuppressWarnings("unchecked") - public static boolean isFunction(Object funcObj) { - // Returns True if the provided function is a Tosca intrinsic function. - // - //Examples: - // - //* "{ get_property: { SELF, port } }" - //* "{ get_input: db_name }" - //* Function instance - - //:param function: Function as string or a Function instance. - //:return: True if function is a Tosca intrinsic function, otherwise False. - // - - if(funcObj instanceof LinkedHashMap) { - LinkedHashMap function = (LinkedHashMap)funcObj; - if(function.size() == 1) { - String funcName = (new ArrayList(function.keySet())).get(0); - return functionMappings.keySet().contains(funcName); - } - } - return (funcObj instanceof Function); - } - - @SuppressWarnings("unchecked") - public static Object getFunction(TopologyTemplate ttpl,Object context,Object rawFunctionObj, boolean resolveGetInput) { - // Gets a Function instance representing the provided template function. - - // If the format provided raw_function format is not relevant for template - // functions or if the function name doesn't exist in function mapping the - // method returns the provided raw_function. - // - // :param tosca_tpl: The tosca template. - // :param node_template: The node template the function is specified for. - // :param raw_function: The raw function as dict. - // :return: Template function as Function instance or the raw_function if - // parsing was unsuccessful. - - - // iterate over leaves of the properties's tree and convert function leaves to function object, - // support List and Map nested, - // assuming that leaf value of function is always map type contains 1 item (e.g. my_leaf: {get_input: xxx}). 
- - if (rawFunctionObj instanceof LinkedHashMap) { // In map type case - LinkedHashMap rawFunction = ((LinkedHashMap) rawFunctionObj); - if(rawFunction.size() == 1 && - !(rawFunction.values().iterator().next() instanceof LinkedHashMap)) { // End point - return getFunctionForObjectItem(ttpl, context, rawFunction, resolveGetInput); - } else { - return getFunctionForMap(ttpl, context, rawFunction, resolveGetInput); - } - } else if (rawFunctionObj instanceof ArrayList) { // In list type case - return getFunctionForList(ttpl, context, (ArrayList) rawFunctionObj, resolveGetInput); - } - - return rawFunctionObj; - } - - private static Object getFunctionForList(TopologyTemplate ttpl, Object context, ArrayList rawFunctionObj, boolean resolveGetInput) { - // iterate over list properties in recursion, convert leaves to function, - // and collect them in the same hierarchy as the original list. - ArrayList rawFunctionObjList = new ArrayList<>(); - for (Object rawFunctionObjItem: rawFunctionObj) { - rawFunctionObjList.add(getFunction(ttpl, context, rawFunctionObjItem, resolveGetInput)); - } - return rawFunctionObjList; - } - - private static Object getFunctionForMap(TopologyTemplate ttpl, Object context, LinkedHashMap rawFunction, boolean resolveGetInput) { - // iterate over map nested properties in recursion, convert leaves to function, - // and collect them in the same hierarchy as the original map. 
- LinkedHashMap rawFunctionObjMap = new LinkedHashMap(); - for (Object rawFunctionObjItem: rawFunction.entrySet()) { - Object itemValue = getFunction(ttpl, context, ((Map.Entry)rawFunctionObjItem).getValue(), resolveGetInput); - rawFunctionObjMap.put(((Map.Entry)rawFunctionObjItem).getKey(), itemValue); - } - return rawFunctionObjMap; - } - - private static Object getFunctionForObjectItem(TopologyTemplate ttpl, Object context, Object rawFunctionObjItem, boolean resolveGetInput) { - if(isFunction(rawFunctionObjItem)) { - LinkedHashMap rawFunction = (LinkedHashMap) rawFunctionObjItem; - String funcName = (new ArrayList(rawFunction.keySet())).get(0); - if (functionMappings.keySet().contains(funcName)) { - String funcType = functionMappings.get(funcName); - Object oargs = (new ArrayList(rawFunction.values())).get(0); - ArrayList funcArgs; - if (oargs instanceof ArrayList) { - funcArgs = (ArrayList) oargs; - } else { - funcArgs = new ArrayList<>(); - funcArgs.add(oargs); - } - - switch (funcType) { - case "GetInput": - if (resolveGetInput) { - GetInput input = new GetInput(ttpl, context, funcName, funcArgs); - return input.result(); - } - return new GetInput(ttpl, context, funcName, funcArgs); - case "GetAttribute": - return new GetAttribute(ttpl, context, funcName, funcArgs); - case "GetProperty": - return new GetProperty(ttpl, context, funcName, funcArgs); - case "GetOperationOutput": - return new GetOperationOutput(ttpl, context, funcName, funcArgs); - case "Concat": - return new Concat(ttpl, context, funcName, funcArgs); - case "Token": - return new Token(ttpl, context, funcName, funcArgs); - } - } - } - - return rawFunctionObjItem; - } - - @Override - public String toString() { - String argsStr = args.size() > 1 ? 
args.toString() : args.get(0).toString(); - return name + ":" + argsStr; - } -} - -/*python - -from toscaparser.common.exception import ValidationIsshueCollector -from toscaparser.common.exception import UnknownInputError -from toscaparser.dataentity import DataEntity -from toscaparser.elements.constraints import Schema -from toscaparser.elements.datatype import DataType -from toscaparser.elements.entity_type import EntityType -from toscaparser.elements.relationshiptype import RelationshipType -from toscaparser.elements.statefulentitytype import StatefulEntityType -from toscaparser.utils.gettextutils import _ - - -GET_PROPERTY = 'get_property' -GET_ATTRIBUTE = 'get_attribute' -GET_INPUT = 'get_input' -GET_OPERATION_OUTPUT = 'get_operation_output' -CONCAT = 'concat' -TOKEN = 'token' - -SELF = 'SELF' -HOST = 'HOST' -TARGET = 'TARGET' -SOURCE = 'SOURCE' - -HOSTED_ON = 'tosca.relationships.HostedOn' - - -@six.add_metaclass(abc.ABCMeta) -class Function(object): - """An abstract type for representing a Tosca template function.""" - - def __init__(self, tosca_tpl, context, name, args): - self.tosca_tpl = tosca_tpl - self.context = context - self.name = name - self.args = args - self.validate() - - @abc.abstractmethod - def result(self): - """Invokes the function and returns its result - - Some methods invocation may only be relevant on runtime (for example, - getting runtime properties) and therefore its the responsibility of - the orchestrator/translator to take care of such functions invocation. - - :return: Function invocation result. 
- """ - return {self.name: self.args} - - @abc.abstractmethod - def validate(self): - """Validates function arguments.""" - pass -*/ diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetAttribute.java b/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetAttribute.java deleted file mode 100644 index 5433aac..0000000 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetAttribute.java +++ /dev/null @@ -1,523 +0,0 @@ -package org.openecomp.sdc.toscaparser.api.functions; - -import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; - -import java.util.ArrayList; -import java.util.LinkedHashMap; - -import org.openecomp.sdc.toscaparser.api.*; -import org.openecomp.sdc.toscaparser.api.elements.AttributeDef; -import org.openecomp.sdc.toscaparser.api.elements.CapabilityTypeDef; -import org.openecomp.sdc.toscaparser.api.elements.DataType; -import org.openecomp.sdc.toscaparser.api.elements.EntityType; -import org.openecomp.sdc.toscaparser.api.elements.NodeType; -import org.openecomp.sdc.toscaparser.api.elements.PropertyDef; -import org.openecomp.sdc.toscaparser.api.elements.RelationshipType; -import org.openecomp.sdc.toscaparser.api.elements.StatefulEntityType; -import org.openecomp.sdc.toscaparser.api.elements.constraints.Schema; -import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; - -public class GetAttribute extends Function { - // Get an attribute value of an entity defined in the service template - - // Node template attributes values are set in runtime and therefore its the - // responsibility of the Tosca engine to implement the evaluation of - // get_attribute functions. - - // Arguments: - - // * Node template name | HOST. - // * Attribute name. - - // If the HOST keyword is passed as the node template name argument the - // function will search each node template along the HostedOn relationship - // chain until a node which contains the attribute is found. 
- - // Examples: - - // * { get_attribute: [ server, private_address ] } - // * { get_attribute: [ HOST, private_address ] } - // * { get_attribute: [ HOST, private_address, 0 ] } - // * { get_attribute: [ HOST, private_address, 0, some_prop] } - - public GetAttribute(TopologyTemplate ttpl, Object context, String name, ArrayList args) { - super(ttpl, context, name, args); - } - - @Override - void validate() { - if (args.size() < 2) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE146", - "ValueError: Illegal arguments for function \"get_attribute\". Expected arguments: \"node-template-name\", \"req-or-cap\" (optional), \"property name.\"")); - return; - } else if (args.size() == 2) { - _findNodeTemplateContainingAttribute(); - } else { - NodeTemplate nodeTpl = _findNodeTemplate((String) args.get(0)); - if (nodeTpl == null) { - return; - } - int index = 2; - AttributeDef attr = nodeTpl.getTypeDefinition().getAttributeDefValue((String) args.get(1)); - if (attr != null) { - // found - } else { - index = 3; - // then check the req or caps - if (!(args.get(1) instanceof String) || !(args.get(2) instanceof String)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE146", "ValueError: Illegal arguments for function \"get_attribute\". Expected a String argument")); - } - - attr = _findReqOrCapAttribute(args.get(1).toString(), args.get(2).toString()); - if (attr == null) { - return; - } - } - - - String valueType = (String) attr.getSchema().get("type"); - if (args.size() > index) { - for (Object elem : args.subList(index, args.size())) { - if (valueType.equals("list")) { - if (!(elem instanceof Integer)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE147", String.format( - "ValueError: Illegal arguments for function \"get_attribute\" \"%s\". 
Expected positive integer argument", - elem.toString()))); - } - Object ob = attr.getSchema().get("entry_schema"); - valueType = (String) - ((LinkedHashMap) ob).get("type"); - } else if (valueType.equals("map")) { - Object ob = attr.getSchema().get("entry_schema"); - valueType = (String) - ((LinkedHashMap) ob).get("type"); - } else { - boolean bFound = false; - for (String p : Schema.PROPERTY_TYPES) { - if (p.equals(valueType)) { - bFound = true; - break; - } - } - if (bFound) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE148", String.format( - "ValueError: 'Illegal arguments for function \"get_attribute\". Unexpected attribute/index value \"%s\"", - elem))); - return; - } else { // It is a complex type - DataType dataType = new DataType(valueType, null); - LinkedHashMap props = - dataType.getAllProperties(); - PropertyDef prop = props.get((String) elem); - if (prop != null) { - valueType = (String) prop.getSchema().get("type"); - } else { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE149", String.format( - "KeyError: Illegal arguments for function \"get_attribute\". Attribute name \"%s\" not found in \"%\"", - elem, valueType))); - } - } - } - } - } - } - } - - @Override - public Object result() { - return this; - } - - private NodeTemplate getReferencedNodeTemplate() { - // Gets the NodeTemplate instance the get_attribute function refers to - - // If HOST keyword was used as the node template argument, the node - // template which contains the attribute along the HostedOn relationship - // chain will be returned. 
- - return _findNodeTemplateContainingAttribute(); - - } - - // Attributes can be explicitly created as part of the type definition - // or a property name can be implicitly used as an attribute name - private NodeTemplate _findNodeTemplateContainingAttribute() { - NodeTemplate nodeTpl = _findNodeTemplate((String) args.get(0)); - if (nodeTpl != null && - !_attributeExistsInType(nodeTpl.getTypeDefinition()) && - !nodeTpl.getProperties().keySet().contains(getAttributeName())) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE150", String.format( - "KeyError: Attribute \"%s\" was not found in node template \"%s\"", - getAttributeName(), nodeTpl.getName()))); - } - return nodeTpl; - } - - private boolean _attributeExistsInType(StatefulEntityType typeDefinition) { - LinkedHashMap attrsDef = typeDefinition.getAttributesDef(); - return attrsDef.get(getAttributeName()) != null; - } - - private NodeTemplate _findHostContainingAttribute(String nodeTemplateName) { - NodeTemplate nodeTemplate = _findNodeTemplate(nodeTemplateName); - if (nodeTemplate != null) { - LinkedHashMap hostedOnRel = - (LinkedHashMap) EntityType.TOSCA_DEF.get(HOSTED_ON); - for (RequirementAssignment r : nodeTemplate.getRequirements().getAll()) { - String targetName = r.getNodeTemplateName(); - NodeTemplate targetNode = _findNodeTemplate(targetName); - NodeType targetType = (NodeType) targetNode.getTypeDefinition(); - for (CapabilityTypeDef capability : targetType.getCapabilitiesObjects()) { -// if(((ArrayList)hostedOnRel.get("valid_target_types")).contains(capability.getType())) { - if (capability.inheritsFrom((ArrayList) hostedOnRel.get("valid_target_types"))) { - if (_attributeExistsInType(targetType)) { - return targetNode; - } - return _findHostContainingAttribute(targetName); - } - } - } - } - return null; - } - - - private NodeTemplate _findNodeTemplate(String nodeTemplateName) { - if (nodeTemplateName.equals(HOST)) { - // Currently this is the only way to 
tell whether the function - // is used within the outputs section of the TOSCA template. - if (context instanceof ArrayList) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE151", - "ValueError: \"get_attribute: [ HOST, ... ]\" is not allowed in \"outputs\" section of the TOSCA template")); - return null; - } - NodeTemplate nodeTpl = _findHostContainingAttribute(SELF); - if (nodeTpl == null) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE152", String.format( - "ValueError: \"get_attribute: [ HOST, ... ]\" was used in " + - "node template \"%s\" but \"%s\" was not found in " + - "the relationship chain", ((NodeTemplate) context).getName(), HOSTED_ON))); - return null; - } - return nodeTpl; - } - if (nodeTemplateName.equals(TARGET)) { - if (!(((EntityTemplate) context).getTypeDefinition() instanceof RelationshipType)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE153", - "KeyError: \"TARGET\" keyword can only be used in context " + - " to \"Relationships\" target node")); - return null; - } - return ((RelationshipTemplate) context).getTarget(); - } - if (nodeTemplateName.equals(SOURCE)) { - if (!(((EntityTemplate) context).getTypeDefinition() instanceof RelationshipType)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE154", - "KeyError: \"SOURCE\" keyword can only be used in context " + - " to \"Relationships\" source node")); - return null; - } - return ((RelationshipTemplate) context).getTarget(); - } - String name; - if (nodeTemplateName.equals(SELF) && !(context instanceof ArrayList)) { - name = ((NodeTemplate) context).getName(); - } else { - name = nodeTemplateName; - } - for (NodeTemplate nt : toscaTpl.getNodeTemplates()) { - if (nt.getName().equals(name)) { - return nt; - } - } - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE155", String.format( - 
"KeyError: Node template \"%s\" was not found", nodeTemplateName))); - return null; - } - - public AttributeDef _findReqOrCapAttribute(String reqOrCap, String attrName) { - - NodeTemplate nodeTpl = _findNodeTemplate((String) args.get(0)); - // Find attribute in node template's requirements - for (RequirementAssignment r : nodeTpl.getRequirements().getAll()) { - String nodeName = r.getNodeTemplateName(); - if (r.getName().equals(reqOrCap)) { - NodeTemplate nodeTemplate = _findNodeTemplate(nodeName); - return _getCapabilityAttribute(nodeTemplate, r.getName(), attrName); - } - } - // If requirement was not found, look in node template's capabilities - return _getCapabilityAttribute(nodeTpl, reqOrCap, attrName); - } - - private AttributeDef _getCapabilityAttribute(NodeTemplate nodeTemplate, - String capabilityName, - String attrName) { - // Gets a node template capability attribute - CapabilityAssignment cap = nodeTemplate.getCapabilities().getCapabilityByName(capabilityName); - - if (cap != null) { - AttributeDef attribute = null; - LinkedHashMap attrs = - cap.getDefinition().getAttributesDef(); - if (attrs != null && attrs.keySet().contains(attrName)) { - attribute = attrs.get(attrName); - } - if (attribute == null) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE156", String.format( - "KeyError: Attribute \"%s\" was not found in capability \"%s\" of node template \"%s\" referenced from node template \"%s\"", - attrName, capabilityName, nodeTemplate.getName(), ((NodeTemplate) context).getName()))); - } - return attribute; - } - String msg = String.format( - "Requirement/CapabilityAssignment \"%s\" referenced from node template \"%s\" was not found in node template \"%s\"", - capabilityName, ((NodeTemplate) context).getName(), nodeTemplate.getName()); - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE157", "KeyError: " + msg)); - return null; - } - - String getNodeTemplateName() { - return 
(String) args.get(0); - } - - String getAttributeName() { - return (String) args.get(1); - } - -} - -/*python - -class GetAttribute(Function): -"""Get an attribute value of an entity defined in the service template - -Node template attributes values are set in runtime and therefore its the -responsibility of the Tosca engine to implement the evaluation of -get_attribute functions. - -Arguments: - -* Node template name | HOST. -* Attribute name. - -If the HOST keyword is passed as the node template name argument the -function will search each node template along the HostedOn relationship -chain until a node which contains the attribute is found. - -Examples: - -* { get_attribute: [ server, private_address ] } -* { get_attribute: [ HOST, private_address ] } -* { get_attribute: [ HOST, private_address, 0 ] } -* { get_attribute: [ HOST, private_address, 0, some_prop] } -""" - -def validate(self): - if len(self.args) < 2: - ValidationIssueCollector.appendException( - ValueError(_('Illegal arguments for function "{0}". Expected ' - 'arguments: "node-template-name", "req-or-cap"' - '(optional), "property name"' - ).format(GET_ATTRIBUTE))) - return - elif len(self.args) == 2: - self._find_node_template_containing_attribute() - else: - node_tpl = self._find_node_template(self.args[0]) - if node_tpl is None: - return - index = 2 - attrs = node_tpl.type_definition.get_attributes_def() - found = [attrs[self.args[1]]] if self.args[1] in attrs else [] - if found: - attr = found[0] - else: - index = 3 - # then check the req or caps - attr = self._find_req_or_cap_attribute(self.args[1], - self.args[2]) - - value_type = attr.schema['type'] - if len(self.args) > index: - for elem in self.args[index:]: - if value_type == "list": - if not isinstance(elem, int): - ValidationIssueCollector.appendException( - ValueError(_('Illegal arguments for function' - ' "{0}". 
"{1}" Expected positive' - ' integer argument' - ).format(GET_ATTRIBUTE, elem))) - value_type = attr.schema['entry_schema']['type'] - elif value_type == "map": - value_type = attr.schema['entry_schema']['type'] - elif value_type in Schema.PROPERTY_TYPES: - ValidationIssueCollector.appendException( - ValueError(_('Illegal arguments for function' - ' "{0}". Unexpected attribute/' - 'index value "{1}"' - ).format(GET_ATTRIBUTE, elem))) - return - else: # It is a complex type - data_type = DataType(value_type) - props = data_type.get_all_properties() - found = [props[elem]] if elem in props else [] - if found: - prop = found[0] - value_type = prop.schema['type'] - else: - ValidationIssueCollector.appendException( - KeyError(_('Illegal arguments for function' - ' "{0}". Attribute name "{1}" not' - ' found in "{2}"' - ).format(GET_ATTRIBUTE, - elem, - value_type))) - -def result(self): - return self - -def get_referenced_node_template(self): - """Gets the NodeTemplate instance the get_attribute function refers to. - - If HOST keyword was used as the node template argument, the node - template which contains the attribute along the HostedOn relationship - chain will be returned. 
- """ - return self._find_node_template_containing_attribute() - -# Attributes can be explicitly created as part of the type definition -# or a property name can be implicitly used as an attribute name -def _find_node_template_containing_attribute(self): - node_tpl = self._find_node_template(self.args[0]) - if node_tpl and \ - not self._attribute_exists_in_type(node_tpl.type_definition) \ - and self.attribute_name not in node_tpl.get_properties(): - ValidationIssueCollector.appendException( - KeyError(_('Attribute "%(att)s" was not found in node ' - 'template "%(ntpl)s".') % - {'att': self.attribute_name, - 'ntpl': node_tpl.name})) - return node_tpl - -def _attribute_exists_in_type(self, type_definition): - attrs_def = type_definition.get_attributes_def() - found = [attrs_def[self.attribute_name]] \ - if self.attribute_name in attrs_def else [] - return len(found) == 1 - -def _find_host_containing_attribute(self, node_template_name=SELF): - node_template = self._find_node_template(node_template_name) - if node_template: - hosted_on_rel = EntityType.TOSCA_DEF[HOSTED_ON] - for r in node_template.requirements: - for requirement, target_name in r.items(): - target_node = self._find_node_template(target_name) - target_type = target_node.type_definition - for capability in target_type.get_capabilities_objects(): - if capability.type in \ - hosted_on_rel['valid_target_types']: - if self._attribute_exists_in_type(target_type): - return target_node - return self._find_host_containing_attribute( - target_name) - -def _find_node_template(self, node_template_name): - if node_template_name == HOST: - # Currently this is the only way to tell whether the function - # is used within the outputs section of the TOSCA template. - if isinstance(self.context, list): - ValidationIssueCollector.appendException( - ValueError(_( - '"get_attribute: [ HOST, ... 
]" is not allowed in ' - '"outputs" section of the TOSCA template.'))) - return - node_tpl = self._find_host_containing_attribute() - if not node_tpl: - ValidationIssueCollector.appendException( - ValueError(_( - '"get_attribute: [ HOST, ... ]" was used in node ' - 'template "{0}" but "{1}" was not found in ' - 'the relationship chain.').format(self.context.name, - HOSTED_ON))) - return - return node_tpl - if node_template_name == TARGET: - if not isinstance(self.context.type_definition, RelationshipType): - ValidationIssueCollector.appendException( - KeyError(_('"TARGET" keyword can only be used in context' - ' to "Relationships" target node'))) - return - return self.context.target - if node_template_name == SOURCE: - if not isinstance(self.context.type_definition, RelationshipType): - ValidationIssueCollector.appendException( - KeyError(_('"SOURCE" keyword can only be used in context' - ' to "Relationships" source node'))) - return - return self.context.source - name = self.context.name \ - if node_template_name == SELF and \ - not isinstance(self.context, list) \ - else node_template_name - for node_template in self.tosca_tpl.nodetemplates: - if node_template.name == name: - return node_template - ValidationIssueCollector.appendException( - KeyError(_( - 'Node template "{0}" was not found.' 
- ).format(node_template_name))) - -def _find_req_or_cap_attribute(self, req_or_cap, attr_name): - node_tpl = self._find_node_template(self.args[0]) - # Find attribute in node template's requirements - for r in node_tpl.requirements: - for req, node_name in r.items(): - if req == req_or_cap: - node_template = self._find_node_template(node_name) - return self._get_capability_attribute( - node_template, - req, - attr_name) - # If requirement was not found, look in node template's capabilities - return self._get_capability_attribute(node_tpl, - req_or_cap, - attr_name) - -def _get_capability_attribute(self, - node_template, - capability_name, - attr_name): - """Gets a node template capability attribute.""" - caps = node_template.get_capabilities() - if caps and capability_name in caps.keys(): - cap = caps[capability_name] - attribute = None - attrs = cap.definition.get_attributes_def() - if attrs and attr_name in attrs.keys(): - attribute = attrs[attr_name] - if not attribute: - ValidationIssueCollector.appendException( - KeyError(_('Attribute "%(attr)s" was not found in ' - 'capability "%(cap)s" of node template ' - '"%(ntpl1)s" referenced from node template ' - '"%(ntpl2)s".') % {'attr': attr_name, - 'cap': capability_name, - 'ntpl1': node_template.name, - 'ntpl2': self.context.name})) - return attribute - msg = _('Requirement/CapabilityAssignment "{0}" referenced from node template ' - '"{1}" was not found in node template "{2}".').format( - capability_name, - self.context.name, - node_template.name) - ValidationIssueCollector.appendException(KeyError(msg)) - -@property -def node_template_name(self): - return self.args[0] - -@property -def attribute_name(self): - return self.args[1] -*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetInput.java b/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetInput.java deleted file mode 100644 index 0c96b0f..0000000 --- 
a/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetInput.java +++ /dev/null @@ -1,137 +0,0 @@ -package org.openecomp.sdc.toscaparser.api.functions; - -import org.openecomp.sdc.toscaparser.api.DataEntity; -import org.openecomp.sdc.toscaparser.api.TopologyTemplate; -import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; -import org.openecomp.sdc.toscaparser.api.parameters.Input; -import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; - -import java.util.ArrayList; -import java.util.LinkedHashMap; - -public class GetInput extends Function { - - public GetInput(TopologyTemplate toscaTpl,Object context,String name,ArrayList _args) { - super(toscaTpl,context,name,_args); - - } - - @Override - void validate() { -// if(args.size() != 1) { -// //PA - changed to WARNING from CRITICAL after talking to Renana, 22/05/2017 -// ThreadLocalsHolder.getCollector().appendWarning(String.format( -// "ValueError: Expected one argument for function \"get_input\" but received \"%s\"", -// args.toString())); -// } - if(args.size() > 2) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE009", String.format( - "ValueError: Expected max 2 arguments for function \"get_input\" but received \"%s\"", - args.size()))); - } - boolean bFound = false; - for(Input inp: toscaTpl.getInputs()) { - if(inp.getName().equals(args.get(0))) { - bFound = true; - break; - } - } - if(!bFound) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE158", String.format( - "UnknownInputError: Unknown input \"%s\"",args.get(0)))); - } - } - - public Object result() { - if(toscaTpl.getParsedParams() != null && - toscaTpl.getParsedParams().get(getInputName()) != null) { - LinkedHashMap ttinp = (LinkedHashMap)toscaTpl.getTpl().get("inputs"); - LinkedHashMap ttinpinp = (LinkedHashMap)ttinp.get(getInputName()); - String type = (String)ttinpinp.get("type"); - - Object value = DataEntity.validateDatatype( - type, 
toscaTpl.getParsedParams().get(getInputName()),null,null,null); - //SDC resolving Get Input - if (value instanceof ArrayList){ - if(args.size() == 2 && args.get(1) instanceof Integer && ((ArrayList) value).size()> (Integer)args.get(1)){ - return ((ArrayList) value).get((Integer) args.get(1)); - } - else{ - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE273",String.format( - "GetInputError: cannot resolve input name \"%s\", the expected structure is an argument with a name of input type list and a second argument with an index in the list", args.get(0)))); - return null; - } - } - return value; - } - - Input inputDef = null; - for(Input inpDef: toscaTpl.getInputs()) { - if(getInputName().equals(inpDef.getName())) { - inputDef = inpDef; - break; - } - } - if(inputDef != null) { - if (args.size() == 2 && inputDef.getDefault() != null && inputDef.getDefault() instanceof ArrayList){ - if ( args.get(1) instanceof Integer - && ((ArrayList) inputDef.getDefault()).size()> ((Integer)args.get(1)).intValue()) { - return ((ArrayList) inputDef.getDefault()).get(((Integer)args.get(1)).intValue()); - }else{ - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE274",(String.format( - "GetInputError: cannot resolve input Def name \"%s\", the expected structure is an argument with a name of input type list and a second argument with an index in the list", args.get(0))))); - return null; - } - } - return inputDef.getDefault(); - } - return null; - } - - public String getInputName() { - return (String)args.get(0); - } - -} - -/*python - -class GetInput(Function): -"""Get a property value declared within the input of the service template. - -Arguments: - -* Input name. 
- -Example: - -* get_input: port -""" - -def validate(self): - if len(self.args) != 1: - ValidationIssueCollector.appendException( - ValueError(_( - 'Expected one argument for function "get_input" but ' - 'received "%s".') % self.args)) - inputs = [input.name for input in self.tosca_tpl.inputs] - if self.args[0] not in inputs: - ValidationIssueCollector.appendException( - UnknownInputError(input_name=self.args[0])) - -def result(self): - if self.tosca_tpl.parsed_params and \ - self.input_name in self.tosca_tpl.parsed_params: - return DataEntity.validate_datatype( - self.tosca_tpl.tpl['inputs'][self.input_name]['type'], - self.tosca_tpl.parsed_params[self.input_name]) - - input = [input_def for input_def in self.tosca_tpl.inputs - if self.input_name == input_def.name][0] - return input.default - -@property -def input_name(self): - return self.args[0] - -*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetOperationOutput.java b/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetOperationOutput.java deleted file mode 100644 index 7af7eeb..0000000 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetOperationOutput.java +++ /dev/null @@ -1,226 +0,0 @@ -package org.openecomp.sdc.toscaparser.api.functions; - -import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; - -import java.util.ArrayList; - -import org.openecomp.sdc.toscaparser.api.*; -import org.openecomp.sdc.toscaparser.api.elements.InterfacesDef; -import org.openecomp.sdc.toscaparser.api.elements.RelationshipType; -import org.openecomp.sdc.toscaparser.api.elements.StatefulEntityType; -import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; - -public class GetOperationOutput extends Function { - - public GetOperationOutput(TopologyTemplate ttpl,Object context,String name,ArrayList args) { - super(ttpl,context,name,args); - } - - @Override - public void validate() { - if(args.size() == 4) { - 
_findNodeTemplate((String)args.get(0)); - String interfaceName = _findInterfaceName((String)args.get(1)); - _findOperationName(interfaceName,(String)args.get(2)); - } - else { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE159", - "ValueError: Illegal arguments for function \"get_operation_output\". " + - "Expected arguments: \"template_name\",\"interface_name\"," + - "\"operation_name\",\"output_variable_name\"")); - } - } - - private String _findInterfaceName(String _interfaceName) { - boolean bFound = false; - for(String sect: InterfacesDef.SECTIONS) { - if(sect.equals(_interfaceName)) { - bFound = true; - break; - } - } - if(bFound) { - return _interfaceName; - } - else { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE160", String.format( - "ValueError: invalid interface name \"%s\" in \"get_operation_output\"", - _interfaceName))); - return null; - } - } - - private String _findOperationName(String interfaceName,String operationName) { - - if(interfaceName.equals("Configure") || - interfaceName.equals("tosca.interfaces.node.relationship.Configure")) { - boolean bFound = false; - for(String sect: StatefulEntityType.interfacesRelationshipConfigureOperations) { - if(sect.equals(operationName)) { - bFound = true; - break; - } - } - if(bFound) { - return operationName; - } - else { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE161", String.format( - "ValueError: Invalid operation of Configure interface \"%s\" in \"get_operation_output\"", - operationName))); - return null; - } - } - if(interfaceName.equals("Standard") || - interfaceName.equals("tosca.interfaces.node.lifecycle.Standard")) { - boolean bFound = false; - for(String sect: StatefulEntityType.interfacesNodeLifecycleOperations) { - if(sect.equals(operationName)) { - bFound = true; - break; - } - } - if(bFound) { - return operationName; - } - else { - 
ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE162", String.format( - "ValueError: Invalid operation of Configure interface \"%s\" in \"get_operation_output\"", - operationName))); - return null; - } - } - else { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE163", String.format( - "ValueError: Invalid interface name \"%s\" in \"get_operation_output\"", - interfaceName))); - return null; - } - } - - private NodeTemplate _findNodeTemplate(String nodeTemplateName) { - if(nodeTemplateName.equals(TARGET)) { - if(!(((EntityTemplate)context).getTypeDefinition() instanceof RelationshipType)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE164", - "KeyError: \"TARGET\" keyword can only be used in context " + - " to \"Relationships\" target node")); - return null; - } - return ((RelationshipTemplate)context).getTarget(); - } - if(nodeTemplateName.equals(SOURCE)) { - if(!(((EntityTemplate)context).getTypeDefinition() instanceof RelationshipType)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE165", - "KeyError: \"SOURCE\" keyword can only be used in context " + - " to \"Relationships\" source node")); - return null; - } - return ((RelationshipTemplate)context).getTarget(); - } - String name; - if(nodeTemplateName.equals(SELF) && !(context instanceof ArrayList)) { - name = ((NodeTemplate)context).getName(); - } - else { - name = nodeTemplateName; - } - for(NodeTemplate nt: toscaTpl.getNodeTemplates()) { - if(nodeTemplateName.equals(name)) { - return nt; - } - } - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE166", String.format( - "KeyError: Node template \"%s\" was not found",nodeTemplateName))); - return null; - } - - @Override - public Object result() { - return this; - } - -} - -/*python - -class GetOperationOutput(Function): -def validate(self): - if len(self.args) == 4: - 
self._find_node_template(self.args[0]) - interface_name = self._find_interface_name(self.args[1]) - self._find_operation_name(interface_name, self.args[2]) - else: - ValidationIssueCollector.appendException( - ValueError(_('Illegal arguments for function "{0}". Expected ' - 'arguments: "template_name","interface_name",' - '"operation_name","output_variable_name"' - ).format(GET_OPERATION_OUTPUT))) - return - -def _find_interface_name(self, interface_name): - if interface_name in toscaparser.elements.interfaces.SECTIONS: - return interface_name - else: - ValidationIssueCollector.appendException( - ValueError(_('Enter a valid interface name' - ).format(GET_OPERATION_OUTPUT))) - return - -def _find_operation_name(self, interface_name, operation_name): - if(interface_name == 'Configure' or - interface_name == 'tosca.interfaces.node.relationship.Configure'): - if(operation_name in - StatefulEntityType. - interfaces_relationship_configure_operations): - return operation_name - else: - ValidationIssueCollector.appendException( - ValueError(_('Enter an operation of Configure interface' - ).format(GET_OPERATION_OUTPUT))) - return - elif(interface_name == 'Standard' or - interface_name == 'tosca.interfaces.node.lifecycle.Standard'): - if(operation_name in - StatefulEntityType.interfaces_node_lifecycle_operations): - return operation_name - else: - ValidationIssueCollector.appendException( - ValueError(_('Enter an operation of Standard interface' - ).format(GET_OPERATION_OUTPUT))) - return - else: - ValidationIssueCollector.appendException( - ValueError(_('Enter a valid operation name' - ).format(GET_OPERATION_OUTPUT))) - return - -def _find_node_template(self, node_template_name): - if node_template_name == TARGET: - if not isinstance(self.context.type_definition, RelationshipType): - ValidationIssueCollector.appendException( - KeyError(_('"TARGET" keyword can only be used in context' - ' to "Relationships" target node'))) - return - return self.context.target - if 
node_template_name == SOURCE: - if not isinstance(self.context.type_definition, RelationshipType): - ValidationIssueCollector.appendException( - KeyError(_('"SOURCE" keyword can only be used in context' - ' to "Relationships" source node'))) - return - return self.context.source - name = self.context.name \ - if node_template_name == SELF and \ - not isinstance(self.context, list) \ - else node_template_name - for node_template in self.tosca_tpl.nodetemplates: - if node_template.name == name: - return node_template - ValidationIssueCollector.appendException( - KeyError(_( - 'Node template "{0}" was not found.' - ).format(node_template_name))) - -def result(self): - return self -*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetProperty.java b/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetProperty.java deleted file mode 100644 index 1abee6e..0000000 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetProperty.java +++ /dev/null @@ -1,627 +0,0 @@ -package org.openecomp.sdc.toscaparser.api.functions; - -import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; - -import java.util.ArrayList; -import java.util.LinkedHashMap; - -import org.openecomp.sdc.toscaparser.api.*; -import org.openecomp.sdc.toscaparser.api.elements.CapabilityTypeDef; -import org.openecomp.sdc.toscaparser.api.elements.EntityType; -import org.openecomp.sdc.toscaparser.api.elements.NodeType; -import org.openecomp.sdc.toscaparser.api.elements.PropertyDef; -import org.openecomp.sdc.toscaparser.api.elements.RelationshipType; -import org.openecomp.sdc.toscaparser.api.elements.StatefulEntityType; -import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; - -public class GetProperty extends Function { - // Get a property value of an entity defined in the same service template - - // Arguments: - - // * Node template name | SELF | HOST | SOURCE | TARGET. 
- // * Requirement or capability name (optional). - // * Property name. - - // If requirement or capability name is specified, the behavior is as follows: - // The req or cap name is first looked up in the specified node template's - // requirements. - // If found, it would search for a matching capability - // of an other node template and get its property as specified in function - // arguments. - // Otherwise, the req or cap name would be looked up in the specified - // node template's capabilities and if found, it would return the property of - // the capability as specified in function arguments. - - // Examples: - - // * { get_property: [ mysql_server, port ] } - // * { get_property: [ SELF, db_port ] } - // * { get_property: [ SELF, database_endpoint, port ] } - // * { get_property: [ SELF, database_endpoint, port, 1 ] } - - - public GetProperty(TopologyTemplate ttpl,Object context,String name,ArrayList args) { - super(ttpl,context,name,args); - } - - @Override - void validate() { - if(args.size() < 2) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE167", - "ValueError: Illegal arguments for function \"get_property\". 
Expected arguments: \"node-template-name\", \"req-or-cap\" (optional), \"property name.\"")); - return; - } - if(args.size() == 2) { - Property foundProp = _findProperty((String)args.get(1)); - if(foundProp == null) { - return; - } - Object prop = foundProp.getValue(); - if(prop instanceof Function) { - Function.getFunction(toscaTpl,context, prop, toscaTpl.getResolveGetInput()); - } - } - else if(args.size() >= 3) { - // do not use _find_property to avoid raise KeyError - // if the prop is not found - // First check if there is property with this name - NodeTemplate nodeTpl = _findNodeTemplate((String)args.get(0)); - LinkedHashMap props; - if(nodeTpl != null) { - props = nodeTpl.getProperties(); - } - else { - props = new LinkedHashMap<>(); - } - int index = 2; - Object propertyValue; - if(props.get(args.get(1)) != null) { - propertyValue = ((Property)props.get(args.get(1))).getValue(); - } - else { - index = 3; - // then check the req or caps - propertyValue = _findReqOrCapProperty((String)args.get(1),(String)args.get(2)); - } - - if(args.size() > index) { - for(Object elem: args.subList(index,args.size()-1)) { - if(propertyValue instanceof ArrayList) { - int intElem = (int)elem; - propertyValue = _getIndexValue(propertyValue,intElem); - } - else { - propertyValue = _getAttributeValue(propertyValue,(String)elem); - } - } - } - } - } - - @SuppressWarnings("unchecked") - private Object _findReqOrCapProperty(String reqOrCap,String propertyName) { - NodeTemplate nodeTpl = _findNodeTemplate((String)args.get(0)); - if(nodeTpl == null) { - return null; - } - // look for property in node template's requirements - for(RequirementAssignment req: nodeTpl.getRequirements().getAll()) { - String nodeName = req.getNodeTemplateName(); - if(req.getName().equals(reqOrCap)) { - NodeTemplate nodeTemplate = _findNodeTemplate(nodeName); - return _getCapabilityProperty(nodeTemplate,req.getName(),propertyName,true); - } - } - // If requirement was not found, look in node template's 
capabilities - return _getCapabilityProperty(nodeTpl,reqOrCap,propertyName,true); - } - - private Object _getCapabilityProperty(NodeTemplate nodeTemplate, - String capabilityName, - String propertyName, - boolean throwErrors) { - - // Gets a node template capability property - Object property = null; - CapabilityAssignment cap = nodeTemplate.getCapabilities().getCapabilityByName(capabilityName); - if(cap != null) { - LinkedHashMap props = cap.getProperties(); - if(props != null && props.get(propertyName) != null) { - property = ((Property)props.get(propertyName)).getValue(); - } - if(property == null && throwErrors) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE168", String.format( - "KeyError: Property \"%s\" was not found in capability \"%s\" of node template \"%s\" referenced from node template \"%s\"", - propertyName,capabilityName,nodeTemplate.getName(),((NodeTemplate)context).getName()))); - } - return property; - } - if(throwErrors) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE169", String.format( - "KeyError: Requirement/CapabilityAssignment \"%s\" referenced from node template \"%s\" was not found in node template \"%s\"", - capabilityName,((NodeTemplate)context).getName(),nodeTemplate.getName()))); - } - - return null; - } - - private Property _findProperty(String propertyName) { - NodeTemplate nodeTpl = _findNodeTemplate((String)args.get(0)); - if(nodeTpl == null) { - return null; - } - LinkedHashMap props = nodeTpl.getProperties(); - Property found = props.get(propertyName); - if(found == null) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE170", String.format( - "KeyError: Property \"%s\" was not found in node template \"%s\"", - propertyName,nodeTpl.getName()))); - } - return found; - } - - private NodeTemplate _findNodeTemplate(String nodeTemplateName) { - if(nodeTemplateName.equals(SELF)) { - return (NodeTemplate)context; 
- } - // enable the HOST value in the function - if(nodeTemplateName.equals(HOST)) { - NodeTemplate node = _findHostContainingProperty(null); - if(node == null) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE171", String.format( - "KeyError: Property \"%s\" was not found in capability \"%s\" of node template \"%s\" referenced from node template \"%s\"", - (String)args.get(2),(String)args.get(1),((NodeTemplate)context).getName()))); - return null; - } - return node; - } - if(nodeTemplateName.equals(TARGET)) { - if(!(((RelationshipTemplate)context).getTypeDefinition() instanceof RelationshipType)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE172", - "KeyError: \"TARGET\" keyword can only be used in context to \"Relationships\" target node")); - return null; - } - return ((RelationshipTemplate)context).getTarget(); - } - if(nodeTemplateName.equals(SOURCE)) { - if(!(((RelationshipTemplate)context).getTypeDefinition() instanceof RelationshipType)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE173", - "KeyError: \"SOURCE\" keyword can only be used in context to \"Relationships\" target node")); - return null; - } - return ((RelationshipTemplate)context).getSource(); - } - if(toscaTpl.getNodeTemplates() == null) { - return null; - } - for(NodeTemplate nodeTemplate: toscaTpl.getNodeTemplates()) { - if(nodeTemplate.getName().equals(nodeTemplateName)) { - return nodeTemplate; - } - } - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE174", String.format( - "KeyError: Node template \"%s\" was not found. 
Referenced from Node Template \"%s\"", - nodeTemplateName,((NodeTemplate)context).getName()))); - - return null; - } - - @SuppressWarnings("rawtypes") - private Object _getIndexValue(Object value,int index) { - if(value instanceof ArrayList) { - if(index < ((ArrayList)value).size()) { - return ((ArrayList)value).get(index); - } - else { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE175", String.format( - "KeyError: Property \"%s\" found in capability \"%s\" referenced from node template \"%s\" must have an element with index %d", - args.get(2),args.get(1),((NodeTemplate)context).getName(),index))); - - } - } - else { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE176", String.format( - "KeyError: Property \"%s\" found in capability \"%s\" referenced from node template \"%s\" must be a list", - args.get(2),args.get(1),((NodeTemplate)context).getName()))); - } - return null; - } - - @SuppressWarnings("unchecked") - private Object _getAttributeValue(Object value,String attribute) { - if(value instanceof LinkedHashMap) { - Object ov = ((LinkedHashMap)value).get(attribute); - if(ov != null) { - return ov; - } - else { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE177", String.format( - "KeyError: Property \"%s\" found in capability \"%s\" referenced from node template \"%s\" must have an attribute named \"%s\"", - args.get(2),args.get(1),((NodeTemplate)context).getName(),attribute))); - } - } - else { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE178", String.format( - "KeyError: Property \"%s\" found in capability \"%s\" referenced from node template \"%s\" must be a dict", - args.get(2),args.get(1),((NodeTemplate)context).getName()))); - } - return null; - } - - // Add this functions similar to get_attribute case - private NodeTemplate _findHostContainingProperty(String nodeTemplateName) { - 
if(nodeTemplateName == null) { - nodeTemplateName = SELF; - } - NodeTemplate nodeTemplate = _findNodeTemplate(nodeTemplateName); - LinkedHashMap hostedOnRel = (LinkedHashMap) - EntityType.TOSCA_DEF.get(HOSTED_ON); - for(RequirementAssignment requirement: nodeTemplate.getRequirements().getAll()) { - String targetName = requirement.getNodeTemplateName(); - NodeTemplate targetNode = _findNodeTemplate(targetName); - NodeType targetType = (NodeType)targetNode.getTypeDefinition(); - for(CapabilityTypeDef capDef: targetType.getCapabilitiesObjects()) { - if(capDef.inheritsFrom((ArrayList)hostedOnRel.get("valid_target_types"))) { - if(_propertyExistsInType(targetType)) { - return targetNode; - } - // If requirement was not found, look in node - // template's capabilities - if(args.size() > 2 && - _getCapabilityProperty(targetNode,(String)args.get(1),(String)args.get(2),false) != null) { - return targetNode; - } - - return _findHostContainingProperty(targetName); - } - } - - } - return null; - } - - private boolean _propertyExistsInType(StatefulEntityType typeDefinition) { - LinkedHashMap propsDef = typeDefinition.getPropertiesDef(); - return propsDef.keySet().contains((String)args.get(1)); - } - - @Override - public Object result() { - Object propertyValue; - if(args.size() >= 3) { - // First check if there is property with this name - NodeTemplate nodeTpl = _findNodeTemplate((String)args.get(0)); - LinkedHashMap props; - if(nodeTpl != null) { - props = nodeTpl.getProperties(); - } - else { - props = new LinkedHashMap<>(); - } - int index = 2; - if(props.get(args.get(1)) != null) { - propertyValue = ((Property)props.get(args.get(1))).getValue(); - } - else { - index = 3; - // then check the req or caps - propertyValue = _findReqOrCapProperty((String)args.get(1),(String)args.get(2)); - } - - if(args.size() > index) { - for(Object elem: args.subList(index,args.size()-1)) { - if(propertyValue instanceof ArrayList) { - int intElem = (int)elem; - propertyValue = 
_getIndexValue(propertyValue,intElem); - } - else { - propertyValue = _getAttributeValue(propertyValue,(String)elem); - } - } - } - } - else { - propertyValue = _findProperty((String)args.get(1)).getValue(); - } - if(propertyValue instanceof Function) { - return ((Function)propertyValue).result(); - } - return Function.getFunction(toscaTpl,context,propertyValue, toscaTpl.getResolveGetInput()); - } - - public String getNodeTemplateName() { - return (String)args.get(0); - } - - public String getPropertyName() { - if(args.size() > 2) { - return (String)args.get(2); - } - return (String)args.get(1); - } - - public String getReqorCap() { - if(args.size() > 2) { - return (String)args.get(1); - } - return null; - } - -} - -/*python - -class GetProperty(Function): -"""Get a property value of an entity defined in the same service template. - -Arguments: - -* Node template name | SELF | HOST | SOURCE | TARGET. -* Requirement or capability name (optional). -* Property name. - -If requirement or capability name is specified, the behavior is as follows: -The req or cap name is first looked up in the specified node template's -requirements. -If found, it would search for a matching capability -of an other node template and get its property as specified in function -arguments. -Otherwise, the req or cap name would be looked up in the specified -node template's capabilities and if found, it would return the property of -the capability as specified in function arguments. 
- -Examples: - -* { get_property: [ mysql_server, port ] } -* { get_property: [ SELF, db_port ] } -* { get_property: [ SELF, database_endpoint, port ] } -* { get_property: [ SELF, database_endpoint, port, 1 ] } -""" - -def validate(self): - if len(self.args) < 2: - ValidationIssueCollector.appendException( - ValueError(_( - 'Expected arguments: "node-template-name", "req-or-cap" ' - '(optional), "property name".'))) - return - if len(self.args) == 2: - found_prop = self._find_property(self.args[1]) - if not found_prop: - return - prop = found_prop.value - if not isinstance(prop, Function): - get_function(self.tosca_tpl, self.context, prop) - elif len(self.args) >= 3: - # do not use _find_property to avoid raise KeyError - # if the prop is not found - # First check if there is property with this name - node_tpl = self._find_node_template(self.args[0]) - props = node_tpl.get_properties() if node_tpl else [] - index = 2 - found = [props[self.args[1]]] if self.args[1] in props else [] - if found: - property_value = found[0].value - else: - index = 3 - # then check the req or caps - property_value = self._find_req_or_cap_property(self.args[1], - self.args[2]) - if len(self.args) > index: - for elem in self.args[index:]: - if isinstance(property_value, list): - int_elem = int(elem) - property_value = self._get_index_value(property_value, - int_elem) - else: - property_value = self._get_attribute_value( - property_value, - elem) - -def _find_req_or_cap_property(self, req_or_cap, property_name): - node_tpl = self._find_node_template(self.args[0]) - # Find property in node template's requirements - for r in node_tpl.requirements: - for req, node_name in r.items(): - if req == req_or_cap: - node_template = self._find_node_template(node_name) - return self._get_capability_property( - node_template, - req, - property_name) - # If requirement was not found, look in node template's capabilities - return self._get_capability_property(node_tpl, - req_or_cap, - property_name) - 
-def _get_capability_property(self, - node_template, - capability_name, - property_name): - """Gets a node template capability property.""" - caps = node_template.get_capabilities() - if caps and capability_name in caps.keys(): - cap = caps[capability_name] - property = None - props = cap.get_properties() - if props and property_name in props.keys(): - property = props[property_name].value - if not property: - ValidationIssueCollector.appendException( - KeyError(_('Property "%(prop)s" was not found in ' - 'capability "%(cap)s" of node template ' - '"%(ntpl1)s" referenced from node template ' - '"%(ntpl2)s".') % {'prop': property_name, - 'cap': capability_name, - 'ntpl1': node_template.name, - 'ntpl2': self.context.name})) - return property - msg = _('Requirement/CapabilityAssignment "{0}" referenced from node template ' - '"{1}" was not found in node template "{2}".').format( - capability_name, - self.context.name, - node_template.name) - ValidationIssueCollector.appendException(KeyError(msg)) - -def _find_property(self, property_name): - node_tpl = self._find_node_template(self.args[0]) - if not node_tpl: - return - props = node_tpl.get_properties() - found = [props[property_name]] if property_name in props else [] - if len(found) == 0: - ValidationIssueCollector.appendException( - KeyError(_('Property "%(prop)s" was not found in node ' - 'template "%(ntpl)s".') % - {'prop': property_name, - 'ntpl': node_tpl.name})) - return None - return found[0] - -def _find_node_template(self, node_template_name): - if node_template_name == SELF: - return self.context - # enable the HOST value in the function - if node_template_name == HOST: - return self._find_host_containing_property() - if node_template_name == TARGET: - if not isinstance(self.context.type_definition, RelationshipType): - ValidationIssueCollector.appendException( - KeyError(_('"TARGET" keyword can only be used in context' - ' to "Relationships" target node'))) - return - return self.context.target - if 
node_template_name == SOURCE: - if not isinstance(self.context.type_definition, RelationshipType): - ValidationIssueCollector.appendException( - KeyError(_('"SOURCE" keyword can only be used in context' - ' to "Relationships" source node'))) - return - return self.context.source - if not hasattr(self.tosca_tpl, 'nodetemplates'): - return - for node_template in self.tosca_tpl.nodetemplates: - if node_template.name == node_template_name: - return node_template - ValidationIssueCollector.appendException( - KeyError(_( - 'Node template "{0}" was not found.' - ).format(node_template_name))) - -def _get_index_value(self, value, index): - if isinstance(value, list): - if index < len(value): - return value[index] - else: - ValidationIssueCollector.appendException( - KeyError(_( - "Property '{0}' found in capability '{1}'" - " referenced from node template {2}" - " must have an element with index {3}."). - format(self.args[2], - self.args[1], - self.context.name, - index))) - else: - ValidationIssueCollector.appendException( - KeyError(_( - "Property '{0}' found in capability '{1}'" - " referenced from node template {2}" - " must be a list.").format(self.args[2], - self.args[1], - self.context.name))) - -def _get_attribute_value(self, value, attibute): - if isinstance(value, dict): - if attibute in value: - return value[attibute] - else: - ValidationIssueCollector.appendException( - KeyError(_( - "Property '{0}' found in capability '{1}'" - " referenced from node template {2}" - " must have an attribute named {3}."). 
- format(self.args[2], - self.args[1], - self.context.name, - attibute))) - else: - ValidationIssueCollector.appendException( - KeyError(_( - "Property '{0}' found in capability '{1}'" - " referenced from node template {2}" - " must be a dict.").format(self.args[2], - self.args[1], - self.context.name))) - -# Add this functions similar to get_attribute case -def _find_host_containing_property(self, node_template_name=SELF): - node_template = self._find_node_template(node_template_name) - hosted_on_rel = EntityType.TOSCA_DEF[HOSTED_ON] - for r in node_template.requirements: - for requirement, target_name in r.items(): - target_node = self._find_node_template(target_name) - target_type = target_node.type_definition - for capability in target_type.get_capabilities_objects(): - if capability.type in hosted_on_rel['valid_target_types']: - if self._property_exists_in_type(target_type): - return target_node - return self._find_host_containing_property( - target_name) - return None - -def _property_exists_in_type(self, type_definition): - props_def = type_definition.get_properties_def() - found = [props_def[self.args[1]]] \ - if self.args[1] in props_def else [] - return len(found) == 1 - -def result(self): - if len(self.args) >= 3: - # First check if there is property with this name - node_tpl = self._find_node_template(self.args[0]) - props = node_tpl.get_properties() if node_tpl else [] - index = 2 - found = [props[self.args[1]]] if self.args[1] in props else [] - if found: - property_value = found[0].value - else: - index = 3 - # then check the req or caps - property_value = self._find_req_or_cap_property(self.args[1], - self.args[2]) - if len(self.args) > index: - for elem in self.args[index:]: - if isinstance(property_value, list): - int_elem = int(elem) - property_value = self._get_index_value(property_value, - int_elem) - else: - property_value = self._get_attribute_value( - property_value, - elem) - else: - property_value = self._find_property(self.args[1]).value 
- if isinstance(property_value, Function): - return property_value.result() - return get_function(self.tosca_tpl, - self.context, - property_value) - -@property -def node_template_name(self): - return self.args[0] - -@property -def property_name(self): - if len(self.args) > 2: - return self.args[2] - return self.args[1] - -@property -def req_or_cap(self): - if len(self.args) > 2: - return self.args[1] - return None -*/ diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/functions/Token.java b/src/main/java/org/openecomp/sdc/toscaparser/api/functions/Token.java deleted file mode 100644 index 8f35a80..0000000 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/functions/Token.java +++ /dev/null @@ -1,111 +0,0 @@ -package org.openecomp.sdc.toscaparser.api.functions; - -import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; - -import java.util.ArrayList; - -import org.openecomp.sdc.toscaparser.api.TopologyTemplate; -import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; - -public class Token extends Function { - // Validate the function and provide an instance of the function - - //The token function is used within a TOSCA service template on a string to - //parse out (tokenize) substrings separated by one or more token characters - //within a larger string. - - //Arguments: - - //* The composite string that contains one or more substrings separated by - // token characters. - //* The string that contains one or more token characters that separate - // substrings within the composite string. - //* The integer indicates the index of the substring to return from the - // composite string. Note that the first substring is denoted by using - // the '0' (zero) integer value. 
- - //Example: - - // [ get_attribute: [ my_server, data_endpoint, ip_address ], ':', 1 ] - - - public Token(TopologyTemplate ttpl,Object context,String name,ArrayList args) { - super(ttpl,context,name,args); - } - - @Override - public Object result() { - return this; - } - - @Override - void validate() { - if(args.size() < 3) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE180", - "ValueError: Invalid arguments for function \"token\". " + - "Expected at least three arguments")); - } - else { - if(!(args.get(1) instanceof String) || - ((String)args.get(1)).length() != 1) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE181", - "ValueError: Invalid arguments for function \"token\". " + - "Expected single char value as second argument")); - } - if(!(args.get(2) instanceof Integer)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE182", - "ValueError: Invalid arguments for function \"token\"" + - "Expected integer value as third argument")); - } - } - } - -} - -/*python - -class Token(Function): -"""Validate the function and provide an instance of the function - -The token function is used within a TOSCA service template on a string to -parse out (tokenize) substrings separated by one or more token characters -within a larger string. - - -Arguments: - -* The composite string that contains one or more substrings separated by - token characters. -* The string that contains one or more token characters that separate - substrings within the composite string. -* The integer indicates the index of the substring to return from the - composite string. Note that the first substring is denoted by using - the '0' (zero) integer value. 
- -Example: - - [ get_attribute: [ my_server, data_endpoint, ip_address ], ':', 1 ] - -""" - -def validate(self): - if len(self.args) < 3: - ValidationIssueCollector.appendException( - ValueError(_('Invalid arguments for function "{0}". Expected ' - 'at least three arguments.').format(TOKEN))) - else: - if not isinstance(self.args[1], str) or len(self.args[1]) != 1: - ValidationIssueCollector.appendException( - ValueError(_('Invalid arguments for function "{0}". ' - 'Expected single char value as second ' - 'argument.').format(TOKEN))) - - if not isinstance(self.args[2], int): - ValidationIssueCollector.appendException( - ValueError(_('Invalid arguments for function "{0}". ' - 'Expected integer value as third ' - 'argument.').format(TOKEN))) - -def result(self): - return self -*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/parameters/Input.java b/src/main/java/org/openecomp/sdc/toscaparser/api/parameters/Input.java deleted file mode 100644 index 15ca6da..0000000 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/parameters/Input.java +++ /dev/null @@ -1,233 +0,0 @@ -package org.openecomp.sdc.toscaparser.api.parameters; - -import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.LinkedHashMap; - -import org.openecomp.sdc.toscaparser.api.DataEntity; -import org.openecomp.sdc.toscaparser.api.elements.EntityType; -import org.openecomp.sdc.toscaparser.api.elements.constraints.Constraint; -import org.openecomp.sdc.toscaparser.api.elements.constraints.Schema; -import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; - -public class Input { - - private static final String TYPE = "type"; - private static final String DESCRIPTION = "description"; - private static final String DEFAULT = "default"; - private static final String CONSTRAINTS = "constraints"; - private static final String REQUIRED = "required"; - private static final 
String STATUS = "status"; - private static final String ENTRY_SCHEMA = "entry_schema"; - - public static final String INTEGER = "integer"; - public static final String STRING = "string"; - public static final String BOOLEAN = "boolean"; - public static final String FLOAT = "float"; - public static final String LIST = "list"; - public static final String MAP = "map"; - public static final String JSON = "json"; - - private static String INPUTFIELD[] = { - TYPE, DESCRIPTION, DEFAULT, CONSTRAINTS, REQUIRED,STATUS, ENTRY_SCHEMA - }; - - private static String PRIMITIVE_TYPES[] = { - INTEGER, STRING, BOOLEAN, FLOAT, LIST, MAP, JSON - }; - - private String name; - private Schema schema; - private LinkedHashMap customDefs; - - public Input(){ - /** - * Added to support Input serialization - */ - } - - public Input(String _name,LinkedHashMap _schemaDict,LinkedHashMap _customDefs) { - name = _name; - schema = new Schema(_name,_schemaDict); - customDefs = _customDefs; - } - - public String getName() { - return name; - } - - public String getType() { - return schema.getType(); - } - - public String getDescription() { - return schema.getDescription(); - } - - public boolean isRequired() { - return schema.isRequired(); - } - - public Object getDefault() { - return schema.getDefault(); - } - - public ArrayList getConstraints() { - return schema.getConstraints(); - } - - public void validate(Object value) { - _validateField(); - _validateType(getType()); - if(value != null) { - _validateValue(value); - } - } - - private void _validateField() { - for(String key: schema.getSchema().keySet()) { - boolean bFound = false; - for(String ifld: INPUTFIELD) { - if(key.equals(ifld)) { - bFound = true; - break; - } - } - if(!bFound) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE214", String.format( - "UnknownFieldError: Input \"%s\" contains unknown field \"%s\"", - name,key))); - } - } - } - - private void _validateType(String inputType) { - boolean 
bFound = false; - for(String pt: Schema.PROPERTY_TYPES) { - if(pt.equals(inputType)) { - bFound = true; - break; - } - } - - if(!bFound) { - if(customDefs.get(inputType) != null) { - bFound = true; - } - } - - if(!bFound) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE215", String.format( - "ValueError: Invalid type \"%s\"",inputType))); - } - } - - private void _validateValue(Object value) { - Object datatype = null; - if(EntityType.TOSCA_DEF.get(getType()) != null) { - datatype = EntityType.TOSCA_DEF.get(getType()); - } - else if(EntityType.TOSCA_DEF.get(EntityType.DATATYPE_NETWORK_PREFIX + getType()) != null) { - datatype = EntityType.TOSCA_DEF.get(EntityType.DATATYPE_NETWORK_PREFIX + getType()); - } - - String type = getType(); - // if it's one of the basic types DON'T look in customDefs - if(Arrays.asList(PRIMITIVE_TYPES).contains(type)) { - DataEntity.validateDatatype(getType(), value, null, (LinkedHashMap)datatype, null); - return; - } - else if(customDefs.get(getType()) != null) { - datatype = customDefs.get(getType()); - DataEntity.validateDatatype(getType(), value, (LinkedHashMap)datatype, customDefs, null); - return; - } - - DataEntity.validateDatatype(getType(), value, null, (LinkedHashMap)datatype, null); - } -} - -/*python - -from toscaparser.common.exception import ValidationIssueCollector -from toscaparser.common.exception import MissingRequiredFieldError -from toscaparser.common.exception import UnknownFieldError -from toscaparser.dataentity import DataEntity -from toscaparser.elements.constraints import Schema -from toscaparser.elements.entity_type import EntityType -from toscaparser.utils.gettextutils import _ - - -log = logging.getLogger('tosca') - - -class Input(object): - - INPUTFIELD = (TYPE, DESCRIPTION, DEFAULT, CONSTRAINTS, REQUIRED, STATUS, - ENTRY_SCHEMA) = ('type', 'description', 'default', - 'constraints', 'required', 'status', - 'entry_schema') - - def __init__(self, name, schema_dict): - 
self.name = name - self.schema = Schema(name, schema_dict) - - self._validate_field() - self.validate_type(self.type) - - @property - def type(self): - return self.schema.type - - @property - def required(self): - return self.schema.required - - @property - def description(self): - return self.schema.description - - @property - def default(self): - return self.schema.default - - @property - def constraints(self): - return self.schema.constraints - - @property - def status(self): - return self.schema.status - - def validate(self, value=None): - if value is not None: - self._validate_value(value) - - def _validate_field(self): - for name in self.schema.schema: - if name not in self.INPUTFIELD: - ValidationIssueCollector.appendException( - UnknownFieldError(what='Input "%s"' % self.name, - field=name)) - - def validate_type(self, input_type): - if input_type not in Schema.PROPERTY_TYPES: - ValidationIssueCollector.appendException( - ValueError(_('Invalid type "%s".') % type)) - - # tODO(anyone) Need to test for any built-in datatype not just network - # that is, tosca.datatypes.* and not assume tosca.datatypes.network.* - # tODO(anyone) Add support for tosca.datatypes.Credential - def _validate_value(self, value): - tosca = EntityType.TOSCA_DEF - datatype = None - if self.type in tosca: - datatype = tosca[self.type] - elif EntityType.DATATYPE_NETWORK_PREFIX + self.type in tosca: - datatype = tosca[EntityType.DATATYPE_NETWORK_PREFIX + self.type] - - DataEntity.validate_datatype(self.type, value, None, datatype) - -*/ diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/parameters/Output.java b/src/main/java/org/openecomp/sdc/toscaparser/api/parameters/Output.java deleted file mode 100644 index 381388b..0000000 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/parameters/Output.java +++ /dev/null @@ -1,110 +0,0 @@ -package org.openecomp.sdc.toscaparser.api.parameters; - -import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; - -import 
java.util.LinkedHashMap; - -import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; - -public class Output { - - private static final String DESCRIPTION = "description"; - public static final String VALUE = "value"; - private static final String OUTPUTFIELD[] = {DESCRIPTION, VALUE}; - - private String name; - private LinkedHashMap attrs;//TYPE??? - - public Output(String oname,LinkedHashMap oattrs) { - name = oname; - attrs = oattrs; - } - - public String getDescription() { - return (String)attrs.get(DESCRIPTION); - } - - public Object getValue() { - return attrs.get(VALUE); - } - - public void validate() { - _validateField(); - } - - private void _validateField() { - if(!(attrs instanceof LinkedHashMap)) { - //TODO wrong error message... - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE216", String.format( - "ValidationError: Output \"%s\" has wrong type. Expecting a dict", - name))); - } - - if(getValue() == null) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE217", String.format( - "MissingRequiredFieldError: Output \"%s\" is missing required \"%s\"", - name,VALUE))); - } - for(String key: attrs.keySet()) { - boolean bFound = false; - for(String of: OUTPUTFIELD) { - if(key.equals(of)) { - bFound = true; - break; - } - } - if(!bFound) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE218", String.format( - "UnknownFieldError: Output \"%s\" contains unknown field \"%s\"", - name,key))); - } - } - } - - // getter/setter - - public String getName() { - return name; - } - - public void setAttr(String name,Object value) { - attrs.put(name, value); - } -} - -/*python - -class Output(object): - - OUTPUTFIELD = (DESCRIPTION, VALUE) = ('description', 'value') - - def __init__(self, name, attrs): - self.name = name - self.attrs = attrs - - @property - def description(self): - return self.attrs.get(self.DESCRIPTION) - - @property - def 
value(self): - return self.attrs.get(self.VALUE) - - def validate(self): - self._validate_field() - - def _validate_field(self): - if not isinstance(self.attrs, dict): - ValidationIssueCollector.appendException( - MissingRequiredFieldError(what='Output "%s"' % self.name, - required=self.VALUE)) - if self.value is None: - ValidationIssueCollector.appendException( - MissingRequiredFieldError(what='Output "%s"' % self.name, - required=self.VALUE)) - for name in self.attrs: - if name not in self.OUTPUTFIELD: - ValidationIssueCollector.appendException( - UnknownFieldError(what='Output "%s"' % self.name, - field=name)) -*/ diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/prereq/CSAR.java b/src/main/java/org/openecomp/sdc/toscaparser/api/prereq/CSAR.java deleted file mode 100644 index b64bd9a..0000000 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/prereq/CSAR.java +++ /dev/null @@ -1,785 +0,0 @@ -package org.openecomp.sdc.toscaparser.api.prereq; - -import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; - -import java.io.BufferedOutputStream; -import java.io.File; -import java.io.FileInputStream; -import java.io.FileOutputStream; -import java.io.IOException; -import java.io.InputStream; -import java.io.RandomAccessFile; -import java.net.URL; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.nio.file.StandardCopyOption; -import java.util.*; -import java.util.zip.ZipEntry; -import java.util.zip.ZipFile; -import java.util.zip.ZipInputStream; - -import org.openecomp.sdc.toscaparser.api.ImportsLoader; -import org.openecomp.sdc.toscaparser.api.common.JToscaException; -import org.openecomp.sdc.toscaparser.api.utils.JToscaErrorCodes; -import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; -import org.openecomp.sdc.toscaparser.api.utils.UrlUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.yaml.snakeyaml.Yaml; - -public class CSAR { - - private static Logger 
log = LoggerFactory.getLogger(CSAR.class.getName()); - private static final ArrayList META_PROPERTIES_FILES = new ArrayList<>(Arrays.asList("TOSCA-Metadata/TOSCA.meta", "csar.meta")); - - private String path; - private boolean isFile; - private boolean isValidated; - private boolean errorCaught; - private String csar; - private String tempDir; -// private Metadata metaData; - private File tempFile; - private LinkedHashMap> metaProperties; - - public CSAR(String csarPath, boolean aFile) { - path = csarPath; - isFile = aFile; - isValidated = false; - errorCaught = false; - csar = null; - tempDir = null; - tempFile = null; - metaProperties = new LinkedHashMap<>(); - } - - public boolean validate() throws JToscaException { - isValidated = true; - - //validate that the file or URL exists - - if(isFile) { - File f = new File(path); - if (!f.isFile()) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE220", String.format("\"%s\" is not a file", path))); - return false; - } - else { - this.csar = path; - } - } - else { - if(!UrlUtils.validateUrl(path)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE221", String.format("ImportError: \"%s\" does not exist",path))); - return false; - } - // get it to a local file - try { - File tempFile = File.createTempFile("csartmp",".csar"); - Path ptf = Paths.get(tempFile.getPath()); - URL webfile = new URL(path); - InputStream in = webfile.openStream(); - Files.copy(in,ptf,StandardCopyOption.REPLACE_EXISTING); - } - catch(Exception e) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE222", "ImportError: failed to load CSAR from " + path)); - return false; - } - - log.debug("CSAR - validate - currently only files are supported"); - return false; - } - - _parseAndValidateMetaProperties(); - - if(errorCaught) { - return false; - } - - // validate that external references in the main template actually exist and are 
accessible - _validateExternalReferences(); - - return !errorCaught; - - } - - private void _parseAndValidateMetaProperties() throws JToscaException { - - ZipFile zf = null; - - try { - - // validate that it is a valid zip file - RandomAccessFile raf = new RandomAccessFile(csar, "r"); - long n = raf.readInt(); - raf.close(); - // check if Zip's magic number - if (n != 0x504B0304) { - String errorString = String.format("\"%s\" is not a valid zip file", csar); - log.error(errorString); - throw new JToscaException(errorString , JToscaErrorCodes.INVALID_CSAR_FORMAT.getValue()); - } - - // validate that it contains the metadata file in the correct location - zf = new ZipFile(csar); - ZipEntry ze = zf.getEntry("TOSCA-Metadata/TOSCA.meta"); - if (ze == null) { - - String errorString = String.format( - "\"%s\" is not a valid CSAR as it does not contain the " + - "required file \"TOSCA.meta\" in the folder \"TOSCA-Metadata\"", csar); - log.error(errorString); - throw new JToscaException(errorString, JToscaErrorCodes.MISSING_META_FILE.getValue()); - } - - //Going over expected metadata files and parsing them - for (String metaFile: META_PROPERTIES_FILES) { - - byte ba[] = new byte[4096]; - ze = zf.getEntry(metaFile); - if (ze != null) { - InputStream inputStream = zf.getInputStream(ze); - n = inputStream.read(ba, 0, 4096); - String md = new String(ba); - md = md.substring(0, (int) n); - - String errorString = String.format( - "The file \"%s\" in the" + - " CSAR \"%s\" does not contain valid YAML content", ze.getName(), csar); - - try { - Yaml yaml = new Yaml(); - Object mdo = yaml.load(md); - if (!(mdo instanceof LinkedHashMap)) { - log.error(errorString); - throw new JToscaException(errorString, JToscaErrorCodes.INVALID_META_YAML_CONTENT.getValue()); - } - - String[] split = ze.getName().split("/"); - String fileName = split[split.length - 1]; - - if (!metaProperties.containsKey(fileName)) { - metaProperties.put(fileName, (LinkedHashMap) mdo); - } - } - catch(Exception e) { 
- log.error(errorString); - throw new JToscaException(errorString, JToscaErrorCodes.INVALID_META_YAML_CONTENT.getValue()); - } - } - } - - // verify it has "Entry-Definition" - String edf = _getMetadata("Entry-Definitions"); - if (edf == null) { - String errorString = String.format( - "The CSAR \"%s\" is missing the required metadata " + - "\"Entry-Definitions\" in \"TOSCA-Metadata/TOSCA.meta\"", csar); - log.error(errorString); - throw new JToscaException(errorString, JToscaErrorCodes.ENTRY_DEFINITION_NOT_DEFINED.getValue()); - } - - //validate that "Entry-Definitions' metadata value points to an existing file in the CSAR - boolean foundEDF = false; - Enumeration entries = zf.entries(); - while (entries.hasMoreElements()) { - ze = entries.nextElement(); - if (ze.getName().equals(edf)) { - foundEDF = true; - break; - } - } - if (!foundEDF) { - String errorString = String.format( - "The \"Entry-Definitions\" file defined in the CSAR \"%s\" does not exist", csar); - log.error(errorString); - throw new JToscaException(errorString, JToscaErrorCodes.MISSING_ENTRY_DEFINITION_FILE.getValue()); - } - } catch (JToscaException e) { - //ThreadLocalsHolder.getCollector().appendCriticalException(e.getMessage()); - throw e; - } catch (Exception e) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE223", "ValidationError: " + e.getMessage())); - errorCaught = true; - } - - try { - if (zf != null) { - zf.close(); - } - } catch (IOException e) { - } - } - - public void cleanup() { - try { - if(tempFile != null) { - tempFile.delete(); - } - } - catch(Exception e) { - } - } - - private String _getMetadata(String key) throws JToscaException { - if(!isValidated) { - validate(); - } - Object value = _getMetaProperty("TOSCA.meta").get(key); - return value != null ? 
value.toString() : null; - } - - public String getAuthor() throws JToscaException { - return _getMetadata("Created-By"); - } - - public String getVersion() throws JToscaException { - return _getMetadata("CSAR-Version"); - } - - public LinkedHashMap> getMetaProperties() { - return metaProperties; - } - - private LinkedHashMap _getMetaProperty(String propertiesFile) { - return metaProperties.get(propertiesFile); - } - - public String getMainTemplate() throws JToscaException { - String entryDef = _getMetadata("Entry-Definitions"); - ZipFile zf; - boolean ok = false; - try { - zf = new ZipFile(path); - ok = (zf.getEntry(entryDef) != null); - zf.close(); - } - catch(IOException e) { - if(!ok) { - log.error("CSAR - getMainTemplate - failed to open {}", path); - } - } - if(ok) { - return entryDef; - } - else { - return null; - } - } - - @SuppressWarnings("unchecked") - public LinkedHashMap getMainTemplateYaml() throws JToscaException { - String mainTemplate = tempDir + File.separator + getMainTemplate(); - if(mainTemplate != null) { - try (InputStream input = new FileInputStream(new File(mainTemplate));){ - Yaml yaml = new Yaml(); - Object data = yaml.load(input); - if(!(data instanceof LinkedHashMap)) { - throw new IOException(); - } - return (LinkedHashMap)data; - } - catch(Exception e) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE224", String.format( - "The file \"%s\" in the CSAR \"%s\" does not " + - "contain valid TOSCA YAML content", - mainTemplate,csar))); - } - } - return null; - } - - public String getDescription() throws JToscaException { - String desc = _getMetadata("Description"); - if(desc != null) { - return desc; - } - - Map metaData = metaProperties.get("TOSCA.meta"); - metaData.put("Description", getMainTemplateYaml().get("description")); - return _getMetadata("Description"); - } - - public String getTempDir() { - return tempDir; - } - - public void decompress() throws IOException, JToscaException { - 
if(!isValidated) { - validate(); - } - - if(tempDir == null || tempDir.isEmpty()) { - tempDir = Files.createTempDirectory("JTP").toString(); - unzip(path,tempDir); - } - } - - private void _validateExternalReferences() throws JToscaException { - // Extracts files referenced in the main template - // These references are currently supported: - // * imports - // * interface implementations - // * artifacts - try { - decompress(); - String mainTplFile = getMainTemplate(); - if(mainTplFile == null) { - return; - } - - LinkedHashMap mainTpl = getMainTemplateYaml(); - if(mainTpl.get("imports") != null) { - // this loads the imports - ImportsLoader il = new ImportsLoader((ArrayList)mainTpl.get("imports"), - tempDir + File.separator + mainTplFile, - (Object)null, - (LinkedHashMap)null); - } - - if(mainTpl.get("topology_template") != null) { - LinkedHashMap topologyTemplate = - (LinkedHashMap)mainTpl.get("topology_template"); - - if(topologyTemplate.get("node_templates") != null) { - LinkedHashMap nodeTemplates = - (LinkedHashMap)topologyTemplate.get("node_templates"); - for(String nodeTemplateKey: nodeTemplates.keySet()) { - LinkedHashMap nodeTemplate = - (LinkedHashMap)nodeTemplates.get(nodeTemplateKey); - if(nodeTemplate.get("artifacts") != null) { - LinkedHashMap artifacts = - (LinkedHashMap)nodeTemplate.get("artifacts"); - for(String artifactKey: artifacts.keySet()) { - Object artifact = artifacts.get(artifactKey); - if(artifact instanceof String) { - _validateExternalReference(mainTplFile,(String)artifact,true); - } - else if(artifact instanceof LinkedHashMap) { - String file = (String)((LinkedHashMap)artifact).get("file"); - if(file != null) { - _validateExternalReference(mainTplFile,file,true); - } - } - else { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE225", String.format( - "ValueError: Unexpected artifact definition for \"%s\"", - artifactKey))); - errorCaught = true; - } - } - } - if(nodeTemplate.get("interfaces") != 
null) { - LinkedHashMap interfaces = - (LinkedHashMap)nodeTemplate.get("interfaces"); - for(String interfaceKey: interfaces.keySet()) { - LinkedHashMap _interface = - (LinkedHashMap)interfaces.get(interfaceKey); - for(String operationKey: _interface.keySet()) { - Object operation = _interface.get(operationKey); - if(operation instanceof String) { - _validateExternalReference(mainTplFile,(String)operation,false); - } - else if(operation instanceof LinkedHashMap) { - String imp = (String)((LinkedHashMap)operation).get("implementation"); - if(imp != null) { - _validateExternalReference(mainTplFile,imp,true); - } - } - } - } - } - } - } - } - } - catch(IOException e) { - errorCaught = true; - } - finally { - // delete tempDir (only here?!?) - File fdir = new File(tempDir); - deleteDir(fdir); - tempDir = null; - } - } - - public static void deleteDir(File fdir) { - try { - if (fdir.isDirectory()) { - for (File c : fdir.listFiles()) - deleteDir(c); - } - fdir.delete(); - } - catch(Exception e) { - } - } - - private void _validateExternalReference(String tplFile,String resourceFile,boolean raiseExc) { - // Verify that the external resource exists - - // If resource_file is a URL verify that the URL is valid. - // If resource_file is a relative path verify that the path is valid - // considering base folder (self.temp_dir) and tpl_file. - // Note that in a CSAR resource_file cannot be an absolute path. 
- if(UrlUtils.validateUrl(resourceFile)) { - String msg = String.format("URLException: The resource at \"%s\" cannot be accessed",resourceFile); - try { - if(UrlUtils.isUrlAccessible(resourceFile)) { - return; - } - else { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE226", msg)); - errorCaught = true; - } - } - catch (Exception e) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE227", msg)); - } - } - - String dirPath = Paths.get(tplFile).getParent().toString(); - String filePath = tempDir + File.separator + dirPath + File.separator + resourceFile; - File f = new File(filePath); - if(f.isFile()) { - return; - } - - if(raiseExc) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE228", String.format( - "ValueError: The resource \"%s\" does not exist",resourceFile))); - } - errorCaught = true; - } - - private void unzip(String zipFilePath, String destDirectory) throws IOException { - File destDir = new File(destDirectory); - if (!destDir.exists()) { - destDir.mkdir(); - } - - try (ZipInputStream zipIn = new ZipInputStream(new FileInputStream(zipFilePath));){ - ZipEntry entry = zipIn.getNextEntry(); - // iterates over entries in the zip file - while (entry != null) { - // create all directories needed for nested items - String[] parts = entry.getName().split("/"); - String s = destDirectory + File.separator ; - for(int i=0; i< parts.length-1; i++) { - s += parts[i]; - File idir = new File(s); - if(!idir.exists()) { - idir.mkdir(); - } - s += File.separator; - } - String filePath = destDirectory + File.separator + entry.getName(); - if (!entry.isDirectory()) { - // if the entry is a file, extracts it - extractFile(zipIn, filePath); - } else { - // if the entry is a directory, make the directory - File dir = new File(filePath); - dir.mkdir(); - } - zipIn.closeEntry(); - entry = zipIn.getNextEntry(); - } - } - } - - /** - * Extracts a zip entry (file 
entry) - * @param zipIn - * @param filePath - * @throws IOException - */ - private static final int BUFFER_SIZE = 4096; - - private void extractFile(ZipInputStream zipIn, String filePath) throws IOException { - //BufferedOutputStream bos = new BufferedOutputStream(new FileOutputStream(filePath)); - try (FileOutputStream fos = new FileOutputStream(filePath); - BufferedOutputStream bos = new BufferedOutputStream(fos);){ - byte[] bytesIn = new byte[BUFFER_SIZE]; - int read = 0; - while ((read = zipIn.read(bytesIn)) != -1) { - bos.write(bytesIn, 0, read); - } - } - } - -} - -/*python - -from toscaparser.common.exception import ValidationIssueCollector -from toscaparser.common.exception import URLException -from toscaparser.common.exception import ValidationError -from toscaparser.imports import ImportsLoader -from toscaparser.utils.gettextutils import _ -from toscaparser.utils.urlutils import UrlUtils - -try: # Python 2.x - from BytesIO import BytesIO -except ImportError: # Python 3.x - from io import BytesIO - - -class CSAR(object): - - def __init__(self, csar_file, a_file=True): - self.path = csar_file - self.a_file = a_file - self.is_validated = False - self.error_caught = False - self.csar = None - self.temp_dir = None - - def validate(self): - """Validate the provided CSAR file.""" - - self.is_validated = True - - # validate that the file or URL exists - missing_err_msg = (_('"%s" does not exist.') % self.path) - if self.a_file: - if not os.path.isfile(self.path): - ValidationIssueCollector.appendException( - ValidationError(message=missing_err_msg)) - return False - else: - self.csar = self.path - else: # a URL - if not UrlUtils.validate_url(self.path): - ValidationIssueCollector.appendException( - ValidationError(message=missing_err_msg)) - return False - else: - response = requests.get(self.path) - self.csar = BytesIO(response.content) - - # validate that it is a valid zip file - if not zipfile.is_zipfile(self.csar): - err_msg = (_('"%s" is not a valid zip 
file.') % self.path) - ValidationIssueCollector.appendException( - ValidationError(message=err_msg)) - return False - - # validate that it contains the metadata file in the correct location - self.zfile = zipfile.ZipFile(self.csar, 'r') - filelist = self.zfile.namelist() - if 'TOSCA-Metadata/TOSCA.meta' not in filelist: - err_msg = (_('"%s" is not a valid CSAR as it does not contain the ' - 'required file "TOSCA.meta" in the folder ' - '"TOSCA-Metadata".') % self.path) - ValidationIssueCollector.appendException( - ValidationError(message=err_msg)) - return False - - # validate that 'Entry-Definitions' property exists in TOSCA.meta - data = self.zfile.read('TOSCA-Metadata/TOSCA.meta') - invalid_yaml_err_msg = (_('The file "TOSCA-Metadata/TOSCA.meta" in ' - 'the CSAR "%s" does not contain valid YAML ' - 'content.') % self.path) - try: - meta = yaml.load(data) - if type(meta) is dict: - self.metadata = meta - else: - ValidationIssueCollector.appendException( - ValidationError(message=invalid_yaml_err_msg)) - return False - except yaml.YAMLError: - ValidationIssueCollector.appendException( - ValidationError(message=invalid_yaml_err_msg)) - return False - - if 'Entry-Definitions' not in self.metadata: - err_msg = (_('The CSAR "%s" is missing the required metadata ' - '"Entry-Definitions" in ' - '"TOSCA-Metadata/TOSCA.meta".') - % self.path) - ValidationIssueCollector.appendException( - ValidationError(message=err_msg)) - return False - - # validate that 'Entry-Definitions' metadata value points to an - # existing file in the CSAR - entry = self.metadata.get('Entry-Definitions') - if entry and entry not in filelist: - err_msg = (_('The "Entry-Definitions" file defined in the ' - 'CSAR "%s" does not exist.') % self.path) - ValidationIssueCollector.appendException( - ValidationError(message=err_msg)) - return False - - # validate that external references in the main template actually - # exist and are accessible - self._validate_external_references() - return not 
self.error_caught - - def get_metadata(self): - """Return the metadata dictionary.""" - - # validate the csar if not already validated - if not self.is_validated: - self.validate() - - # return a copy to avoid changes overwrite the original - return dict(self.metadata) if self.metadata else None - - def _get_metadata(self, key): - if not self.is_validated: - self.validate() - return self.metadata.get(key) - - def get_author(self): - return self._get_metadata('Created-By') - - def get_version(self): - return self._get_metadata('CSAR-Version') - - def get_main_template(self): - entry_def = self._get_metadata('Entry-Definitions') - if entry_def in self.zfile.namelist(): - return entry_def - - def get_main_template_yaml(self): - main_template = self.get_main_template() - if main_template: - data = self.zfile.read(main_template) - invalid_tosca_yaml_err_msg = ( - _('The file "%(template)s" in the CSAR "%(csar)s" does not ' - 'contain valid TOSCA YAML content.') % - {'template': main_template, 'csar': self.path}) - try: - tosca_yaml = yaml.load(data) - if type(tosca_yaml) is not dict: - ValidationIssueCollector.appendException( - ValidationError(message=invalid_tosca_yaml_err_msg)) - return tosca_yaml - except Exception: - ValidationIssueCollector.appendException( - ValidationError(message=invalid_tosca_yaml_err_msg)) - - def get_description(self): - desc = self._get_metadata('Description') - if desc is not None: - return desc - - self.metadata['Description'] = \ - self.get_main_template_yaml().get('description') - return self.metadata['Description'] - - def decompress(self): - if not self.is_validated: - self.validate() - self.temp_dir = tempfile.NamedTemporaryFile().name - with zipfile.ZipFile(self.csar, "r") as zf: - zf.extractall(self.temp_dir) - - def _validate_external_references(self): - """Extracts files referenced in the main template - - These references are currently supported: - * imports - * interface implementations - * artifacts - """ - try: - 
self.decompress() - main_tpl_file = self.get_main_template() - if not main_tpl_file: - return - main_tpl = self.get_main_template_yaml() - - if 'imports' in main_tpl: - ImportsLoader(main_tpl['imports'], - os.path.join(self.temp_dir, main_tpl_file)) - - if 'topology_template' in main_tpl: - topology_template = main_tpl['topology_template'] - - if 'node_templates' in topology_template: - node_templates = topology_template['node_templates'] - - for node_template_key in node_templates: - node_template = node_templates[node_template_key] - if 'artifacts' in node_template: - artifacts = node_template['artifacts'] - for artifact_key in artifacts: - artifact = artifacts[artifact_key] - if isinstance(artifact, six.string_types): - self._validate_external_reference( - main_tpl_file, - artifact) - elif isinstance(artifact, dict): - if 'file' in artifact: - self._validate_external_reference( - main_tpl_file, - artifact['file']) - else: - ValidationIssueCollector.appendException( - ValueError(_('Unexpected artifact ' - 'definition for "%s".') - % artifact_key)) - self.error_caught = True - if 'interfaces' in node_template: - interfaces = node_template['interfaces'] - for interface_key in interfaces: - interface = interfaces[interface_key] - for opertation_key in interface: - operation = interface[opertation_key] - if isinstance(operation, six.string_types): - self._validate_external_reference( - main_tpl_file, - operation, - False) - elif isinstance(operation, dict): - if 'implementation' in operation: - self._validate_external_reference( - main_tpl_file, - operation['implementation']) - finally: - if self.temp_dir: - shutil.rmtree(self.temp_dir) - - def _validate_external_reference(self, tpl_file, resource_file, - raise_exc=True): - """Verify that the external resource exists - - If resource_file is a URL verify that the URL is valid. - If resource_file is a relative path verify that the path is valid - considering base folder (self.temp_dir) and tpl_file. 
- Note that in a CSAR resource_file cannot be an absolute path. - """ - if UrlUtils.validate_url(resource_file): - msg = (_('The resource at "%s" cannot be accessed.') % - resource_file) - try: - if UrlUtils.url_accessible(resource_file): - return - else: - ValidationIssueCollector.appendException( - URLException(what=msg)) - self.error_caught = True - except Exception: - ValidationIssueCollector.appendException( - URLException(what=msg)) - self.error_caught = True - - if os.path.isfile(os.path.join(self.temp_dir, - os.path.dirname(tpl_file), - resource_file)): - return - - if raise_exc: - ValidationIssueCollector.appendException( - ValueError(_('The resource "%s" does not exist.') - % resource_file)) - self.error_caught = True -*/ - - diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/prereq/CSAR.java.orig b/src/main/java/org/openecomp/sdc/toscaparser/api/prereq/CSAR.java.orig deleted file mode 100644 index b4d2614..0000000 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/prereq/CSAR.java.orig +++ /dev/null @@ -1,767 +0,0 @@ -package org.openecomp.sdc.toscaparser.api.prereq; - -import java.io.BufferedOutputStream; -import java.io.File; -import java.io.FileInputStream; -import java.io.FileOutputStream; -import java.io.IOException; -import java.io.InputStream; -import java.io.RandomAccessFile; -import java.net.URL; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.nio.file.StandardCopyOption; -import java.util.*; -import java.util.zip.ZipEntry; -import java.util.zip.ZipFile; -import java.util.zip.ZipInputStream; - -import org.openecomp.sdc.toscaparser.api.ImportsLoader; -import org.openecomp.sdc.toscaparser.api.common.ValidationIssueCollector; -import org.openecomp.sdc.toscaparser.api.elements.Metadata; -import org.openecomp.sdc.toscaparser.api.utils.UrlUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.yaml.snakeyaml.Yaml; - -<<<<<<< 
HEAD:jtosca/src/main/java/org/openecomp/sdc/toscaparser/prereq/CSAR.java -import org.openecomp.sdc.toscaparser.ImportsLoader; -import org.openecomp.sdc.toscaparser.common.ExceptionCollector; -import org.openecomp.sdc.toscaparser.utils.UrlUtils; - -======= ->>>>>>> 243072-jtosca-package-fix:jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/prereq/CSAR.java -public class CSAR { - - private static Logger log = LoggerFactory.getLogger(CSAR.class.getName()); - private static final ArrayList META_PROPERTIES_FILES = new ArrayList<>(Arrays.asList("TOSCA-Metadata/TOSCA.meta", "csar.meta")); - - private String path; - private boolean isFile; - private boolean isValidated; - private boolean errorCaught; - private String csar; - private String tempDir; -// private Metadata metaData; - private File tempFile; - private LinkedHashMap> metaProperties; - - public CSAR(String csarPath, boolean aFile) { - path = csarPath; - isFile = aFile; - isValidated = false; - errorCaught = false; - csar = null; - tempDir = null; - tempFile = null; - metaProperties = new LinkedHashMap<>(); - } - - @SuppressWarnings("unchecked") - public boolean validate() { - isValidated = true; - - //validate that the file or URL exists - - if(isFile) { - File f = new File(path); - if (!f.isFile()) { - ExceptionCollector.appendException(String.format("\"%s\" is not a file", path)); - return false; - } - else { - this.csar = path; - } - } - else { - if(!UrlUtils.validateUrl(path)) { - ExceptionCollector.appendException(String.format("ImportError: \"%s\" does not exist",path)); - return false; - } - // get it to a local file - try { - File tempFile = File.createTempFile("csartmp",".csar"); - Path ptf = Paths.get(tempFile.getPath()); - URL webfile = new URL(path); - InputStream in = webfile.openStream(); - Files.copy(in,ptf,StandardCopyOption.REPLACE_EXISTING); - } - catch(Exception e) { - ExceptionCollector.appendException("ImportError: failed to load CSAR from " + path); - return false; - } - - 
log.debug("CSAR - validate - currently only files are supported"); - return false; - } - - _parseAndValidateMetaProperties(); - - if(errorCaught) { - return false; - } - - // validate that external references in the main template actually exist and are accessible - _validateExternalReferences(); - - return !errorCaught; - - } - - private void _parseAndValidateMetaProperties() { - - ZipFile zf = null; - - try { - - // validate that it is a valid zip file - RandomAccessFile raf = new RandomAccessFile(csar, "r"); - long n = raf.readInt(); - raf.close(); - // check if Zip's magic number - if (n != 0x504B0304) { - throw new IOException(String.format("\"%s\" is not a valid zip file", csar)); - } - - // validate that it contains the metadata file in the correct location - zf = new ZipFile(csar); - ZipEntry ze = zf.getEntry("TOSCA-Metadata/TOSCA.meta"); - if (ze == null) { - throw new IOException(String.format( - "\"%s\" is not a valid CSAR as it does not contain the " + - "required file \"TOSCA.meta\" in the folder \"TOSCA-Metadata\"", csar)); - } - - //Going over expected metadata files and parsing them - for (String metaFile: META_PROPERTIES_FILES) { - - byte ba[] = new byte[4096]; - ze = zf.getEntry(metaFile); - if (ze != null) { - InputStream inputStream = zf.getInputStream(ze); - n = inputStream.read(ba, 0, 4096); - - String md = new String(ba); - md = md.substring(0, (int) n); - Yaml yaml = new Yaml(); - Object mdo = yaml.load(md); - if (!(mdo instanceof LinkedHashMap)) { - throw new IOException(String.format( - "The file \"%s\" in the" + - " CSAR \"%s\" does not contain valid YAML content", ze.getName(), csar)); - } - - String[] split = ze.getName().split("/"); - String fileName = split[split.length - 1]; - - if (!metaProperties.containsKey(fileName)) { - metaProperties.put(fileName, (LinkedHashMap) mdo); - } - } - } - - // verify it has "Entry-Definition" - String edf = _getMetadata("Entry-Definitions"); - if (edf == null) { - throw new IOException(String.format( 
- "The CSAR \"%s\" is missing the required metadata " + - "\"Entry-Definitions\" in \"TOSCA-Metadata/TOSCA.meta\"", csar)); - } - - //validate that "Entry-Definitions' metadata value points to an existing file in the CSAR - boolean foundEDF = false; - Enumeration entries = zf.entries(); - while (entries.hasMoreElements()) { - ze = entries.nextElement(); - if (ze.getName().equals(edf)) { - foundEDF = true; - break; - } - } - if (!foundEDF) { - throw new IOException(String.format( - "The \"Entry-Definitions\" file defined in the CSAR \"%s\" does not exist", csar)); - } - } catch (Exception e) { - ExceptionCollector.appendException("ValidationError: " + e.getMessage()); - errorCaught = true; - } - - try { - if (zf != null) { - zf.close(); - } - } catch (IOException e) { - } - } - - public void cleanup() { - try { - if(tempFile != null) { - tempFile.delete(); - } - } - catch(Exception e) { - } - } - - private String _getMetadata(String key) { - if(!isValidated) { - validate(); - } - Object value = _getMetaProperty("TOSCA.meta").get(key); - return value != null ? 
value.toString() : null; - } - - public String getAuthor() { - return _getMetadata("Created-By"); - } - - public String getVersion() { - return _getMetadata("CSAR-Version"); - } - - public LinkedHashMap> getMetaProperties() { - return metaProperties; - } - - private LinkedHashMap _getMetaProperty(String propertiesFile) { - return metaProperties.get(propertiesFile); - } - - public String getMainTemplate() { - String entryDef = _getMetadata("Entry-Definitions"); - ZipFile zf; - boolean ok = false; - try { - zf = new ZipFile(path); - ok = (zf.getEntry(entryDef) != null); - zf.close(); - } - catch(IOException e) { - if(!ok) { - log.error("CSAR - getMainTemplate - failed to open {}", path); - } - } - if(ok) { - return entryDef; - } - else { - return null; - } - } - - @SuppressWarnings("unchecked") - public LinkedHashMap getMainTemplateYaml() { - String mainTemplate = tempDir + File.separator + getMainTemplate(); - if(mainTemplate != null) { - try { - InputStream input = new FileInputStream(new File(mainTemplate)); - Yaml yaml = new Yaml(); - Object data = yaml.load(input); - if(!(data instanceof LinkedHashMap)) { - throw new IOException(); - } - return (LinkedHashMap)data; - } - catch(Exception e) { - ExceptionCollector.appendException(String.format( - "The file \"%s\" in the CSAR \"%s\" does not " + - "contain valid TOSCA YAML content", - mainTemplate,csar)); - } - } - return null; - } - - public String getDescription() { - String desc = _getMetadata("Description"); - if(desc != null) { - return desc; - } - - Map metaData = metaProperties.get("TOSCA.meta"); - metaData.put("Description", getMainTemplateYaml().get("description")); - return _getMetadata("Description"); - } - - public String getTempDir() { - return tempDir; - } - - public void decompress() throws IOException { - if(!isValidated) { - validate(); - } - tempDir = Files.createTempDirectory("JTP").toString(); - unzip(path,tempDir); - - } - - private void _validateExternalReferences() { - // Extracts files 
referenced in the main template - // These references are currently supported: - // * imports - // * interface implementations - // * artifacts - try { - decompress(); - String mainTplFile = getMainTemplate(); - if(mainTplFile == null) { - return; - } - - LinkedHashMap mainTpl = getMainTemplateYaml(); - if(mainTpl.get("imports") != null) { - // this loads the imports - ImportsLoader il = new ImportsLoader((ArrayList)mainTpl.get("imports"), - tempDir + File.separator + mainTplFile, - (Object)null, - (LinkedHashMap)null); - } - - if(mainTpl.get("topology_template") != null) { - LinkedHashMap topologyTemplate = - (LinkedHashMap)mainTpl.get("topology_template"); - - if(topologyTemplate.get("node_templates") != null) { - LinkedHashMap nodeTemplates = - (LinkedHashMap)topologyTemplate.get("node_templates"); - for(String nodeTemplateKey: nodeTemplates.keySet()) { - LinkedHashMap nodeTemplate = - (LinkedHashMap)nodeTemplates.get(nodeTemplateKey); - if(nodeTemplate.get("artifacts") != null) { - LinkedHashMap artifacts = - (LinkedHashMap)nodeTemplate.get("artifacts"); - for(String artifactKey: artifacts.keySet()) { - Object artifact = artifacts.get(artifactKey); - if(artifact instanceof String) { - _validateExternalReference(mainTplFile,(String)artifact,true); - } - else if(artifact instanceof LinkedHashMap) { - String file = (String)((LinkedHashMap)artifact).get("file"); - if(file != null) { - _validateExternalReference(mainTplFile,file,true); - } - } - else { - ExceptionCollector.appendException(String.format( - "ValueError: Unexpected artifact definition for \"%s\"", - artifactKey)); - errorCaught = true; - } - } - } - if(nodeTemplate.get("interfaces") != null) { - LinkedHashMap interfaces = - (LinkedHashMap)nodeTemplate.get("interfaces"); - for(String interfaceKey: interfaces.keySet()) { - LinkedHashMap _interface = - (LinkedHashMap)interfaces.get(interfaceKey); - for(String operationKey: _interface.keySet()) { - Object operation = _interface.get(operationKey); - 
if(operation instanceof String) { - _validateExternalReference(mainTplFile,(String)operation,false); - } - else if(operation instanceof LinkedHashMap) { - String imp = (String)((LinkedHashMap)operation).get("implementation"); - if(imp != null) { - _validateExternalReference(mainTplFile,imp,true); - } - } - } - } - } - } - } - } - } - catch(IOException e) { - errorCaught = true; - } - finally { - // delete tempDir (only here?!?) - File fdir = new File(tempDir); - deleteDir(fdir); - tempDir = null; - } - } - - public static void deleteDir(File fdir) { - try { - if (fdir.isDirectory()) { - for (File c : fdir.listFiles()) - deleteDir(c); - } - fdir.delete(); - } - catch(Exception e) { - } - } - - private void _validateExternalReference(String tplFile,String resourceFile,boolean raiseExc) { - // Verify that the external resource exists - - // If resource_file is a URL verify that the URL is valid. - // If resource_file is a relative path verify that the path is valid - // considering base folder (self.temp_dir) and tpl_file. - // Note that in a CSAR resource_file cannot be an absolute path. 
- if(UrlUtils.validateUrl(resourceFile)) { - String msg = String.format("URLException: The resource at \"%s\" cannot be accessed",resourceFile); - try { - if(UrlUtils.isUrlAccessible(resourceFile)) { - return; - } - else { - ExceptionCollector.appendException(msg); - errorCaught = true; - } - } - catch (Exception e) { - ExceptionCollector.appendException(msg); - } - } - - String dirPath = Paths.get(tplFile).getParent().toString(); - String filePath = tempDir + File.separator + dirPath + File.separator + resourceFile; - File f = new File(filePath); - if(f.isFile()) { - return; - } - - if(raiseExc) { - ExceptionCollector.appendException(String.format( - "ValueError: The resource \"%s\" does not exist",resourceFile)); - } - errorCaught = true; - } - - private void unzip(String zipFilePath, String destDirectory) throws IOException { - File destDir = new File(destDirectory); - if (!destDir.exists()) { - destDir.mkdir(); - } - ZipInputStream zipIn = new ZipInputStream(new FileInputStream(zipFilePath)); - ZipEntry entry = zipIn.getNextEntry(); - // iterates over entries in the zip file - while (entry != null) { - // create all directories needed for nested items - String[] parts = entry.getName().split("/"); - String s = destDirectory + File.separator ; - for(int i=0; i< parts.length-1; i++) { - s += parts[i]; - File idir = new File(s); - if(!idir.exists()) { - idir.mkdir(); - } - s += File.separator; - } - String filePath = destDirectory + File.separator + entry.getName(); - if (!entry.isDirectory()) { - // if the entry is a file, extracts it - extractFile(zipIn, filePath); - } else { - // if the entry is a directory, make the directory - File dir = new File(filePath); - dir.mkdir(); - } - zipIn.closeEntry(); - entry = zipIn.getNextEntry(); - } - zipIn.close(); - } - - /** - * Extracts a zip entry (file entry) - * @param zipIn - * @param filePath - * @throws IOException - */ - private static final int BUFFER_SIZE = 4096; - - private void extractFile(ZipInputStream zipIn, 
String filePath) throws IOException { - //BufferedOutputStream bos = new BufferedOutputStream(new FileOutputStream(filePath)); - FileOutputStream fos = new FileOutputStream(filePath); - BufferedOutputStream bos = new BufferedOutputStream(fos); - byte[] bytesIn = new byte[BUFFER_SIZE]; - int read = 0; - while ((read = zipIn.read(bytesIn)) != -1) { - bos.write(bytesIn, 0, read); - } - bos.close(); - } - -} - -/*python - -from toscaparser.common.exception import ExceptionCollector -from toscaparser.common.exception import URLException -from toscaparser.common.exception import ValidationError -from toscaparser.imports import ImportsLoader -from toscaparser.utils.gettextutils import _ -from toscaparser.utils.urlutils import UrlUtils - -try: # Python 2.x - from BytesIO import BytesIO -except ImportError: # Python 3.x - from io import BytesIO - - -class CSAR(object): - - def __init__(self, csar_file, a_file=True): - self.path = csar_file - self.a_file = a_file - self.is_validated = False - self.error_caught = False - self.csar = None - self.temp_dir = None - - def validate(self): - """Validate the provided CSAR file.""" - - self.is_validated = True - - # validate that the file or URL exists - missing_err_msg = (_('"%s" does not exist.') % self.path) - if self.a_file: - if not os.path.isfile(self.path): - ExceptionCollector.appendException( - ValidationError(message=missing_err_msg)) - return False - else: - self.csar = self.path - else: # a URL - if not UrlUtils.validate_url(self.path): - ExceptionCollector.appendException( - ValidationError(message=missing_err_msg)) - return False - else: - response = requests.get(self.path) - self.csar = BytesIO(response.content) - - # validate that it is a valid zip file - if not zipfile.is_zipfile(self.csar): - err_msg = (_('"%s" is not a valid zip file.') % self.path) - ExceptionCollector.appendException( - ValidationError(message=err_msg)) - return False - - # validate that it contains the metadata file in the correct location - 
self.zfile = zipfile.ZipFile(self.csar, 'r') - filelist = self.zfile.namelist() - if 'TOSCA-Metadata/TOSCA.meta' not in filelist: - err_msg = (_('"%s" is not a valid CSAR as it does not contain the ' - 'required file "TOSCA.meta" in the folder ' - '"TOSCA-Metadata".') % self.path) - ExceptionCollector.appendException( - ValidationError(message=err_msg)) - return False - - # validate that 'Entry-Definitions' property exists in TOSCA.meta - data = self.zfile.read('TOSCA-Metadata/TOSCA.meta') - invalid_yaml_err_msg = (_('The file "TOSCA-Metadata/TOSCA.meta" in ' - 'the CSAR "%s" does not contain valid YAML ' - 'content.') % self.path) - try: - meta = yaml.load(data) - if type(meta) is dict: - self.metadata = meta - else: - ExceptionCollector.appendException( - ValidationError(message=invalid_yaml_err_msg)) - return False - except yaml.YAMLError: - ExceptionCollector.appendException( - ValidationError(message=invalid_yaml_err_msg)) - return False - - if 'Entry-Definitions' not in self.metadata: - err_msg = (_('The CSAR "%s" is missing the required metadata ' - '"Entry-Definitions" in ' - '"TOSCA-Metadata/TOSCA.meta".') - % self.path) - ExceptionCollector.appendException( - ValidationError(message=err_msg)) - return False - - # validate that 'Entry-Definitions' metadata value points to an - # existing file in the CSAR - entry = self.metadata.get('Entry-Definitions') - if entry and entry not in filelist: - err_msg = (_('The "Entry-Definitions" file defined in the ' - 'CSAR "%s" does not exist.') % self.path) - ExceptionCollector.appendException( - ValidationError(message=err_msg)) - return False - - # validate that external references in the main template actually - # exist and are accessible - self._validate_external_references() - return not self.error_caught - - def get_metadata(self): - """Return the metadata dictionary.""" - - # validate the csar if not already validated - if not self.is_validated: - self.validate() - - # return a copy to avoid changes overwrite the 
original - return dict(self.metadata) if self.metadata else None - - def _get_metadata(self, key): - if not self.is_validated: - self.validate() - return self.metadata.get(key) - - def get_author(self): - return self._get_metadata('Created-By') - - def get_version(self): - return self._get_metadata('CSAR-Version') - - def get_main_template(self): - entry_def = self._get_metadata('Entry-Definitions') - if entry_def in self.zfile.namelist(): - return entry_def - - def get_main_template_yaml(self): - main_template = self.get_main_template() - if main_template: - data = self.zfile.read(main_template) - invalid_tosca_yaml_err_msg = ( - _('The file "%(template)s" in the CSAR "%(csar)s" does not ' - 'contain valid TOSCA YAML content.') % - {'template': main_template, 'csar': self.path}) - try: - tosca_yaml = yaml.load(data) - if type(tosca_yaml) is not dict: - ExceptionCollector.appendException( - ValidationError(message=invalid_tosca_yaml_err_msg)) - return tosca_yaml - except Exception: - ExceptionCollector.appendException( - ValidationError(message=invalid_tosca_yaml_err_msg)) - - def get_description(self): - desc = self._get_metadata('Description') - if desc is not None: - return desc - - self.metadata['Description'] = \ - self.get_main_template_yaml().get('description') - return self.metadata['Description'] - - def decompress(self): - if not self.is_validated: - self.validate() - self.temp_dir = tempfile.NamedTemporaryFile().name - with zipfile.ZipFile(self.csar, "r") as zf: - zf.extractall(self.temp_dir) - - def _validate_external_references(self): - """Extracts files referenced in the main template - - These references are currently supported: - * imports - * interface implementations - * artifacts - """ - try: - self.decompress() - main_tpl_file = self.get_main_template() - if not main_tpl_file: - return - main_tpl = self.get_main_template_yaml() - - if 'imports' in main_tpl: - ImportsLoader(main_tpl['imports'], - os.path.join(self.temp_dir, main_tpl_file)) - - if 
'topology_template' in main_tpl: - topology_template = main_tpl['topology_template'] - - if 'node_templates' in topology_template: - node_templates = topology_template['node_templates'] - - for node_template_key in node_templates: - node_template = node_templates[node_template_key] - if 'artifacts' in node_template: - artifacts = node_template['artifacts'] - for artifact_key in artifacts: - artifact = artifacts[artifact_key] - if isinstance(artifact, six.string_types): - self._validate_external_reference( - main_tpl_file, - artifact) - elif isinstance(artifact, dict): - if 'file' in artifact: - self._validate_external_reference( - main_tpl_file, - artifact['file']) - else: - ExceptionCollector.appendException( - ValueError(_('Unexpected artifact ' - 'definition for "%s".') - % artifact_key)) - self.error_caught = True - if 'interfaces' in node_template: - interfaces = node_template['interfaces'] - for interface_key in interfaces: - interface = interfaces[interface_key] - for opertation_key in interface: - operation = interface[opertation_key] - if isinstance(operation, six.string_types): - self._validate_external_reference( - main_tpl_file, - operation, - False) - elif isinstance(operation, dict): - if 'implementation' in operation: - self._validate_external_reference( - main_tpl_file, - operation['implementation']) - finally: - if self.temp_dir: - shutil.rmtree(self.temp_dir) - - def _validate_external_reference(self, tpl_file, resource_file, - raise_exc=True): - """Verify that the external resource exists - - If resource_file is a URL verify that the URL is valid. - If resource_file is a relative path verify that the path is valid - considering base folder (self.temp_dir) and tpl_file. - Note that in a CSAR resource_file cannot be an absolute path. 
- """ - if UrlUtils.validate_url(resource_file): - msg = (_('The resource at "%s" cannot be accessed.') % - resource_file) - try: - if UrlUtils.url_accessible(resource_file): - return - else: - ExceptionCollector.appendException( - URLException(what=msg)) - self.error_caught = True - except Exception: - ExceptionCollector.appendException( - URLException(what=msg)) - self.error_caught = True - - if os.path.isfile(os.path.join(self.temp_dir, - os.path.dirname(tpl_file), - resource_file)): - return - - if raise_exc: - ExceptionCollector.appendException( - ValueError(_('The resource "%s" does not exist.') - % resource_file)) - self.error_caught = True -*/ - - diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/utils/CopyUtils.java b/src/main/java/org/openecomp/sdc/toscaparser/api/utils/CopyUtils.java deleted file mode 100644 index db236e1..0000000 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/utils/CopyUtils.java +++ /dev/null @@ -1,29 +0,0 @@ -package org.openecomp.sdc.toscaparser.api.utils; - -import java.util.ArrayList; -import java.util.LinkedHashMap; -import java.util.Map; - -public class CopyUtils { - - @SuppressWarnings("unchecked") - public static Object copyLhmOrAl(Object src) { - if(src instanceof LinkedHashMap) { - LinkedHashMap dst = new LinkedHashMap(); - for(Map.Entry me: ((LinkedHashMap)src).entrySet()) { - dst.put(me.getKey(),me.getValue()); - } - return dst; - } - else if(src instanceof ArrayList) { - ArrayList dst = new ArrayList(); - for(Object o: (ArrayList)src) { - dst.add(o); - } - return dst; - } - else { - return null; - } - } -} diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/utils/DumpUtils.java b/src/main/java/org/openecomp/sdc/toscaparser/api/utils/DumpUtils.java deleted file mode 100644 index 32c69cd..0000000 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/utils/DumpUtils.java +++ /dev/null @@ -1,55 +0,0 @@ -package org.openecomp.sdc.toscaparser.api.utils; - -import java.util.ArrayList; -import 
java.util.LinkedHashMap; -import java.util.Map; - -public class DumpUtils { - - @SuppressWarnings("unchecked") - public static void dumpYaml(Object yo,int level) { - final String indent = " "; - try { - if(yo == null) { - System.out.println(""); - return; - } - String cname = yo.getClass().getSimpleName(); - System.out.print(cname); - if(cname.equals("LinkedHashMap")) { - LinkedHashMap lhm = (LinkedHashMap)yo; - System.out.println(); - for(Map.Entry me: lhm.entrySet()) { - System.out.print(indent.substring(0,level) + me.getKey() + ": "); - dumpYaml(me.getValue(),level+2); - } - } - else if(cname.equals("ArrayList")) { - ArrayList al = (ArrayList)yo; - System.out.println(); - for (int i=0; i \"" + (String)yo + "\""); - } - else if(cname.equals("Integer")) { - System.out.println(" ==> " + (int)yo); - } - else if(cname.equals("Boolean")) { - System.out.println(" ==> " + (boolean)yo); - } - else if(cname.equals("Double")) { - System.out.println(" ==> " + (double)yo); - } - else { - System.out.println(" !! unexpected type"); - } - } - catch(Exception e) { - System.out.println("Exception!! 
" + e.getMessage()); - } - } -} \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/utils/JToscaErrorCodes.java b/src/main/java/org/openecomp/sdc/toscaparser/api/utils/JToscaErrorCodes.java deleted file mode 100644 index 6fb4606..0000000 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/utils/JToscaErrorCodes.java +++ /dev/null @@ -1,32 +0,0 @@ -package org.openecomp.sdc.toscaparser.api.utils; - - -public enum JToscaErrorCodes { - MISSING_META_FILE("JE1001"), - INVALID_META_YAML_CONTENT("JE1002"), - ENTRY_DEFINITION_NOT_DEFINED("JE1003"), - MISSING_ENTRY_DEFINITION_FILE ("JE1004"), - GENERAL_ERROR("JE1005"), - PATH_NOT_VALID("JE1006"), - CSAR_TOSCA_VALIDATION_ERROR("JE1007"), - INVALID_CSAR_FORMAT("JE1008"); - - private String value; - - private JToscaErrorCodes(String value) { - this.value = value; - } - - public String getValue() { - return value; - } - - public static JToscaErrorCodes getByCode(String code) { - for(JToscaErrorCodes v : values()){ - if( v.getValue().equals(code)){ - return v; - } - } - return null; - } -} \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/utils/TOSCAVersionProperty.java b/src/main/java/org/openecomp/sdc/toscaparser/api/utils/TOSCAVersionProperty.java deleted file mode 100644 index f749f7b..0000000 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/utils/TOSCAVersionProperty.java +++ /dev/null @@ -1,182 +0,0 @@ -package org.openecomp.sdc.toscaparser.api.utils; - -import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; - -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -public class TOSCAVersionProperty {// test with functions/test_concat.yaml - - private String version; - - private static final String versionRe = - "^(?([0-9][0-9]*))" + - "(\\.(?([0-9][0-9]*)))?" + - "(\\.(?([0-9][0-9]*)))?" + - "(\\.(?([0-9A-Za-z]+)))?" 
+ - "(\\-(?[0-9])*)?$"; - - private String minorVersion = null; - private String majorVersion = null; - private String fixVersion = null; - private String qualifier = null; - private String buildVersion = null; - - - public TOSCAVersionProperty(Object _version) { - version = _version.toString(); - - if(version.equals("0") || version.equals("0.0") || version.equals("0.0.0")) { - //log.warning(_('Version assumed as not provided')) - version = ""; - return; - } - - Pattern pattern = Pattern.compile(versionRe); - Matcher matcher = pattern.matcher(version); - if(!matcher.find()) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE252", String.format( - "InvalidTOSCAVersionPropertyException: " + - "Value of TOSCA version property \"%s\" is invalid", - version))); - return; - } - minorVersion = matcher.group("gMinorVersion"); - majorVersion = matcher.group("gMajorVersion"); - fixVersion = matcher.group("gFixVersion"); - qualifier = _validateQualifier(matcher.group("gQualifier")); - buildVersion = _validateBuild(matcher.group("gBuildVersion")); - _validateMajorVersion(majorVersion); - - } - - private String _validateMajorVersion(String value) { - // Validate major version - - // Checks if only major version is provided and assumes - // minor version as 0. - // Eg: If version = 18, then it returns version = '18.0' - - if(minorVersion == null && buildVersion == null && !value.equals("0")) { - //log.warning(_('Minor version assumed "0".')) - version = version + "0"; - } - return value; - } - - private String _validateQualifier(String value) { - // Validate qualifier - - // TOSCA version is invalid if a qualifier is present without the - // fix version or with all of major, minor and fix version 0s. 
- - // For example, the following versions are invalid - // 18.0.abc - // 0.0.0.abc - - if((fixVersion == null && value != null) || - (minorVersion.equals("0") && majorVersion.equals("0") && - fixVersion.equals("0") && value != null)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE253", String.format( - "InvalidTOSCAVersionPropertyException: " + - "Value of TOSCA version property \"%s\" is invalid", - version))); - } - return value; - } - - private String _validateBuild(String value) { - // Validate build version - - // TOSCA version is invalid if build version is present without the qualifier. - // Eg: version = 18.0.0-1 is invalid. - - if(qualifier == null && value != null) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE254", String.format( - "InvalidTOSCAVersionPropertyException: " + - "Value of TOSCA version property \"%s\" is invalid", - version))); - } - return value; - } - - public Object getVersion() { - return version; - } - -} - -/*python - -class TOSCAVersionProperty(object): - - VERSION_RE = re.compile('^(?P([0-9][0-9]*))' - '(\.(?P([0-9][0-9]*)))?' - '(\.(?P([0-9][0-9]*)))?' - '(\.(?P([0-9A-Za-z]+)))?' 
- '(\-(?P[0-9])*)?$') - - def __init__(self, version): - self.version = str(version) - match = self.VERSION_RE.match(self.version) - if not match: - ValidationIssueCollector.appendException( - InvalidTOSCAVersionPropertyException(what=(self.version))) - return - ver = match.groupdict() - if self.version in ['0', '0.0', '0.0.0']: - log.warning(_('Version assumed as not provided')) - self.version = None - self.minor_version = ver['minor_version'] - self.major_version = ver['major_version'] - self.fix_version = ver['fix_version'] - self.qualifier = self._validate_qualifier(ver['qualifier']) - self.build_version = self._validate_build(ver['build_version']) - self._validate_major_version(self.major_version) - - def _validate_major_version(self, value): - """Validate major version - - Checks if only major version is provided and assumes - minor version as 0. - Eg: If version = 18, then it returns version = '18.0' - """ - - if self.minor_version is None and self.build_version is None and \ - value != '0': - log.warning(_('Minor version assumed "0".')) - self.version = '.'.join([value, '0']) - return value - - def _validate_qualifier(self, value): - """Validate qualifier - - TOSCA version is invalid if a qualifier is present without the - fix version or with all of major, minor and fix version 0s. - - For example, the following versions are invalid - 18.0.abc - 0.0.0.abc - """ - if (self.fix_version is None and value) or \ - (self.minor_version == self.major_version == - self.fix_version == '0' and value): - ValidationIssueCollector.appendException( - InvalidTOSCAVersionPropertyException(what=(self.version))) - return value - - def _validate_build(self, value): - """Validate build version - - TOSCA version is invalid if build version is present without the - qualifier. - Eg: version = 18.0.0-1 is invalid. 
- """ - if not self.qualifier and value: - ValidationIssueCollector.appendException( - InvalidTOSCAVersionPropertyException(what=(self.version))) - return value - - def get_version(self): - return self.version -*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/utils/ThreadLocalsHolder.java b/src/main/java/org/openecomp/sdc/toscaparser/api/utils/ThreadLocalsHolder.java deleted file mode 100644 index 0e1531f..0000000 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/utils/ThreadLocalsHolder.java +++ /dev/null @@ -1,24 +0,0 @@ -package org.openecomp.sdc.toscaparser.api.utils; - -import org.openecomp.sdc.toscaparser.api.common.ValidationIssueCollector; - -public class ThreadLocalsHolder { - - private static final ThreadLocal exceptionCollectorThreadLocal = new ThreadLocal<>(); - - private ThreadLocalsHolder(){} - - public static ValidationIssueCollector getCollector() { - return exceptionCollectorThreadLocal.get(); - } - - public static void setCollector(ValidationIssueCollector validationIssueCollector) { - cleanup(); - exceptionCollectorThreadLocal.set(validationIssueCollector); - } - - public static void cleanup(){ - exceptionCollectorThreadLocal.remove(); - } - -} diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/utils/UrlUtils.java b/src/main/java/org/openecomp/sdc/toscaparser/api/utils/UrlUtils.java deleted file mode 100644 index 3383bd7..0000000 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/utils/UrlUtils.java +++ /dev/null @@ -1,123 +0,0 @@ -package org.openecomp.sdc.toscaparser.api.utils; - -import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; - -import java.io.IOException; -import java.net.HttpURLConnection; -import java.net.MalformedURLException; -import java.net.URL; - -public class UrlUtils { - - public static boolean validateUrl(String sUrl) { - // Validates whether the given path is a URL or not - - // If the given path includes a scheme (http, https, ftp, ...) 
and a net - // location (a domain name such as www.github.com) it is validated as a URL - try { - URL url = new URL(sUrl); - if(url.getProtocol().equals("file")) { - return true; - } - return url.getAuthority() != null; - } - catch(MalformedURLException e) { - return false; - } - } - - public static String joinUrl(String sUrl,String relativePath) { - // Builds a new URL from the given URL and the relative path - - // Example: - // url: http://www.githib.com/openstack/heat - // relative_path: heat-translator - // - joined: http://www.githib.com/openstack/heat-translator - if(!validateUrl(sUrl)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE255", String.format( - "ValueError: The URL \"%s\" is malformed",sUrl))); - } - try { - URL base = new URL(sUrl); - return (new URL(base,relativePath)).toString(); - } - catch(MalformedURLException e) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE256", String.format( - "ValueError: Joining URL \"%s\" and relative path \"%s\" caused an exception",sUrl,relativePath))); - return sUrl; - } - } - - public static boolean isUrlAccessible(String sUrl) { - // Validates whether the given URL is accessible - - // Returns true if the get call returns a 200 response code. - // Otherwise, returns false. 
- try { - HttpURLConnection connection = (HttpURLConnection) new URL(sUrl).openConnection(); - connection.setRequestMethod("HEAD"); - int responseCode = connection.getResponseCode(); - return responseCode == 200; - } - catch(IOException e) { - return false; - } - } - -} - -/*python - -from six.moves.urllib.parse import urljoin -from six.moves.urllib.parse import urlparse -from toscaparser.common.exception import ValidationIssueCollector -from toscaparser.utils.gettextutils import _ - -try: - # Python 3.x - import urllib.request as urllib2 -except ImportError: - # Python 2.x - import urllib2 - - -class UrlUtils(object): - - @staticmethod - def validate_url(path): - """Validates whether the given path is a URL or not. - - If the given path includes a scheme (http, https, ftp, ...) and a net - location (a domain name such as www.github.com) it is validated as a - URL. - """ - parsed = urlparse(path) - if parsed.scheme == 'file': - # If the url uses the file scheme netloc will be "" - return True - else: - return bool(parsed.scheme) and bool(parsed.netloc) - - @staticmethod - def join_url(url, relative_path): - """Builds a new URL from the given URL and the relative path. - - Example: - url: http://www.githib.com/openstack/heat - relative_path: heat-translator - - joined: http://www.githib.com/openstack/heat-translator - """ - if not UrlUtils.validate_url(url): - ValidationIssueCollector.appendException( - ValueError(_('"%s" is not a valid URL.') % url)) - return urljoin(url, relative_path) - - @staticmethod - def url_accessible(url): - """Validates whether the given URL is accessible. - - Returns true if the get call returns a 200 response code. - Otherwise, returns false. 
- """ - return urllib2.urlopen(url).getcode() == 200 -*/ \ No newline at end of file diff --git a/src/main/java/org/openecomp/sdc/toscaparser/api/utils/ValidateUtils.java b/src/main/java/org/openecomp/sdc/toscaparser/api/utils/ValidateUtils.java deleted file mode 100644 index 53f5bec..0000000 --- a/src/main/java/org/openecomp/sdc/toscaparser/api/utils/ValidateUtils.java +++ /dev/null @@ -1,425 +0,0 @@ -package org.openecomp.sdc.toscaparser.api.utils; - -import org.openecomp.sdc.toscaparser.api.common.JToscaValidationIssue; - -import java.util.ArrayList; -import java.util.Date; -import java.util.LinkedHashMap; - -public class ValidateUtils { - - private static final String RANGE_UNBOUNDED = "UNBOUNDED"; - - public static Object strToNum(Object value) { - // Convert a string representation of a number into a numeric type - // tODO(TBD) we should not allow numeric values in, input should be str - if(value instanceof Number) { - return value; - } - if(!(value instanceof String)) { - - } - try { - return Integer.parseInt((String)value); - } - catch(NumberFormatException e) { - } - try { - return Float.parseFloat((String)value); - } - catch(Exception e) { - } - return null; - } - - public static Object validateNumeric(Object value) { - if(value != null) { - if (!(value instanceof Number)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE257", String.format( - "ValueError: \"%s\" is not a numeric",value.toString()))); - } - } - return value; - } - - public static Object validateInteger(Object value) { - if(value != null) { - if (!(value instanceof Integer)) { - // allow "true" and "false" - if (value instanceof Boolean) { - return (Boolean) value ? 
1 : 0; - } - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE258", String.format( - "ValueError: \"%s\" is not an integer",value.toString()))); - } - } - return value; - } - - public static Object validateFloat(Object value) { - if(value != null) { - if (!(value instanceof Float || value instanceof Double)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE259", String.format( - "ValueError: \"%s\" is not a float",value.toString()))); - } - } - return value; - } - - public static Object validateString(Object value) { - if(value != null) { - if (!(value instanceof String)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE260", String.format( - "ValueError: \'%s\' is not a string",value.toString()))); - } - } - return value; - } - - public static Object validateList(Object value) { - if(value != null) { - if (!(value instanceof ArrayList)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE261", String.format( - "ValueError: \"%s\" is not a list",value.toString()))); - } - } - return value; - } - - - @SuppressWarnings("unchecked") - public static Object validateRange(Object range) { - // list class check - validateList(range); - // validate range list has a min and max - if(range instanceof ArrayList && ((ArrayList)range).size() != 2) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE262", String.format( - "ValueError: \"%s\" is not a valid range",range.toString()))); - // too dangerous to continue... 
- return range; - } - // validate min and max are numerics or the keyword UNBOUNDED - boolean minTest = false; - boolean maxTest = false; - Object r0 = ((ArrayList)range).get(0); - Object r1 = ((ArrayList)range).get(1); - - if(!(r0 instanceof Integer) && !(r0 instanceof Float) || - !(r1 instanceof Integer) && !(r1 instanceof Float)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE263", String.format( - "ValueError: \"%s\" is not a valid range",range.toString()))); - // too dangerous to continue... - return range; - } - - Float min = 0.0F; - Float max = 0.0F; - if(r0 instanceof String && ((String)r0).equals(RANGE_UNBOUNDED)) { - minTest = true; - } - else { - min = r0 instanceof Integer ? ((Integer)r0).floatValue() : (Float)r0; - } - if(r1 instanceof String && ((String)r1).equals(RANGE_UNBOUNDED)) { - maxTest = true; - } - else { - max = r1 instanceof Integer ? ((Integer)r1).floatValue() : (Float)r1; - } - - // validate the max > min (account for UNBOUNDED) - if(!minTest && !maxTest) { - // Note: min == max is allowed - if(min > max) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE264", String.format( - "ValueError:\"%s\" is not a valid range",range.toString()))); - } - } - return range; - } - - @SuppressWarnings("unchecked") - public static Object validateValueInRange(Object value,Object range,String propName) { - // verify all 3 are numeric and convert to Floats - if(!(value instanceof Integer || value instanceof Float)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE265", String.format( - "ValueError: validateInRange: \"%s\" is not a number",range.toString()))); - return value; - } - Float fval = value instanceof Integer ? ((Integer)value).floatValue() : (Float)value; - - ////////////////////////// - //"validateRange(range);" - ////////////////////////// - // better safe than sorry... 
- // validate that range list has a min and max - if(range instanceof ArrayList && ((ArrayList)range).size() != 2) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE266", String.format( - "ValueError: \"%s\" is not a valid range",range.toString()))); - // too dangerous to continue... - return value; - } - // validate min and max are numerics or the keyword UNBOUNDED - boolean minTest = false; - boolean maxTest = false; - Object r0 = ((ArrayList)range).get(0); - Object r1 = ((ArrayList)range).get(1); - - if(!(r0 instanceof Integer) && !(r0 instanceof Float) || - !(r1 instanceof Integer) && !(r1 instanceof Float)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE267", String.format( - "ValueError: \"%s\" is not a valid range",range.toString()))); - // too dangerous to continue... - return value; - } - - Float min = 0.0F; - Float max = 0.0F; - if(r0 instanceof String && ((String)r0).equals(RANGE_UNBOUNDED)) { - minTest = true; - } - else { - min = r0 instanceof Integer ? ((Integer)r0).floatValue() : (Float)r0; - } - if(r1 instanceof String && ((String)r1).equals(RANGE_UNBOUNDED)) { - maxTest = true; - } - else { - max = r1 instanceof Integer ? ((Integer)r1).floatValue() : (Float)r1; - } - - // validate the max > min (account for UNBOUNDED) - if(!minTest && !maxTest) { - // Note: min == max is allowed - if(min > max) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE268", String.format( - "ValueError:\"%s\" is not a valid range",range.toString()))); - } - } - // finally... 
- boolean bError = false; - //Note: value is valid if equal to min - if(!minTest) { - if(fval < min) { - bError = true; - } - } - // Note: value is valid if equal to max - if(!maxTest) { - if(fval > max) { - bError = true; - } - } - if(bError) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE269", String.format( - "RangeValueError: Property \"%s\", \"%s\" not in range [\"%s\" - \"%s\"", - propName,value.toString(),r0.toString(),r1.toString()))); - } - return value; - } - - public static Object validateMap(Object ob) { - if(ob != null) { - if (!(ob instanceof LinkedHashMap)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE270", String.format( - "ValueError\"%s\" is not a map.",ob.toString()))); - } - } - return ob; - } - - public static Object validateBoolean(Object value) { - if(value != null) { - if (value instanceof Boolean) { - return value; - } - if (value instanceof String) { - String normalized = ((String) value).toLowerCase(); - if (normalized.equals("true") || normalized.equals("false")) { - return normalized.equals("true"); - } - } - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE271", String.format( - "ValueError: \"%s\" is not a boolean",value.toString()))); - } - return value; - } - - public static Object validateTimestamp(Object value) { - /* - try: - # Note: we must return our own exception message - # as dateutil's parser returns different types / values on - # different systems. OSX, for example, returns a tuple - # containing a different error message than Linux - dateutil.parser.parse(value) - except Exception as e: - original_err_msg = str(e) - log.error(original_err_msg) - ValidationIssueCollector.appendException( - ValueError(_('"%(val)s" is not a valid timestamp. 
"%(msg)s"') % - {'val': value, 'msg': original_err_msg})) - */ - - // timestamps are loaded as Date objects by the YAML parser - if(value != null) { - if (!(value instanceof Date)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE272", String.format( - "ValueError: \"%s\" is not a valid timestamp", - value.toString()))); - - } - } - return value; - } - -} - -/*python - -from toscaparser.elements import constraints -from toscaparser.common.exception import ValidationIssueCollector -from toscaparser.common.exception import InvalidTOSCAVersionPropertyException -from toscaparser.common.exception import RangeValueError -from toscaparser.utils.gettextutils import _ - -log = logging.getLogger('tosca') - -RANGE_UNBOUNDED = 'UNBOUNDED' - - -def str_to_num(value): - '''Convert a string representation of a number into a numeric type.''' - # tODO(TBD) we should not allow numeric values in, input should be str - if isinstance(value, numbers.Number): - return value - try: - return int(value) - except ValueError: - return float(value) - - -def validate_numeric(value): - if not isinstance(value, numbers.Number): - ValidationIssueCollector.appendException( - ValueError(_('"%s" is not a numeric.') % value)) - return value - - -def validate_integer(value): - if not isinstance(value, int): - try: - value = int(value) - except Exception: - ValidationIssueCollector.appendException( - ValueError(_('"%s" is not an integer.') % value)) - return value - - -def validate_float(value): - if not isinstance(value, float): - ValidationIssueCollector.appendException( - ValueError(_('"%s" is not a float.') % value)) - return value - - -def validate_string(value): - if not isinstance(value, six.string_types): - ValidationIssueCollector.appendException( - ValueError(_('"%s" is not a string.') % value)) - return value - - -def validate_list(value): - if not isinstance(value, list): - ValidationIssueCollector.appendException( - ValueError(_('"%s" is not a list.') 
% value)) - return value - - -def validate_range(range): - # list class check - validate_list(range) - # validate range list has a min and max - if len(range) != 2: - ValidationIssueCollector.appendException( - ValueError(_('"%s" is not a valid range.') % range)) - # validate min and max are numerics or the keyword UNBOUNDED - min_test = max_test = False - if not range[0] == RANGE_UNBOUNDED: - min = validate_numeric(range[0]) - else: - min_test = True - if not range[1] == RANGE_UNBOUNDED: - max = validate_numeric(range[1]) - else: - max_test = True - # validate the max > min (account for UNBOUNDED) - if not min_test and not max_test: - # Note: min == max is allowed - if min > max: - ValidationIssueCollector.appendException( - ValueError(_('"%s" is not a valid range.') % range)) - - return range - - -def validate_value_in_range(value, range, prop_name): - validate_numeric(value) - validate_range(range) - - # Note: value is valid if equal to min - if range[0] != RANGE_UNBOUNDED: - if value < range[0]: - ValidationIssueCollector.appendException( - RangeValueError(pname=prop_name, - pvalue=value, - vmin=range[0], - vmax=range[1])) - # Note: value is valid if equal to max - if range[1] != RANGE_UNBOUNDED: - if value > range[1]: - ValidationIssueCollector.appendException( - RangeValueError(pname=prop_name, - pvalue=value, - vmin=range[0], - vmax=range[1])) - return value - - -def validate_map(value): - if not isinstance(value, collections.Mapping): - ValidationIssueCollector.appendException( - ValueError(_('"%s" is not a map.') % value)) - return value - - -def validate_boolean(value): - if isinstance(value, bool): - return value - - if isinstance(value, str): - normalised = value.lower() - if normalised in ['true', 'false']: - return normalised == 'true' - - ValidationIssueCollector.appendException( - ValueError(_('"%s" is not a boolean.') % value)) - - -def validate_timestamp(value): - try: - # Note: we must return our own exception message - # as dateutil's parser 
returns different types / values on - # different systems. OSX, for example, returns a tuple - # containing a different error message than Linux - dateutil.parser.parse(value) - except Exception as e: - original_err_msg = str(e) - log.error(original_err_msg) - ValidationIssueCollector.appendException( - ValueError(_('"%(val)s" is not a valid timestamp. "%(msg)s"') % - {'val': value, 'msg': original_err_msg})) - return - -*/ \ No newline at end of file diff --git a/src/test/java/org/onap/sdc/toscaparser/api/GetValidationIssues.java b/src/test/java/org/onap/sdc/toscaparser/api/GetValidationIssues.java new file mode 100644 index 0000000..a5afa6b --- /dev/null +++ b/src/test/java/org/onap/sdc/toscaparser/api/GetValidationIssues.java @@ -0,0 +1,79 @@ +package org.onap.sdc.toscaparser.api; + +import com.opencsv.CSVWriter; + +import java.io.File; +import java.io.FileWriter; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Scanner; +//Generate excel file, include all validation issues errors in jtosca +//the error java code, the line number and file name for each error. 
+public class GetValidationIssues { + + public static CSVWriter fileWriter = null; + public static List data = new ArrayList<>(); + + public static void main(String[] args) { + System.out.println("GetAllValidationIssues - path to project files Directory is " + Arrays.toString(args)); + File jtoscaFiles = new File(args[0]+ "\\jtosca\\src\\main\\java\\org\\onap\\sdc\\toscaparser\\api"); + + try { + printFiles(jtoscaFiles); + fileWriter = new CSVWriter(new FileWriter(args[1]+"\\JToscaValidationIssues_"+System.currentTimeMillis()+".csv"), '\t'); + fileWriter.writeNext(new String[] {"Error Message", "Class Name", "Line No."}, false); + fileWriter.writeAll(data, false); + } catch (IOException e) { + e.printStackTrace(); + } finally { + try { + fileWriter.flush(); + fileWriter.close(); + } catch (IOException e) { + System.out.println("Error while flushing/closing fileWriter !!!"); + e.printStackTrace(); + } + } + } + + private static void printFiles(File dir) { + if (dir != null && dir.exists()) { + for (File file : dir.listFiles()) { + if (file.isDirectory()) + printFiles(file); + else { + Scanner scanner = null; + try { + scanner = new Scanner(file); + + int lineNum = 0; + while (scanner.hasNextLine()) { + String line = scanner.nextLine(); + lineNum++; + if (line.startsWith("/*python")) + break; + + if (!line.trim().startsWith("//") && !line.trim().startsWith("#") && line.contains("ThreadLocalsHolder.getCollector().appendValidationIssue")) { + String errMsg = line.trim(); + if (!errMsg.contains(";")) { + String nextLine = null; + while (scanner.hasNextLine() && (nextLine == null || !nextLine.contains(";"))) { + nextLine = scanner.nextLine(); + errMsg += nextLine.trim(); + } + } + + data.add(new String[]{errMsg, file.getName(), String.valueOf(lineNum)}); + } + } + } catch (IOException e) { + e.printStackTrace(); + } + } + } + } + } +} + diff --git a/src/test/java/org/onap/sdc/toscaparser/api/JToscaImportTest.java 
b/src/test/java/org/onap/sdc/toscaparser/api/JToscaImportTest.java new file mode 100644 index 0000000..589e47c --- /dev/null +++ b/src/test/java/org/onap/sdc/toscaparser/api/JToscaImportTest.java @@ -0,0 +1,64 @@ +package org.onap.sdc.toscaparser.api; + +import org.junit.Test; +import org.onap.sdc.toscaparser.api.common.JToscaException; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +import java.io.File; +import java.util.ArrayList; +import java.util.List; +import java.util.stream.Collectors; + +import static org.junit.Assert.assertEquals; + +public class JToscaImportTest { + + @Test + public void testNoMissingTypeValidationError() throws JToscaException { + String fileStr = JToscaImportTest.class.getClassLoader().getResource + ("csars/sdc-onboarding_csar.csar").getFile(); + File file = new File(fileStr); + new ToscaTemplate(file.getAbsolutePath(), null, true, null); + List missingTypeErrors = ThreadLocalsHolder.getCollector() + .getValidationIssueReport() + .stream() + .filter(s -> s.contains("JE136")) + .collect(Collectors.toList()); + assertEquals(0, missingTypeErrors.size()); + } + + @Test + public void testNoStackOverFlowError() { + Exception jte = null; + try { + String fileStr = JToscaImportTest.class.getClassLoader().getResource + ("csars/sdc-onboarding_csar.csar").getFile(); + File file = new File(fileStr); + new ToscaTemplate(file.getAbsolutePath(), null, true, null); + } catch (Exception e){ + jte = e; + } + assertEquals(null, jte); + } + + @Test + public void testNoInvalidImports() throws JToscaException { + List fileNames = new ArrayList<>(); + fileNames.add("csars/tmpCSAR_Huawei_vSPGW_fixed.csar"); + fileNames.add("csars/sdc-onboarding_csar.csar"); + fileNames.add("csars/resource-Spgw-csar-ZTE.csar"); + + for (String fileName : fileNames) { + String fileStr = JToscaImportTest.class.getClassLoader().getResource(fileName).getFile(); + File file = new File(fileStr); + new ToscaTemplate(file.getAbsolutePath(), null, true, null); + List 
invalidImportErrors = ThreadLocalsHolder.getCollector() + .getValidationIssueReport() + .stream() + .filter(s -> s.contains("JE195")) + .collect(Collectors.toList()); + assertEquals(0, invalidImportErrors.size()); + } + } + +} diff --git a/src/test/java/org/onap/sdc/toscaparser/api/JToscaMetadataParse.java b/src/test/java/org/onap/sdc/toscaparser/api/JToscaMetadataParse.java new file mode 100644 index 0000000..37c6d18 --- /dev/null +++ b/src/test/java/org/onap/sdc/toscaparser/api/JToscaMetadataParse.java @@ -0,0 +1,61 @@ +package org.onap.sdc.toscaparser.api; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; + +import java.io.File; +import java.util.LinkedHashMap; + +import org.junit.Test; +import org.onap.sdc.toscaparser.api.common.JToscaException; +import org.onap.sdc.toscaparser.api.utils.JToscaErrorCodes; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +public class JToscaMetadataParse { + + @Test + public void testMetadataParsedCorrectly() throws JToscaException { + String fileStr = JToscaMetadataParse.class.getClassLoader().getResource("csars/csar_hello_world.csar").getFile(); + File file = new File(fileStr); + ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); + LinkedHashMap metadataProperties = toscaTemplate.getMetaProperties("TOSCA.meta"); + assertNotNull(metadataProperties); + Object entryDefinition = metadataProperties.get("Entry-Definitions"); + assertNotNull(entryDefinition); + assertEquals("tosca_helloworld.yaml", entryDefinition); + } + + @Test + public void noWarningsAfterParse() throws JToscaException { + String fileStr = JToscaMetadataParse.class.getClassLoader().getResource("csars/tmpCSAR_Huawei_vSPGW_fixed.csar").getFile(); + File file = new File(fileStr); + ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); + int validationIssuesCaught = 
ThreadLocalsHolder.getCollector().validationIssuesCaught(); + assertTrue(validationIssuesCaught == 0 ); + } + + @Test + public void testEmptyCsar() throws JToscaException { + String fileStr = JToscaMetadataParse.class.getClassLoader().getResource("csars/emptyCsar.csar").getFile(); + File file = new File(fileStr); + try { + ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); + } catch (JToscaException e) { + assertTrue(e.getCode().equals(JToscaErrorCodes.INVALID_CSAR_FORMAT.getValue())); + } + int validationIssuesCaught = ThreadLocalsHolder.getCollector().validationIssuesCaught(); + assertTrue(validationIssuesCaught == 0 ); + } + + @Test + public void testEmptyPath() throws JToscaException { + String fileStr = JToscaMetadataParse.class.getClassLoader().getResource("").getFile(); + File file = new File(fileStr); + try { + ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); + }catch (JToscaException e) { + assertTrue(e.getCode().equals(JToscaErrorCodes.PATH_NOT_VALID.getValue())); + } + } +} diff --git a/src/test/java/org/onap/sdc/toscaparser/api/elements/EntityTypeTest.java b/src/test/java/org/onap/sdc/toscaparser/api/elements/EntityTypeTest.java new file mode 100644 index 0000000..2a88c2b --- /dev/null +++ b/src/test/java/org/onap/sdc/toscaparser/api/elements/EntityTypeTest.java @@ -0,0 +1,55 @@ +package org.onap.sdc.toscaparser.api.elements; + +import org.junit.After; +import org.junit.Test; + +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.Map; + +import static org.junit.Assert.assertEquals; + +public class EntityTypeTest { + + private static final Map origMap = EntityType.TOSCA_DEF; + + @Test + public void testUpdateDefinitions() throws Exception { + + Map testData = new HashMap<>(); + testData.put("tosca.nodes.nfv.VNF", "{derived_from=tosca.nodes.Root, properties={id={type=string, description=ID of this VNF}, vendor={type=string, description=name of the 
vendor who generate this VNF}, version={type=version, description=version of the software for this VNF}}, requirements=[{virtualLink={capability=tosca.capabilities.nfv.VirtualLinkable, relationship=tosca.relationships.nfv.VirtualLinksTo, node=tosca.nodes.nfv.VL}}]}"); + testData.put("tosca.nodes.nfv.VDU", "{derived_from=tosca.nodes.Compute, capabilities={high_availability={type=tosca.capabilities.nfv.HA}, virtualbinding={type=tosca.capabilities.nfv.VirtualBindable}, monitoring_parameter={type=tosca.capabilities.nfv.Metric}}, requirements=[{high_availability={capability=tosca.capabilities.nfv.HA, relationship=tosca.relationships.nfv.HA, node=tosca.nodes.nfv.VDU, occurrences=[0, 1]}}]}"); + testData.put("tosca.nodes.nfv.CP", "{derived_from=tosca.nodes.network.Port, properties={type={type=string, required=false}}, requirements=[{virtualLink={capability=tosca.capabilities.nfv.VirtualLinkable, relationship=tosca.relationships.nfv.VirtualLinksTo, node=tosca.nodes.nfv.VL}}, {virtualBinding={capability=tosca.capabilities.nfv.VirtualBindable, relationship=tosca.relationships.nfv.VirtualBindsTo, node=tosca.nodes.nfv.VDU}}], attributes={address={type=string}}}"); + testData.put("tosca.nodes.nfv.VL", "{derived_from=tosca.nodes.network.Network, properties={vendor={type=string, required=true, description=name of the vendor who generate this VL}}, capabilities={virtual_linkable={type=tosca.capabilities.nfv.VirtualLinkable}}}"); + testData.put("tosca.nodes.nfv.VL.ELine", "{derived_from=tosca.nodes.nfv.VL, capabilities={virtual_linkable={occurrences=2}}}"); + testData.put("tosca.nodes.nfv.VL.ELAN", "{derived_from=tosca.nodes.nfv.VL}"); + testData.put("tosca.nodes.nfv.VL.ETree", "{derived_from=tosca.nodes.nfv.VL}"); + testData.put("tosca.nodes.nfv.FP", "{derived_from=tosca.nodes.Root, properties={policy={type=string, required=false, description=name of the vendor who generate this VL}}, requirements=[{forwarder={capability=tosca.capabilities.nfv.Forwarder, 
relationship=tosca.relationships.nfv.ForwardsTo}}]}"); + testData.put("tosca.groups.nfv.VNFFG", "{derived_from=tosca.groups.Root, properties={vendor={type=string, required=true, description=name of the vendor who generate this VNFFG}, version={type=string, required=true, description=version of this VNFFG}, number_of_endpoints={type=integer, required=true, description=count of the external endpoints included in this VNFFG}, dependent_virtual_link={type=list, entry_schema={type=string}, required=true, description=Reference to a VLD used in this Forwarding Graph}, connection_point={type=list, entry_schema={type=string}, required=true, description=Reference to Connection Points forming the VNFFG}, constituent_vnfs={type=list, entry_schema={type=string}, required=true, description=Reference to a list of VNFD used in this VNF Forwarding Graph}}}"); + testData.put("tosca.relationships.nfv.VirtualLinksTo", "{derived_from=tosca.relationships.network.LinksTo, valid_target_types=[tosca.capabilities.nfv.VirtualLinkable]}"); + testData.put("tosca.relationships.nfv.VirtualBindsTo", "{derived_from=tosca.relationships.network.BindsTo, valid_target_types=[tosca.capabilities.nfv.VirtualBindable]}"); + testData.put("tosca.relationships.nfv.HA", "{derived_from=tosca.relationships.Root, valid_target_types=[tosca.capabilities.nfv.HA]}"); + testData.put("tosca.relationships.nfv.Monitor", "{derived_from=tosca.relationships.ConnectsTo, valid_target_types=[tosca.capabilities.nfv.Metric]}"); + testData.put("tosca.relationships.nfv.ForwardsTo", "{derived_from=tosca.relationships.root, valid_target_types=[tosca.capabilities.nfv.Forwarder]}"); + testData.put("tosca.capabilities.nfv.VirtualLinkable", "{derived_from=tosca.capabilities.network.Linkable}"); + testData.put("tosca.capabilities.nfv.VirtualBindable", "{derived_from=tosca.capabilities.network.Bindable}"); + testData.put("tosca.capabilities.nfv.HA", "{derived_from=tosca.capabilities.Root, valid_source_types=[tosca.nodes.nfv.VDU]}"); + 
testData.put("tosca.capabilities.nfv.HA.ActiveActive", "{derived_from=tosca.capabilities.nfv.HA}"); + testData.put("tosca.capabilities.nfv.HA.ActivePassive", "{derived_from=tosca.capabilities.nfv.HA}"); + testData.put("tosca.capabilities.nfv.Metric", "{derived_from=tosca.capabilities.Root}"); + testData.put("tosca.capabilities.nfv.Forwarder", "{derived_from=tosca.capabilities.Root}"); + + Map expectedDefMap = origMap; + expectedDefMap.putAll(testData); + EntityType.updateDefinitions("tosca_simple_profile_for_nfv_1_0_0"); + + assertEquals(expectedDefMap, EntityType.TOSCA_DEF); + + } + + @After + public void tearDown() throws Exception { + EntityType.TOSCA_DEF = (LinkedHashMap) origMap; + } + +} \ No newline at end of file diff --git a/src/test/java/org/openecomp/sdc/toscaparser/api/GetValidationIssues.java b/src/test/java/org/openecomp/sdc/toscaparser/api/GetValidationIssues.java deleted file mode 100644 index 7a0eec1..0000000 --- a/src/test/java/org/openecomp/sdc/toscaparser/api/GetValidationIssues.java +++ /dev/null @@ -1,81 +0,0 @@ -package org.openecomp.sdc.toscaparser.api; - -import com.opencsv.CSVWriter; - -import java.io.File; -import java.io.FileNotFoundException; -import java.io.FileWriter; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Date; -import java.util.List; -import java.util.Scanner; -//Generate excel file, include all validation issues errors in jtosca -//the error java code, the line number and file name for each error. 
-public class GetValidationIssues { - - public static CSVWriter fileWriter = null; - public static List data = new ArrayList<>(); - - public static void main(String[] args) { - System.out.println("GetAllValidationIssues - path to project files Directory is " + Arrays.toString(args)); - File jtoscaFiles = new File(args[0]+ "\\jtosca\\src\\main\\java\\org\\openecomp\\sdc\\toscaparser\\api"); - - try { - printFiles(jtoscaFiles); - fileWriter = new CSVWriter(new FileWriter(args[1]+"\\JToscaValidationIssues_"+System.currentTimeMillis()+".csv"), '\t'); - fileWriter.writeNext(new String[] {"Error Message", "Class Name", "Line No."}, false); - fileWriter.writeAll(data, false); - } catch (IOException e) { - e.printStackTrace(); - } finally { - try { - fileWriter.flush(); - fileWriter.close(); - } catch (IOException e) { - System.out.println("Error while flushing/closing fileWriter !!!"); - e.printStackTrace(); - } - } - } - - private static void printFiles(File dir) { - if (dir != null && dir.exists()) { - for (File file : dir.listFiles()) { - if (file.isDirectory()) - printFiles(file); - else { - Scanner scanner = null; - try { - scanner = new Scanner(file); - - int lineNum = 0; - while (scanner.hasNextLine()) { - String line = scanner.nextLine(); - lineNum++; - if (line.startsWith("/*python")) - break; - - if (!line.trim().startsWith("//") && !line.trim().startsWith("#") && line.contains("ThreadLocalsHolder.getCollector().appendValidationIssue")) { - String errMsg = line.trim(); - if (!errMsg.contains(";")) { - String nextLine = null; - while (scanner.hasNextLine() && (nextLine == null || !nextLine.contains(";"))) { - nextLine = scanner.nextLine(); - errMsg += nextLine.trim(); - } - } - - data.add(new String[]{errMsg, file.getName(), String.valueOf(lineNum)}); - } - } - } catch (IOException e) { - e.printStackTrace(); - } - } - } - } - } -} - diff --git a/src/test/java/org/openecomp/sdc/toscaparser/api/JToscaImportTest.java 
b/src/test/java/org/openecomp/sdc/toscaparser/api/JToscaImportTest.java deleted file mode 100644 index c8a30fa..0000000 --- a/src/test/java/org/openecomp/sdc/toscaparser/api/JToscaImportTest.java +++ /dev/null @@ -1,64 +0,0 @@ -package org.openecomp.sdc.toscaparser.api; - -import org.junit.Test; -import org.openecomp.sdc.toscaparser.api.common.JToscaException; -import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; - -import java.io.File; -import java.util.ArrayList; -import java.util.List; -import java.util.stream.Collectors; - -import static org.junit.Assert.assertEquals; - -public class JToscaImportTest { - - @Test - public void testNoMissingTypeValidationError() throws JToscaException { - String fileStr = JToscaImportTest.class.getClassLoader().getResource - ("csars/sdc-onboarding_csar.csar").getFile(); - File file = new File(fileStr); - new ToscaTemplate(file.getAbsolutePath(), null, true, null); - List missingTypeErrors = ThreadLocalsHolder.getCollector() - .getValidationIssueReport() - .stream() - .filter(s -> s.contains("JE136")) - .collect(Collectors.toList()); - assertEquals(0, missingTypeErrors.size()); - } - - @Test - public void testNoStackOverFlowError() { - Exception jte = null; - try { - String fileStr = JToscaImportTest.class.getClassLoader().getResource - ("csars/sdc-onboarding_csar.csar").getFile(); - File file = new File(fileStr); - new ToscaTemplate(file.getAbsolutePath(), null, true, null); - } catch (Exception e){ - jte = e; - } - assertEquals(null, jte); - } - - @Test - public void testNoInvalidImports() throws JToscaException { - List fileNames = new ArrayList<>(); - fileNames.add("csars/tmpCSAR_Huawei_vSPGW_fixed.csar"); - fileNames.add("csars/sdc-onboarding_csar.csar"); - fileNames.add("csars/resource-Spgw-csar-ZTE.csar"); - - for (String fileName : fileNames) { - String fileStr = JToscaImportTest.class.getClassLoader().getResource(fileName).getFile(); - File file = new File(fileStr); - new 
ToscaTemplate(file.getAbsolutePath(), null, true, null); - List invalidImportErrors = ThreadLocalsHolder.getCollector() - .getValidationIssueReport() - .stream() - .filter(s -> s.contains("JE195")) - .collect(Collectors.toList()); - assertEquals(0, invalidImportErrors.size()); - } - } - -} diff --git a/src/test/java/org/openecomp/sdc/toscaparser/api/JToscaMetadataParse.java b/src/test/java/org/openecomp/sdc/toscaparser/api/JToscaMetadataParse.java deleted file mode 100644 index 8f55fa4..0000000 --- a/src/test/java/org/openecomp/sdc/toscaparser/api/JToscaMetadataParse.java +++ /dev/null @@ -1,61 +0,0 @@ -package org.openecomp.sdc.toscaparser.api; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; - -import java.io.File; -import java.util.LinkedHashMap; - -import org.junit.Test; -import org.openecomp.sdc.toscaparser.api.common.JToscaException; -import org.openecomp.sdc.toscaparser.api.utils.JToscaErrorCodes; -import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder; - -public class JToscaMetadataParse { - - @Test - public void testMetadataParsedCorrectly() throws JToscaException { - String fileStr = JToscaMetadataParse.class.getClassLoader().getResource("csars/csar_hello_world.csar").getFile(); - File file = new File(fileStr); - ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); - LinkedHashMap metadataProperties = toscaTemplate.getMetaProperties("TOSCA.meta"); - assertNotNull(metadataProperties); - Object entryDefinition = metadataProperties.get("Entry-Definitions"); - assertNotNull(entryDefinition); - assertEquals("tosca_helloworld.yaml", entryDefinition); - } - - @Test - public void noWarningsAfterParse() throws JToscaException { - String fileStr = JToscaMetadataParse.class.getClassLoader().getResource("csars/tmpCSAR_Huawei_vSPGW_fixed.csar").getFile(); - File file = new File(fileStr); - ToscaTemplate toscaTemplate = new 
ToscaTemplate(file.getAbsolutePath(), null, true, null); - int validationIssuesCaught = ThreadLocalsHolder.getCollector().validationIssuesCaught(); - assertTrue(validationIssuesCaught == 0 ); - } - - @Test - public void testEmptyCsar() throws JToscaException { - String fileStr = JToscaMetadataParse.class.getClassLoader().getResource("csars/emptyCsar.csar").getFile(); - File file = new File(fileStr); - try { - ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); - } catch (JToscaException e) { - assertTrue(e.getCode().equals(JToscaErrorCodes.INVALID_CSAR_FORMAT.getValue())); - } - int validationIssuesCaught = ThreadLocalsHolder.getCollector().validationIssuesCaught(); - assertTrue(validationIssuesCaught == 0 ); - } - - @Test - public void testEmptyPath() throws JToscaException { - String fileStr = JToscaMetadataParse.class.getClassLoader().getResource("").getFile(); - File file = new File(fileStr); - try { - ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); - }catch (JToscaException e) { - assertTrue(e.getCode().equals(JToscaErrorCodes.PATH_NOT_VALID.getValue())); - } - } -} diff --git a/src/test/java/org/openecomp/sdc/toscaparser/api/elements/EntityTypeTest.java b/src/test/java/org/openecomp/sdc/toscaparser/api/elements/EntityTypeTest.java deleted file mode 100644 index 8e74e99..0000000 --- a/src/test/java/org/openecomp/sdc/toscaparser/api/elements/EntityTypeTest.java +++ /dev/null @@ -1,56 +0,0 @@ -package org.openecomp.sdc.toscaparser.api.elements; - -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import java.util.HashMap; -import java.util.LinkedHashMap; -import java.util.Map; - -import static org.junit.Assert.assertEquals; - -public class EntityTypeTest { - - private static final Map origMap = EntityType.TOSCA_DEF; - - @Test - public void testUpdateDefinitions() throws Exception { - - Map testData = new HashMap<>(); - testData.put("tosca.nodes.nfv.VNF", 
"{derived_from=tosca.nodes.Root, properties={id={type=string, description=ID of this VNF}, vendor={type=string, description=name of the vendor who generate this VNF}, version={type=version, description=version of the software for this VNF}}, requirements=[{virtualLink={capability=tosca.capabilities.nfv.VirtualLinkable, relationship=tosca.relationships.nfv.VirtualLinksTo, node=tosca.nodes.nfv.VL}}]}"); - testData.put("tosca.nodes.nfv.VDU", "{derived_from=tosca.nodes.Compute, capabilities={high_availability={type=tosca.capabilities.nfv.HA}, virtualbinding={type=tosca.capabilities.nfv.VirtualBindable}, monitoring_parameter={type=tosca.capabilities.nfv.Metric}}, requirements=[{high_availability={capability=tosca.capabilities.nfv.HA, relationship=tosca.relationships.nfv.HA, node=tosca.nodes.nfv.VDU, occurrences=[0, 1]}}]}"); - testData.put("tosca.nodes.nfv.CP", "{derived_from=tosca.nodes.network.Port, properties={type={type=string, required=false}}, requirements=[{virtualLink={capability=tosca.capabilities.nfv.VirtualLinkable, relationship=tosca.relationships.nfv.VirtualLinksTo, node=tosca.nodes.nfv.VL}}, {virtualBinding={capability=tosca.capabilities.nfv.VirtualBindable, relationship=tosca.relationships.nfv.VirtualBindsTo, node=tosca.nodes.nfv.VDU}}], attributes={address={type=string}}}"); - testData.put("tosca.nodes.nfv.VL", "{derived_from=tosca.nodes.network.Network, properties={vendor={type=string, required=true, description=name of the vendor who generate this VL}}, capabilities={virtual_linkable={type=tosca.capabilities.nfv.VirtualLinkable}}}"); - testData.put("tosca.nodes.nfv.VL.ELine", "{derived_from=tosca.nodes.nfv.VL, capabilities={virtual_linkable={occurrences=2}}}"); - testData.put("tosca.nodes.nfv.VL.ELAN", "{derived_from=tosca.nodes.nfv.VL}"); - testData.put("tosca.nodes.nfv.VL.ETree", "{derived_from=tosca.nodes.nfv.VL}"); - testData.put("tosca.nodes.nfv.FP", "{derived_from=tosca.nodes.Root, properties={policy={type=string, required=false, description=name 
of the vendor who generate this VL}}, requirements=[{forwarder={capability=tosca.capabilities.nfv.Forwarder, relationship=tosca.relationships.nfv.ForwardsTo}}]}"); - testData.put("tosca.groups.nfv.VNFFG", "{derived_from=tosca.groups.Root, properties={vendor={type=string, required=true, description=name of the vendor who generate this VNFFG}, version={type=string, required=true, description=version of this VNFFG}, number_of_endpoints={type=integer, required=true, description=count of the external endpoints included in this VNFFG}, dependent_virtual_link={type=list, entry_schema={type=string}, required=true, description=Reference to a VLD used in this Forwarding Graph}, connection_point={type=list, entry_schema={type=string}, required=true, description=Reference to Connection Points forming the VNFFG}, constituent_vnfs={type=list, entry_schema={type=string}, required=true, description=Reference to a list of VNFD used in this VNF Forwarding Graph}}}"); - testData.put("tosca.relationships.nfv.VirtualLinksTo", "{derived_from=tosca.relationships.network.LinksTo, valid_target_types=[tosca.capabilities.nfv.VirtualLinkable]}"); - testData.put("tosca.relationships.nfv.VirtualBindsTo", "{derived_from=tosca.relationships.network.BindsTo, valid_target_types=[tosca.capabilities.nfv.VirtualBindable]}"); - testData.put("tosca.relationships.nfv.HA", "{derived_from=tosca.relationships.Root, valid_target_types=[tosca.capabilities.nfv.HA]}"); - testData.put("tosca.relationships.nfv.Monitor", "{derived_from=tosca.relationships.ConnectsTo, valid_target_types=[tosca.capabilities.nfv.Metric]}"); - testData.put("tosca.relationships.nfv.ForwardsTo", "{derived_from=tosca.relationships.root, valid_target_types=[tosca.capabilities.nfv.Forwarder]}"); - testData.put("tosca.capabilities.nfv.VirtualLinkable", "{derived_from=tosca.capabilities.network.Linkable}"); - testData.put("tosca.capabilities.nfv.VirtualBindable", "{derived_from=tosca.capabilities.network.Bindable}"); - 
testData.put("tosca.capabilities.nfv.HA", "{derived_from=tosca.capabilities.Root, valid_source_types=[tosca.nodes.nfv.VDU]}"); - testData.put("tosca.capabilities.nfv.HA.ActiveActive", "{derived_from=tosca.capabilities.nfv.HA}"); - testData.put("tosca.capabilities.nfv.HA.ActivePassive", "{derived_from=tosca.capabilities.nfv.HA}"); - testData.put("tosca.capabilities.nfv.Metric", "{derived_from=tosca.capabilities.Root}"); - testData.put("tosca.capabilities.nfv.Forwarder", "{derived_from=tosca.capabilities.Root}"); - - Map expectedDefMap = origMap; - expectedDefMap.putAll(testData); - EntityType.updateDefinitions("tosca_simple_profile_for_nfv_1_0_0"); - - assertEquals(expectedDefMap, EntityType.TOSCA_DEF); - - } - - @After - public void tearDown() throws Exception { - EntityType.TOSCA_DEF = (LinkedHashMap) origMap; - } - -} \ No newline at end of file diff --git a/version.properties b/version.properties index a6be0db..235314e 100644 --- a/version.properties +++ b/version.properties @@ -4,8 +4,8 @@ # because they are used in Jenkins, whose plug-in doesn't support major=1 -minor=2 -patch=2 +minor=3 +patch=0 base_version=${major}.${minor}.${patch} -- cgit 1.2.3-korg From a492ff29a0ff1a69991dd2a101c0e06168d43c85 Mon Sep 17 00:00:00 2001 From: Yuli Shlosberg Date: Wed, 7 Mar 2018 18:21:34 +0200 Subject: update JTOSCA package names Change-Id: I201e07d38bbfb85fc518a4558eb04166f9cef44d Issue-ID: SDC-950 Signed-off-by: Yuli Shlosberg --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 8589eb3..0ae2ca8 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ org.onap.sdc.jtosca jtosca - 1.3.0 + 1.3.0-SNAPSHOT sdc-jtosca -- cgit 1.2.3-korg From 435ce5399f3393ec1b7ddcaaa42a52aaedcb1e3d Mon Sep 17 00:00:00 2001 From: Rokhvarg David Date: Sun, 11 Mar 2018 14:23:17 +0200 Subject: vLAN Tagging - Support Tosca Policies Change-Id: I3086a3562dd4fa16f0a446884fbbd20041eb9691 Issue-ID: SDC-1056 Signed-off-by: Rokhvarg David --- pom.xml | 2 +- 
src/main/java/org/onap/sdc/toscaparser/api/Policy.java | 6 ++++++ src/main/java/org/onap/sdc/toscaparser/api/ToscaTemplate.java | 4 ++++ version.properties | 2 +- 4 files changed, 12 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index 0ae2ca8..363050b 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ org.onap.sdc.jtosca jtosca - 1.3.0-SNAPSHOT + 1.3.1-SNAPSHOT sdc-jtosca diff --git a/src/main/java/org/onap/sdc/toscaparser/api/Policy.java b/src/main/java/org/onap/sdc/toscaparser/api/Policy.java index f7ec967..437563e 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/Policy.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/Policy.java @@ -119,6 +119,12 @@ public class Policy extends EntityTemplate { ", properties=" + properties + '}'; } + + public int compareTo(Policy other){ + if(this.equals(other)) + return 0; + return this.getName().compareTo(other.getName()) == 0 ? this.getType().compareTo(other.getType()) : this.getName().compareTo(other.getName()); + } } /*python diff --git a/src/main/java/org/onap/sdc/toscaparser/api/ToscaTemplate.java b/src/main/java/org/onap/sdc/toscaparser/api/ToscaTemplate.java index 3d94d6e..b06c045 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/ToscaTemplate.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/ToscaTemplate.java @@ -836,6 +836,10 @@ public class ToscaTemplate extends Object { return nestedToscaTemplatesWithTopology; } + public ConcurrentHashMap getNestedTopologyTemplates() { + return nestedToscaTplsWithTopology; + } + @Override public String toString() { return "ToscaTemplate{" + diff --git a/version.properties b/version.properties index 235314e..a61846b 100644 --- a/version.properties +++ b/version.properties @@ -5,7 +5,7 @@ major=1 minor=3 -patch=0 +patch=1 base_version=${major}.${minor}.${patch} -- cgit 1.2.3-korg From d25b009dafd808485a06643b879d7a8d682e42e0 Mon Sep 17 00:00:00 2001 From: "Gitelman, Tal (tg851x)" Date: Mon, 12 Mar 2018 18:49:55 +0200 Subject: analyze and fix IQ 
server violations JTOSCA Change-Id: I8174f07a5a14be1e22d72baa1ad47895c1506ed1 Issue-ID: SDC-1100 Signed-off-by: Gitelman, Tal (tg851x) --- pom.xml | 1 + 1 file changed, 1 insertion(+) diff --git a/pom.xml b/pom.xml index 363050b..a6361e2 100644 --- a/pom.xml +++ b/pom.xml @@ -61,6 +61,7 @@ junit junit 4.12 + test -- cgit 1.2.3-korg From ffb1042f8fae035e1c55eeb0792bebac6e7d0c94 Mon Sep 17 00:00:00 2001 From: "Gitelman, Tal (tg851x)" Date: Wed, 14 Mar 2018 19:45:14 +0200 Subject: analyze and fix IQ server violations JTOSCA Change-Id: I00addf73c5978844c81692a91f2c525621e9f3c4 Issue-ID: SDC-1100 Signed-off-by: Gitelman, Tal (tg851x) --- pom.xml | 2 +- version.properties | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index a6361e2..403f6fe 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ org.onap.sdc.jtosca jtosca - 1.3.1-SNAPSHOT + 1.3.2-SNAPSHOT sdc-jtosca diff --git a/version.properties b/version.properties index a61846b..190988d 100644 --- a/version.properties +++ b/version.properties @@ -5,7 +5,7 @@ major=1 minor=3 -patch=1 +patch=2 base_version=${major}.${minor}.${patch} -- cgit 1.2.3-korg From 932e6d502a9c6cc32826628aef1506aff70f86b3 Mon Sep 17 00:00:00 2001 From: "Gitelman, Tal (tg851x)" Date: Tue, 20 Mar 2018 14:19:26 +0200 Subject: Upgrade SnakeYAML version for JTosca Change-Id: Ia2d2910edf4c7646ee4da2b1d454604af273ceac Issue-ID: SDC-234 Signed-off-by: Gitelman, Tal (tg851x) --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 403f6fe..f346b39 100644 --- a/pom.xml +++ b/pom.xml @@ -44,7 +44,7 @@ org.yaml snakeyaml - 1.14 + 1.20 compile -- cgit 1.2.3-korg From b477bfc60d30f5949f3c1d38240378b8c641742a Mon Sep 17 00:00:00 2001 From: "Gitelman, Tal (tg851x)" Date: Tue, 20 Mar 2018 14:29:11 +0200 Subject: Upgrade SnakeYAML version for JTosca Change-Id: I156ecdcacd3c4187ffdb3134e1576e6c696fcd31 Issue-ID: SDC-234 Signed-off-by: Gitelman, Tal (tg851x) --- pom.xml | 2 +- 1 file 
changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index f346b39..d876dd5 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ org.onap.sdc.jtosca jtosca - 1.3.2-SNAPSHOT + 1.3.3-SNAPSHOT sdc-jtosca -- cgit 1.2.3-korg From 6c8381b3cbdc5a50ee359691f1b805c6206c1bbd Mon Sep 17 00:00:00 2001 From: "Gitelman, Tal (tg851x)" Date: Tue, 20 Mar 2018 14:48:57 +0200 Subject: Upgrade SnakeYAML version for JTosca Change-Id: I7cd66d82c4f48b2fbedd3424843caa6d8d226d6e Issue-ID: SDC-234 Signed-off-by: Gitelman, Tal (tg851x) --- version.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/version.properties b/version.properties index 190988d..5ad9fe3 100644 --- a/version.properties +++ b/version.properties @@ -5,7 +5,7 @@ major=1 minor=3 -patch=2 +patch=3 base_version=${major}.${minor}.${patch} -- cgit 1.2.3-korg From e6a2f2bdf080bf99c407e2668b49d6eeff316a10 Mon Sep 17 00:00:00 2001 From: "Gitelman, Tal (tg851x)" Date: Tue, 20 Mar 2018 15:00:55 +0200 Subject: Upgrade SnakeYAML version for JTosca Change-Id: I332d0408b49a1e1b0752670afa745f50a2979122 Issue-ID: SDC-234 Signed-off-by: Gitelman, Tal (tg851x) --- pom.xml | 2 +- version.properties | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index d876dd5..db385b7 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ org.onap.sdc.jtosca jtosca - 1.3.3-SNAPSHOT + 1.3.4-SNAPSHOT sdc-jtosca diff --git a/version.properties b/version.properties index 5ad9fe3..a8f201d 100644 --- a/version.properties +++ b/version.properties @@ -5,7 +5,7 @@ major=1 minor=3 -patch=3 +patch=4 base_version=${major}.${minor}.${patch} -- cgit 1.2.3-korg From 05d9b88fd55c81ffce49f8e764a05af195cc24f7 Mon Sep 17 00:00:00 2001 From: "Gitelman, Tal (tg851x)" Date: Tue, 20 Mar 2018 15:49:10 +0200 Subject: Upgrade SnakeYAML version for sdc-tosca Change-Id: I5af178f9cdcad6a733d73bf9bf5fbddfc46647bb Issue-ID: SDC-234 Signed-off-by: Gitelman, Tal (tg851x) --- pom.xml | 4 ++-- 1 file changed, 2 
insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index db385b7..680917a 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ org.onap.sdc.jtosca jtosca - 1.3.4-SNAPSHOT + 1.3.3-SNAPSHOT sdc-jtosca @@ -44,7 +44,7 @@ org.yaml snakeyaml - 1.20 + 1.14 compile -- cgit 1.2.3-korg From 6263a57c5bb2da0f89292313927cac3e83157a83 Mon Sep 17 00:00:00 2001 From: Jessica Wagantall Date: Tue, 20 Mar 2018 12:31:52 -0700 Subject: Add INFO.yaml file Add INFO.yaml to list: - Project description - Properties - PTL information - Meeting information - Committer information Change-Id: I24243f641c76811ae749cdf9cf102db0978d3434 Issue-ID: CIMAN-134 Signed-off-by: Jessica Wagantall --- INFO.yaml | 49 +++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 49 insertions(+) create mode 100644 INFO.yaml diff --git a/INFO.yaml b/INFO.yaml new file mode 100644 index 0000000..eae0f2a --- /dev/null +++ b/INFO.yaml @@ -0,0 +1,49 @@ +--- +project: 'sdc/jtosca' +project_creation_date: '2017-05-26' +lifecycle_state: 'Incubation' +project_lead: &onap_releng_ptl + name: 'Michael Lando' + email: 'ml636r@att.com' + id: 'ml636r' + company: 'ATT' + timezone: 'Israel/Lod' +primary_contact: *onap_releng_ptl +issue_tracking: + type: 'jira' + url: 'https://jira.onap.org/projects/SDC' + key: 'SDC' +meetings: + - type: 'zoom' + agenda: '' + url: 'https://wiki.onap.org/pages/viewpage.action?pageId=6592847' + server: 'n/a' + channel: 'n/a' + repeats: 'weekly' + time: '14:00 UTC' +committers: + - <<: *onap_releng_ptl + - name: 'Idan Amit' + email: 'ia096e@intl.att.com' + company: 'ATT' + id: 'idanamit' + timezone: 'Israel/Aviv' + - name: 'Tal Gitelman' + email: 'tg851x@intl.att.com' + company: 'ATT' + id: 'tgitelman' + timezone: 'Israel/Aviv' + - name: 'Yuli Shlosberg' + email: 'ys9693@att.com' + company: 'ATT' + id: 'ys9693' + timezone: 'Israel/Aviv' +tsc: + approval: 'https://lists.onap.org/pipermail/onap-tsc' + changes: + - type: 'Addition' + name: 'Michael Lando' + name: 'Idan Amit' + name: 
'Tal Gitelman' + name: 'Yuli Shlosberg' + link: 'https://wiki.onap.org/pages/viewpage.action?pageId=25435557' -- cgit 1.2.3-korg From c2a2607d82026cf90643f4027f0decaa60817a9d Mon Sep 17 00:00:00 2001 From: Yuli Shlosberg Date: Wed, 28 Mar 2018 16:03:10 +0300 Subject: update of getNodeTypeByCap method Change-Id: Ia61081a2c2044d72a73783183c5da42d487e3fc5 Issue-ID: SDC-1181 Signed-off-by: Yuli Shlosberg --- src/main/java/org/onap/sdc/toscaparser/api/elements/NodeType.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/NodeType.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/NodeType.java index 7dcc44d..07b3a87 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/NodeType.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/NodeType.java @@ -134,7 +134,7 @@ public class NodeType extends StatefulEntityType { // Filter the node types ArrayList nodeTypes = new ArrayList<>(); for(String nt: customDef.keySet()) { - if(nt.startsWith(NODE_PREFIX) || nt.startsWith("org.onap") && !nt.equals("tosca.nodes.Root")) { + if(nt.startsWith(NODE_PREFIX) || nt.startsWith("org.openecomp") && !nt.equals("tosca.nodes.Root")) { nodeTypes.add(nt); } } -- cgit 1.2.3-korg From a8e5af36724128683fce5793a4c6641f03dcebf6 Mon Sep 17 00:00:00 2001 From: "Sheshukov, Natalia (ns019t)" Date: Sun, 8 Apr 2018 17:59:18 +0300 Subject: vLAN Tagging - Support Tosca Groups Change-Id: I51838dc8d3f003cdd5e7b5161cdc3e4b04f29442 Issue-ID: SDC-1196 Signed-off-by: Sheshukov, Natalia (ns019t) --- pom.xml | 2 +- src/main/java/org/onap/sdc/toscaparser/api/Group.java | 6 ++++++ src/main/java/org/onap/sdc/toscaparser/api/ToscaTemplate.java | 10 ++++++++++ 3 files changed, 17 insertions(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 680917a..990a7c9 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ org.onap.sdc.jtosca jtosca - 1.3.3-SNAPSHOT + 1.3.4-SNAPSHOT sdc-jtosca diff --git 
a/src/main/java/org/onap/sdc/toscaparser/api/Group.java b/src/main/java/org/onap/sdc/toscaparser/api/Group.java index f678083..d96b4a9 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/Group.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/Group.java @@ -87,6 +87,12 @@ public class Group extends EntityTemplate { ", metaData=" + metaData + '}'; } + + public int compareTo(Group other){ + if(this.equals(other)) + return 0; + return this.getName().compareTo(other.getName()) == 0 ? this.getType().compareTo(other.getType()) : this.getName().compareTo(other.getName()); + } } /*python diff --git a/src/main/java/org/onap/sdc/toscaparser/api/ToscaTemplate.java b/src/main/java/org/onap/sdc/toscaparser/api/ToscaTemplate.java index b06c045..bdf962b 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/ToscaTemplate.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/ToscaTemplate.java @@ -89,6 +89,7 @@ public class ToscaTemplate extends Object { private ArrayList nodeTemplates; private ArrayList outputs; private ArrayList policies; + private ArrayList groups; private ConcurrentHashMap nestedToscaTplsWithTopology; private ArrayList nestedToscaTemplatesWithTopology; private ToscaGraph graph; @@ -213,6 +214,7 @@ public class ToscaTemplate extends Object { this.nodeTemplates = _nodeTemplates(); this.outputs = _outputs(); this.policies = _policies(); + this.groups = _groups(); // _handleNestedToscaTemplatesWithTopology(); _handleNestedToscaTemplatesWithTopology(topologyTemplate); graph = new ToscaGraph(nodeTemplates); @@ -313,6 +315,10 @@ public class ToscaTemplate extends Object { private ArrayList _policies() { return topologyTemplate.getPolicies(); } + + private ArrayList _groups() { + return topologyTemplate.getGroups(); + } /** * This method is used to get consolidated custom definitions from all imports @@ -765,6 +771,10 @@ public class ToscaTemplate extends Object { return policies; } + public ArrayList getGroups() { + return groups; + } + public ArrayList 
getNodeTemplates() { return nodeTemplates; } -- cgit 1.2.3-korg From 86ada8c4d4b683737f01e2fb491ee278ca3f8033 Mon Sep 17 00:00:00 2001 From: "Sheshukov, Natalia (ns019t)" Date: Mon, 9 Apr 2018 14:59:52 +0300 Subject: vLAN Tagging Support Tosca Groups - TOSCA parser (refactoring) Change-Id: Ia2609bc7a73bfc0d928f4099a717b31109aa4586 Issue-ID: SDC-1199 Signed-off-by: Sheshukov, Natalia (ns019t) --- .../onap/sdc/toscaparser/api/EntityTemplate.java | 7 ++- .../java/org/onap/sdc/toscaparser/api/Group.java | 1 + .../org/onap/sdc/toscaparser/api/NodeTemplate.java | 9 +++ .../onap/sdc/toscaparser/api/TopologyTemplate.java | 67 +++++++++++----------- .../onap/sdc/toscaparser/api/ToscaTemplate.java | 1 + .../sdc/toscaparser/api/elements/GroupType.java | 31 +++++++++- 6 files changed, 79 insertions(+), 37 deletions(-) diff --git a/src/main/java/org/onap/sdc/toscaparser/api/EntityTemplate.java b/src/main/java/org/onap/sdc/toscaparser/api/EntityTemplate.java index 95c97dd..078dc44 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/EntityTemplate.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/EntityTemplate.java @@ -255,7 +255,12 @@ public abstract class EntityTemplate { ((EntityType)typeDefinition).getValue(CAPABILITIES,entityTpl,true); if(caps != null) { //?!? getCapabilities defined only for NodeType... - LinkedHashMap capabilities = ((NodeType)typeDefinition).getCapabilities(); + LinkedHashMap capabilities = null; + if(typeDefinition instanceof NodeType){ + capabilities = ((NodeType)typeDefinition).getCapabilities(); + } else if (typeDefinition instanceof GroupType){ + capabilities = ((GroupType)typeDefinition).getCapabilities(); + } for(Map.Entry me: caps.entrySet()) { String name = me. 
getKey(); LinkedHashMap props = (LinkedHashMap)me.getValue(); diff --git a/src/main/java/org/onap/sdc/toscaparser/api/Group.java b/src/main/java/org/onap/sdc/toscaparser/api/Group.java index d96b4a9..de031e6 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/Group.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/Group.java @@ -41,6 +41,7 @@ public class Group extends EntityTemplate { } memberNodes = _memberNodes; _validateKeys(); + getCapabilities(); } public Metadata getMetadata() { diff --git a/src/main/java/org/onap/sdc/toscaparser/api/NodeTemplate.java b/src/main/java/org/onap/sdc/toscaparser/api/NodeTemplate.java index 250ef42..20bc210 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/NodeTemplate.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/NodeTemplate.java @@ -21,6 +21,7 @@ public class NodeTemplate extends EntityTemplate { private ArrayList relationshipTpl; private LinkedHashMap _relationships; private SubstitutionMappings subMappingToscaTemplate; + private TopologyTemplate originComponentTemplate; private Metadata metadata; private static final String METADATA = "metadata"; @@ -453,6 +454,14 @@ public class NodeTemplate extends EntityTemplate { return getName(); } + public TopologyTemplate getOriginComponentTemplate() { + return originComponentTemplate; + } + + public void setOriginComponentTemplate(TopologyTemplate originComponentTemplate) { + this.originComponentTemplate = originComponentTemplate; + } + } /*python diff --git a/src/main/java/org/onap/sdc/toscaparser/api/TopologyTemplate.java b/src/main/java/org/onap/sdc/toscaparser/api/TopologyTemplate.java index e2c268e..e3d3538 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/TopologyTemplate.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/TopologyTemplate.java @@ -12,6 +12,7 @@ import org.onap.sdc.toscaparser.api.parameters.Output; import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; import java.util.ArrayList; +import java.util.HashMap; import 
java.util.HashSet; import java.util.LinkedHashMap; import java.util.Map; @@ -188,39 +189,35 @@ public class TopologyTemplate { @SuppressWarnings("unchecked") private ArrayList _policies() { ArrayList alPolicies = new ArrayList<>(); - for(Object po: _tplPolicies()) { - LinkedHashMap policy = (LinkedHashMap)po; - for(Map.Entry me: policy.entrySet()) { - String policyName = me.getKey(); - LinkedHashMap policyTpl = (LinkedHashMap)me.getValue(); - ArrayList targetList = (ArrayList)policyTpl.get("targets"); - //ArrayList targetObjects = new ArrayList<>(); - ArrayList targetNodes = new ArrayList<>(); - ArrayList targetObjects = new ArrayList<>(); - ArrayList targetGroups = new ArrayList<>(); - String targetsType = "groups"; - if(targetList != null && targetList.size() >= 1) { - targetGroups = _getPolicyGroups(targetList); - if(targetGroups == null) { - targetsType = "node_templates"; - targetNodes = _getGroupMembers(targetList); - for(NodeTemplate nt: targetNodes) { - targetObjects.add(nt); - } - } - else { - for(Group gr: targetGroups) { - targetObjects.add(gr); - } - } - } - Policy policyObj = new Policy(policyName, - policyTpl, - targetObjects, - targetsType, - customDefs); - alPolicies.add(policyObj); + for(Map.Entry me: _tplPolicies().entrySet()) { + String policyName = me.getKey(); + LinkedHashMap policyTpl = (LinkedHashMap)me.getValue(); + ArrayList targetList = (ArrayList)policyTpl.get("targets"); + ArrayList targetNodes = new ArrayList<>(); + ArrayList targetObjects = new ArrayList<>(); + ArrayList targetGroups = new ArrayList<>(); + String targetsType = "groups"; + if(targetList != null && targetList.size() >= 1) { + targetGroups = _getPolicyGroups(targetList); + if(targetGroups == null || targetGroups.isEmpty()) { + targetsType = "node_templates"; + targetNodes = _getGroupMembers(targetList); + for(NodeTemplate nt: targetNodes) { + targetObjects.add(nt); + } + } + else { + for(Group gr: targetGroups) { + targetObjects.add(gr); + } + } } + Policy policyObj = new 
Policy(policyName, + policyTpl, + targetObjects, + targetsType, + customDefs); + alPolicies.add(policyObj); } return alPolicies; } @@ -368,12 +365,12 @@ public class TopologyTemplate { } @SuppressWarnings("unchecked") - private ArrayList _tplPolicies() { + private LinkedHashMap _tplPolicies() { if(tpl.get(POLICIES) != null) { - return (ArrayList)tpl.get(POLICIES); + return (LinkedHashMap)tpl.get(POLICIES); } else { - return new ArrayList(); + return new LinkedHashMap<>(); } } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/ToscaTemplate.java b/src/main/java/org/onap/sdc/toscaparser/api/ToscaTemplate.java index bdf962b..ea76a09 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/ToscaTemplate.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/ToscaTemplate.java @@ -599,6 +599,7 @@ public class ToscaTemplate extends Object { parsedParams, nt, resolveGetInput); + nt.setOriginComponentTemplate(topologyWithSubMapping); if(topologyWithSubMapping.getSubstitutionMappings() != null) { // Record nested topology templates in top level template //nestedToscaTemplatesWithTopology.add(topologyWithSubMapping); diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/GroupType.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/GroupType.java index 1419461..2f8c1e0 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/GroupType.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/GroupType.java @@ -3,7 +3,9 @@ package org.onap.sdc.toscaparser.api.elements; import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; +import java.util.ArrayList; import java.util.LinkedHashMap; +import java.util.Map; public class GroupType extends StatefulEntityType { @@ -124,7 +126,34 @@ public class GroupType extends StatefulEntityType { public String getType() { return groupType; } - + + @SuppressWarnings("unchecked") + public ArrayList getCapabilitiesObjects() { + // Return a 
list of capability objects + ArrayList typecapabilities = new ArrayList<>(); + LinkedHashMap caps = (LinkedHashMap)getValue(CAPABILITIES, null, true); + if(caps != null) { + // 'cname' is symbolic name of the capability + // 'cvalue' is a dict { 'type': } + for(Map.Entry me: caps.entrySet()) { + String cname = me.getKey(); + LinkedHashMap cvalue = (LinkedHashMap)me.getValue(); + String ctype = cvalue.get("type"); + CapabilityTypeDef cap = new CapabilityTypeDef(cname,ctype,type,customDef); + typecapabilities.add(cap); + } + } + return typecapabilities; + } + + public LinkedHashMap getCapabilities() { + // Return a dictionary of capability name-objects pairs + LinkedHashMap caps = new LinkedHashMap<>(); + for(CapabilityTypeDef ctd: getCapabilitiesObjects()) { + caps.put(ctd.getName(),ctd); + } + return caps; + } } -- cgit 1.2.3-korg From aeb31ac744e49499357863a844bbbf17ceb655a7 Mon Sep 17 00:00:00 2001 From: Lior Nachmias Date: Tue, 10 Apr 2018 17:56:18 +0300 Subject: Update license text Change-Id: Id95168bdc35d83dad964bdedfb4847ac72c0f6e1 Issue-ID: SDC-1118 Signed-off-by: Lior Nachmias --- LICENSE.TXT | 1 - 1 file changed, 1 deletion(-) diff --git a/LICENSE.TXT b/LICENSE.TXT index 2b91311..f479f8a 100644 --- a/LICENSE.TXT +++ b/LICENSE.TXT @@ -17,5 +17,4 @@ * See the License for the specific language governing permissions and * limitations under the License. * ============LICENSE_END============================================ -* ECOMP is a trademark and service mark of AT&T Intellectual Property. */ \ No newline at end of file -- cgit 1.2.3-korg From 5455ebcb5036a0568675939a5c68533e58adf9ad Mon Sep 17 00:00:00 2001 From: PriyanshuAgarwal Date: Tue, 10 Apr 2018 17:56:18 +0300 Subject: Interfaces support in SDC Parser Part 1 of the changes of interface support in SDC Parser. 
Change-Id: I3a5e0fdda69baad329460047a03f03665fbe577b Issue-ID: SDC-1197 Signed-off-by: priyanshu --- pom.xml | 4 +- .../org/onap/sdc/toscaparser/api/NodeTemplate.java | 41 +++++ .../toscaparser/api/elements/InterfacesDef.java | 187 +++++++++++++-------- version.properties | 2 +- 4 files changed, 157 insertions(+), 77 deletions(-) diff --git a/pom.xml b/pom.xml index 990a7c9..5a2d1be 100644 --- a/pom.xml +++ b/pom.xml @@ -4,8 +4,8 @@ org.onap.sdc.jtosca jtosca - 1.3.4-SNAPSHOT - sdc-jtosca + 1.3.5-SNAPSHOT + sdc-jtosca diff --git a/src/main/java/org/onap/sdc/toscaparser/api/NodeTemplate.java b/src/main/java/org/onap/sdc/toscaparser/api/NodeTemplate.java index 20bc210..73b2341 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/NodeTemplate.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/NodeTemplate.java @@ -1,5 +1,6 @@ package org.onap.sdc.toscaparser.api; +import static org.onap.sdc.toscaparser.api.elements.EntityType.TOSCA_DEF; import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; import java.util.ArrayList; @@ -405,6 +406,46 @@ public class NodeTemplate extends EntityTemplate { return allowedOperations; } + /** + * Get all interface details for given node template.
+ * @return Map that contains the list of all interfaces and their definitions. + * If none found, an empty map will be returned. + */ + public Map> getAllInterfaceDetailsForNodeType(){ + Map> interfaceMap = new LinkedHashMap<>(); + + // Get custom interface details + Map customInterfacesDetails = ((NodeType)typeDefinition).getInterfaces(); + // Get native interface details from tosca definitions + Object nativeInterfaceDetails = TOSCA_DEF.get(InterfacesDef.LIFECYCLE); + Map allInterfaceDetails = new LinkedHashMap<>(); + allInterfaceDetails.putAll(customInterfacesDetails); + if (nativeInterfaceDetails != null){ + allInterfaceDetails.put(InterfacesDef.LIFECYCLE, nativeInterfaceDetails); + } + + // Process all interface details from combined collection and return an interface Map with + // interface names and their definitions + for(Map.Entry me: allInterfaceDetails.entrySet()) { + ArrayList interfaces = new ArrayList<>(); + String interfaceType = me.getKey(); + Map interfaceValue = (Map)me.getValue(); + if(interfaceValue.containsKey("type")){ + interfaceType = (String) interfaceValue.get("type"); + } + + for(Map.Entry ve: interfaceValue.entrySet()) { + // Filter type as this is a reserved key and not an operation + if(!ve.getKey().equals("type")){ + InterfacesDef iface = new InterfacesDef(typeDefinition, interfaceType,this, ve.getKey(), ve.getValue()); + interfaces.add(iface); + } + } + interfaceMap.put(interfaceType, interfaces); + } + return interfaceMap; + } + private void _validateFields(LinkedHashMap nodetemplate) { for(String ntname: nodetemplate.keySet()) { boolean bFound = false; diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/InterfacesDef.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/InterfacesDef.java index f8669ed..86333d6 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/InterfacesDef.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/InterfacesDef.java @@ -20,88 +20,88 @@ public class 
InterfacesDef extends StatefulEntityType { }; public static final String IMPLEMENTATION = "implementation"; + public static final String DESCRIPTION = "description"; public static final String INPUTS = "inputs"; - - public static final String INTERFACEVALUE[] = {IMPLEMENTATION, INPUTS}; public static final String INTERFACE_DEF_RESERVED_WORDS[] = { "type", "inputs", "derived_from", "version", "description"}; - + private EntityType ntype; private EntityTemplate nodeTemplate; - private String name; - private Object value; + + private String operationName; + private Object operationDef; private String implementation; private LinkedHashMap inputs; + private String description; - @SuppressWarnings("unchecked") public InterfacesDef(EntityType inodeType, - String interfaceType, - EntityTemplate inodeTemplate, - String iname, - Object ivalue) { + String interfaceType, + EntityTemplate inodeTemplate, + String iname, + Object ivalue) { // void super(); - - ntype = inodeType; - nodeTemplate = inodeTemplate; - type = interfaceType; - name = iname; - value = ivalue; - implementation = null; - inputs = null; - defs = new LinkedHashMap(); - - if(interfaceType.equals(LIFECYCLE_SHORTNAME)) { - interfaceType = LIFECYCLE; - } - if(interfaceType.equals(CONFIGURE_SHORTNAME)) { - interfaceType = CONFIGURE; - } - - // only NodeType has getInterfaces "hasattr(ntype,interfaces)" - // while RelationshipType does not - if(ntype instanceof NodeType) { - if(((NodeType)ntype).getInterfaces() != null && - ((NodeType)ntype).getInterfaces().values().contains(interfaceType)) { - LinkedHashMap nii = (LinkedHashMap) - ((NodeType)ntype).getInterfaces().get(interfaceType); - interfaceType = (String)nii.get("type"); - } - } - if(inodeType != null) { - if(nodeTemplate != null && nodeTemplate.getCustomDef() != null && - nodeTemplate.getCustomDef().values().contains(interfaceType)) { - defs = (LinkedHashMap) - nodeTemplate.getCustomDef().get(interfaceType); - } - else { - defs = 
(LinkedHashMap)TOSCA_DEF.get(interfaceType); - } - } - - if(ivalue != null) { - if(ivalue instanceof LinkedHashMap) { - for(Map.Entry me: ((LinkedHashMap)ivalue).entrySet()) { - if(me.getKey().equals("implementation")) { - implementation = (String)me.getValue(); - } - else if(me.getKey().equals("inputs")) { - inputs = (LinkedHashMap)me.getValue(); - } - else { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE123", String.format( - "UnknownFieldError: \"interfaces\" of template \"%s\" contain unknown field \"%s\"", - nodeTemplate.getName(),me.getKey()))); - } - } - } - else { - implementation = (String)ivalue; - } - } - } + + ntype = inodeType; + nodeTemplate = inodeTemplate; + type = interfaceType; + operationName = iname; + operationDef = ivalue; + implementation = null; + inputs = null; + defs = new LinkedHashMap(); + + if(interfaceType.equals(LIFECYCLE_SHORTNAME)) { + interfaceType = LIFECYCLE; + } + if(interfaceType.equals(CONFIGURE_SHORTNAME)) { + interfaceType = CONFIGURE; + } + + // only NodeType has getInterfaces "hasattr(ntype,interfaces)" + // while RelationshipType does not + if(ntype instanceof NodeType) { + if(((NodeType)ntype).getInterfaces() != null && + ((NodeType)ntype).getInterfaces().values().contains(interfaceType)) { + LinkedHashMap nii = (LinkedHashMap) + ((NodeType)ntype).getInterfaces().get(interfaceType); + interfaceType = (String)nii.get("type"); + } + } + if(inodeType != null) { + if(nodeTemplate != null && nodeTemplate.getCustomDef() != null && + nodeTemplate.getCustomDef().containsKey(interfaceType)) { + defs = (LinkedHashMap) + nodeTemplate.getCustomDef().get(interfaceType); + } + else { + defs = (LinkedHashMap)TOSCA_DEF.get(interfaceType); + } + } + + if(ivalue != null) { + if(ivalue instanceof LinkedHashMap) { + for(Map.Entry me: ((LinkedHashMap)ivalue).entrySet()) { + if(me.getKey().equals(IMPLEMENTATION)) { + implementation = (String)me.getValue(); + } + else if(me.getKey().equals(INPUTS)) { + 
inputs = (LinkedHashMap)me.getValue(); + } + else if(me.getKey().equals(DESCRIPTION)) { + description = (String)me.getValue(); + } + else { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE123", String.format( + "UnknownFieldError: \"interfaces\" of template \"%s\" contain unknown field \"%s\"", + nodeTemplate.getName(),me.getKey()))); + } + } + } + } + } public ArrayList getLifecycleOps() { if(defs != null) { @@ -111,7 +111,20 @@ public class InterfacesDef extends StatefulEntityType { } return null; } - + + public ArrayList getInterfaceOps() { + if(defs != null) { + ArrayList ops = _ops(); + ArrayList idrw = new ArrayList<>(); + for(int i=0; i getConfigureOps() { if(defs != null) { if(type.equals(CONFIGURE)) { @@ -120,22 +133,48 @@ public class InterfacesDef extends StatefulEntityType { } return null; } - + private ArrayList _ops() { return new ArrayList(defs.keySet()); } - + // getters/setters - + public LinkedHashMap getInputs() { return inputs; } - + public void setInput(String name,Object value) { inputs.put(name, value); } + + public String getImplementation(){ + return implementation; + } + + public void setImplementation(String implementation){ + this.implementation = implementation; + } + + public String getDescription() { + return description; + } + + public void setDescription(String description) { + this.description = description; + } + + public String getOperationName() { + return operationName; + } + + public void setOperationName(String operationName) { + this.operationName = operationName; + } } + + /*python # Licensed under the Apache License, Version 2.0 (the "License"); you may diff --git a/version.properties b/version.properties index a8f201d..a24b0ee 100644 --- a/version.properties +++ b/version.properties @@ -5,7 +5,7 @@ major=1 minor=3 -patch=4 +patch=5 base_version=${major}.${minor}.${patch} -- cgit 1.2.3-korg From 4b6318aa39e841804cc0a23140c2a923bc2a06a6 Mon Sep 17 00:00:00 2001 From: Michael Lando 
Date: Thu, 12 Apr 2018 17:05:13 +0300 Subject: remove license Change-Id: I64dd2f576c2cd0ddd0f9be647577fa6185431e3d Issue-ID: SDC-1220 Signed-off-by: Michael Lando --- src/test/resources/csars/sdc-onboarding_csar.csar | Bin 80596 -> 79654 bytes 1 file changed, 0 insertions(+), 0 deletions(-) diff --git a/src/test/resources/csars/sdc-onboarding_csar.csar b/src/test/resources/csars/sdc-onboarding_csar.csar index e1c3267..f12605d 100644 Binary files a/src/test/resources/csars/sdc-onboarding_csar.csar and b/src/test/resources/csars/sdc-onboarding_csar.csar differ -- cgit 1.2.3-korg From a1f9e908f1b29b3cc8c434bb3ca455196a0959bc Mon Sep 17 00:00:00 2001 From: "Sheshukov, Natalia (ns019t)" Date: Sun, 15 Apr 2018 13:11:15 +0300 Subject: JTosca Parser – support Annotations MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Change-Id: I256e42e5f4a6e5259c17b8de56d64b44afb7f42d Issue-ID: SDC-1223 Signed-off-by: Sheshukov, Natalia (ns019t) --- .../org/onap/sdc/toscaparser/api/Property.java | 7 + .../onap/sdc/toscaparser/api/ToscaTemplate.java | 57 ++++---- .../api/elements/enums/ToscaElementNames.java | 20 +++ .../sdc/toscaparser/api/parameters/Annotation.java | 76 +++++++++++ .../onap/sdc/toscaparser/api/parameters/Input.java | 122 +++++------------ .../onap/sdc/toscaparser/api/JToscaImportTest.java | 147 ++++++++++++++------- .../csars/service-AdiodVmxVpeBvService-csar.csar | Bin 0 -> 117439 bytes 7 files changed, 270 insertions(+), 159 deletions(-) create mode 100644 src/main/java/org/onap/sdc/toscaparser/api/elements/enums/ToscaElementNames.java create mode 100644 src/main/java/org/onap/sdc/toscaparser/api/parameters/Annotation.java create mode 100644 src/test/resources/csars/service-AdiodVmxVpeBvService-csar.csar diff --git a/src/main/java/org/onap/sdc/toscaparser/api/Property.java b/src/main/java/org/onap/sdc/toscaparser/api/Property.java index 227da0a..6d05af0 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/Property.java +++ 
b/src/main/java/org/onap/sdc/toscaparser/api/Property.java @@ -2,7 +2,10 @@ package org.onap.sdc.toscaparser.api; import java.util.ArrayList; import java.util.LinkedHashMap; +import java.util.Map; +import org.onap.sdc.toscaparser.api.elements.PropertyDef; +import org.onap.sdc.toscaparser.api.elements.StatefulEntityType; import org.onap.sdc.toscaparser.api.elements.constraints.Constraint; import org.onap.sdc.toscaparser.api.elements.constraints.Schema; import org.onap.sdc.toscaparser.api.functions.Function; @@ -29,6 +32,10 @@ public class Property { private Schema schema; private LinkedHashMap customDef; + public Property(Map.Entry propertyEntry){ + name = propertyEntry.getKey(); + value = propertyEntry.getValue(); + } public Property(String propname, Object propvalue, LinkedHashMap propschemaDict, diff --git a/src/main/java/org/onap/sdc/toscaparser/api/ToscaTemplate.java b/src/main/java/org/onap/sdc/toscaparser/api/ToscaTemplate.java index ea76a09..bfd0716 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/ToscaTemplate.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/ToscaTemplate.java @@ -1,28 +1,34 @@ package org.onap.sdc.toscaparser.api; -import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; -import org.onap.sdc.toscaparser.api.common.ValidationIssueCollector; -import org.onap.sdc.toscaparser.api.parameters.Output; -import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; - import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; -import java.util.*; -import java.util.concurrent.ConcurrentHashMap; import java.nio.file.Files; -import java.util.function.Predicate; import java.nio.file.Paths; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; 
+import java.util.function.Predicate; import org.onap.sdc.toscaparser.api.common.JToscaException; +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.common.ValidationIssueCollector; import org.onap.sdc.toscaparser.api.elements.EntityType; import org.onap.sdc.toscaparser.api.elements.Metadata; import org.onap.sdc.toscaparser.api.extensions.ExtTools; import org.onap.sdc.toscaparser.api.parameters.Input; +import org.onap.sdc.toscaparser.api.parameters.Output; import org.onap.sdc.toscaparser.api.prereq.CSAR; import org.onap.sdc.toscaparser.api.utils.JToscaErrorCodes; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.yaml.snakeyaml.Yaml; @@ -281,10 +287,12 @@ public class ToscaTemplate extends Object { return (String)tpl.get(DESCRIPTION); } + @SuppressWarnings("unchecked") private ArrayList _tplImports() { return (ArrayList)tpl.get(IMPORTS); } + @SuppressWarnings("unchecked") private ArrayList _tplRepositories() { LinkedHashMap repositories = (LinkedHashMap)tpl.get(REPOSITORIES); @@ -302,11 +310,6 @@ public class ToscaTemplate extends Object { return (LinkedHashMap)_getCustomTypes(RELATIONSHIP_TYPES,null); } - @SuppressWarnings("unchecked") - private LinkedHashMap _tplRelationshipTemplates() { - return (LinkedHashMap)_tplTopologyTemplate().get(RELATIONSHIP_TEMPLATES); - } - @SuppressWarnings("unchecked") private LinkedHashMap _tplTopologyTemplate() { return (LinkedHashMap)tpl.get(TOPOLOGY_TEMPLATE); @@ -332,6 +335,7 @@ public class ToscaTemplate extends Object { * @param alImports all imports which needs to be processed * @return the linked hash map containing all import definitions */ + @SuppressWarnings("unchecked") private LinkedHashMap _getAllCustomDefs(Object alImports) { String types[] = { @@ -346,7 +350,7 @@ public class ToscaTemplate extends Object { imports = sortImports(imports); for (Map map : imports) { - List> 
singleImportList = new ArrayList(); + List> singleImportList = new ArrayList<>(); singleImportList.add(map); Map importNameDetails = getValidFileNameForImportReference(singleImportList); @@ -398,7 +402,7 @@ public class ToscaTemplate extends Object { List> finalList2 = new ArrayList<>(); Iterator> itr = customImports.iterator(); while(itr.hasNext()) { - Map innerMap = itr.next(); + Map innerMap = itr.next(); if (innerMap.toString().contains("../")) { finalList2.add(innerMap); itr.remove(); @@ -451,8 +455,7 @@ public class ToscaTemplate extends Object { * @param customImports the custom imports * @return the map containing import file full and relative paths */ - private Map getValidFileNameForImportReference(List> - customImports){ + private Map getValidFileNameForImportReference(List> customImports){ String importFileName; Map retMap = new HashMap<>(); for (Map map1 : customImports) { @@ -575,6 +578,7 @@ public class ToscaTemplate extends Object { } // multi level nesting - RECURSIVE + @SuppressWarnings("unchecked") private void _handleNestedToscaTemplatesWithTopology(TopologyTemplate tt) { if(++nestingLoopCounter > 10) { log.error("ToscaTemplate - _handleNestedToscaTemplatesWithTopology - Nested Topologies Loop: too many levels, aborting"); @@ -583,7 +587,6 @@ public class ToscaTemplate extends Object { // Reset Processed Imports for nested templates this.processedImports = new HashSet<>(); for(Map.Entry me: nestedToscaTplsWithTopology.entrySet()) { - String fname = me.getKey(); LinkedHashMap toscaTpl = (LinkedHashMap)me.getValue(); for(NodeTemplate nt: tt.getNodeTemplates()) { @@ -761,6 +764,9 @@ public class ToscaTemplate extends Object { } public ArrayList getInputs() { + if(inputs != null){ + inputs.stream().forEach(Input::resetAnnotaions); + } return inputs; } @@ -821,6 +827,7 @@ public class ToscaTemplate extends Object { return pparams; } + @SuppressWarnings("unchecked") private String getSubMappingNodeType(LinkedHashMap toscaTpl) { // Return substitution 
mappings node type if(toscaTpl != null) { @@ -830,12 +837,6 @@ public class ToscaTemplate extends Object { return null; } - private boolean _hasSubstitutionMapping() { - // Return True if the template has valid substitution mappings - return topologyTemplate != null && - topologyTemplate.getSubstitutionMappings() != null; - } - public boolean hasNestedTemplates() { // Return True if the tosca template has nested templates return nestedToscaTemplatesWithTopology != null && @@ -881,6 +882,14 @@ public class ToscaTemplate extends Object { ", nestingLoopCounter=" + nestingLoopCounter + '}'; } + + public List getInputs(boolean annotationsRequired) { + if(inputs != null && annotationsRequired){ + inputs.stream().forEach(Input::parseAnnotations); + return inputs; + } + return getInputs(); + } } /*python diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/enums/ToscaElementNames.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/enums/ToscaElementNames.java new file mode 100644 index 0000000..0ee201c --- /dev/null +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/enums/ToscaElementNames.java @@ -0,0 +1,20 @@ +package org.onap.sdc.toscaparser.api.elements.enums; + +public enum ToscaElementNames { + + TYPE ("type"), + PROPERTIES ("properties"), + ANNOTATIONS ("annotations"), + SOURCE_TYPE ("source_type"); + + private String name; + + ToscaElementNames(String name){ + this.name = name; + } + + public String getName() { + return name; + } + +} diff --git a/src/main/java/org/onap/sdc/toscaparser/api/parameters/Annotation.java b/src/main/java/org/onap/sdc/toscaparser/api/parameters/Annotation.java new file mode 100644 index 0000000..74b738f --- /dev/null +++ b/src/main/java/org/onap/sdc/toscaparser/api/parameters/Annotation.java @@ -0,0 +1,76 @@ +package org.onap.sdc.toscaparser.api.parameters; + +import java.util.ArrayList; +import java.util.Map; +import java.util.Optional; +import java.util.stream.Collectors; + +import 
org.onap.sdc.toscaparser.api.Property; +import org.onap.sdc.toscaparser.api.elements.enums.ToscaElementNames; + +public class Annotation{ + + private final static String HEAT = "HEAT"; + + private String name; + private String type; + private ArrayList properties; + + public Annotation(){} + @SuppressWarnings("unchecked") + public Annotation(Map.Entry annotationEntry){ + if(annotationEntry != null){ + name = annotationEntry.getKey(); + Map annValue = (Map) annotationEntry.getValue(); + type = (String) annValue.get(ToscaElementNames.TYPE.getName()); + properties = fetchProperties((Map) annValue.get(ToscaElementNames.PROPERTIES.getName())); + } + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String getType() { + return type; + } + + public void setType(String type) { + this.type = type; + } + + public ArrayList getProperties() { + return properties; + } + + public void setProperties(ArrayList properties) { + this.properties = properties; + } + + private ArrayList fetchProperties(Map properties) { + if(properties != null){ + return (ArrayList) properties.entrySet().stream() + .map(Property::new) + .collect(Collectors.toList()); + } + return null; + } + + public boolean isHeatSourceType(){ + if(properties == null){ + return false; + } + Optional sourceType = properties.stream() + .filter(p -> p.getName().equals(ToscaElementNames.SOURCE_TYPE.getName())) + .findFirst(); + if(!sourceType.isPresent()){ + return false; + } + return sourceType.get().getValue() != null && ((String)sourceType.get().getValue()).equals(HEAT); + } + +} diff --git a/src/main/java/org/onap/sdc/toscaparser/api/parameters/Input.java b/src/main/java/org/onap/sdc/toscaparser/api/parameters/Input.java index 7e83cfb..e7a1246 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/parameters/Input.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/parameters/Input.java @@ -1,16 +1,18 @@ package 
org.onap.sdc.toscaparser.api.parameters; -import org.onap.sdc.toscaparser.api.DataEntity; -import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; -import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; - import java.util.ArrayList; import java.util.Arrays; import java.util.LinkedHashMap; +import java.util.Map; +import java.util.stream.Collectors; +import org.onap.sdc.toscaparser.api.DataEntity; +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; import org.onap.sdc.toscaparser.api.elements.EntityType; import org.onap.sdc.toscaparser.api.elements.constraints.Constraint; import org.onap.sdc.toscaparser.api.elements.constraints.Schema; +import org.onap.sdc.toscaparser.api.elements.enums.ToscaElementNames; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; public class Input { @@ -41,6 +43,7 @@ public class Input { private String name; private Schema schema; private LinkedHashMap customDefs; + private Map annotations; public Input(){ /** @@ -53,7 +56,20 @@ public class Input { schema = new Schema(_name,_schemaDict); customDefs = _customDefs; } - + + @SuppressWarnings("unchecked") + public void parseAnnotations() { + if(schema.getSchema() != null){ + LinkedHashMap annotations = (LinkedHashMap) schema.getSchema().get(ToscaElementNames.ANNOTATIONS.getName()); + if(annotations != null){ + setAnnotations(annotations.entrySet().stream() + .map(Annotation::new) + .filter(Annotation::isHeatSourceType) + .collect(Collectors.toMap(a -> a.getName(), a -> a))); + } + } + } + public String getName() { return name; } @@ -124,7 +140,8 @@ public class Input { } } - private void _validateValue(Object value) { + @SuppressWarnings("unchecked") + private void _validateValue(Object value) { Object datatype = null; if(EntityType.TOSCA_DEF.get(getType()) != null) { datatype = EntityType.TOSCA_DEF.get(getType()); @@ -147,87 +164,16 @@ public class Input { DataEntity.validateDatatype(getType(), value, null, (LinkedHashMap)datatype, null); } -} - 
-/*python - -from toscaparser.common.exception import ValidationIssueCollector -from toscaparser.common.exception import MissingRequiredFieldError -from toscaparser.common.exception import UnknownFieldError -from toscaparser.dataentity import DataEntity -from toscaparser.elements.constraints import Schema -from toscaparser.elements.entity_type import EntityType -from toscaparser.utils.gettextutils import _ - - -log = logging.getLogger('tosca') - -class Input(object): - - INPUTFIELD = (TYPE, DESCRIPTION, DEFAULT, CONSTRAINTS, REQUIRED, STATUS, - ENTRY_SCHEMA) = ('type', 'description', 'default', - 'constraints', 'required', 'status', - 'entry_schema') - - def __init__(self, name, schema_dict): - self.name = name - self.schema = Schema(name, schema_dict) - - self._validate_field() - self.validate_type(self.type) - - @property - def type(self): - return self.schema.type - - @property - def required(self): - return self.schema.required - - @property - def description(self): - return self.schema.description - - @property - def default(self): - return self.schema.default - - @property - def constraints(self): - return self.schema.constraints - - @property - def status(self): - return self.schema.status - - def validate(self, value=None): - if value is not None: - self._validate_value(value) - - def _validate_field(self): - for name in self.schema.schema: - if name not in self.INPUTFIELD: - ValidationIssueCollector.appendException( - UnknownFieldError(what='Input "%s"' % self.name, - field=name)) - - def validate_type(self, input_type): - if input_type not in Schema.PROPERTY_TYPES: - ValidationIssueCollector.appendException( - ValueError(_('Invalid type "%s".') % type)) - - # tODO(anyone) Need to test for any built-in datatype not just network - # that is, tosca.datatypes.* and not assume tosca.datatypes.network.* - # tODO(anyone) Add support for tosca.datatypes.Credential - def _validate_value(self, value): - tosca = EntityType.TOSCA_DEF - datatype = None - if self.type 
in tosca: - datatype = tosca[self.type] - elif EntityType.DATATYPE_NETWORK_PREFIX + self.type in tosca: - datatype = tosca[EntityType.DATATYPE_NETWORK_PREFIX + self.type] - - DataEntity.validate_datatype(self.type, value, None, datatype) + public Map getAnnotations() { + return annotations; + } -*/ + private void setAnnotations(Map annotations) { + this.annotations = annotations; + } + + public void resetAnnotaions(){ + annotations = null; + } +} diff --git a/src/test/java/org/onap/sdc/toscaparser/api/JToscaImportTest.java b/src/test/java/org/onap/sdc/toscaparser/api/JToscaImportTest.java index 589e47c..7d0c54c 100644 --- a/src/test/java/org/onap/sdc/toscaparser/api/JToscaImportTest.java +++ b/src/test/java/org/onap/sdc/toscaparser/api/JToscaImportTest.java @@ -2,63 +2,116 @@ package org.onap.sdc.toscaparser.api; import org.junit.Test; import org.onap.sdc.toscaparser.api.common.JToscaException; +import org.onap.sdc.toscaparser.api.parameters.Annotation; +import org.onap.sdc.toscaparser.api.parameters.Input; import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; import java.io.File; import java.util.ArrayList; import java.util.List; +import java.util.Optional; import java.util.stream.Collectors; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; public class JToscaImportTest { - @Test - public void testNoMissingTypeValidationError() throws JToscaException { - String fileStr = JToscaImportTest.class.getClassLoader().getResource - ("csars/sdc-onboarding_csar.csar").getFile(); - File file = new File(fileStr); - new ToscaTemplate(file.getAbsolutePath(), null, true, null); - List missingTypeErrors = ThreadLocalsHolder.getCollector() - .getValidationIssueReport() - .stream() - .filter(s -> s.contains("JE136")) - .collect(Collectors.toList()); - assertEquals(0, missingTypeErrors.size()); - } - - @Test - public void testNoStackOverFlowError() { - Exception jte = null; - try { - 
String fileStr = JToscaImportTest.class.getClassLoader().getResource - ("csars/sdc-onboarding_csar.csar").getFile(); - File file = new File(fileStr); - new ToscaTemplate(file.getAbsolutePath(), null, true, null); - } catch (Exception e){ - jte = e; - } - assertEquals(null, jte); - } - - @Test - public void testNoInvalidImports() throws JToscaException { - List fileNames = new ArrayList<>(); - fileNames.add("csars/tmpCSAR_Huawei_vSPGW_fixed.csar"); - fileNames.add("csars/sdc-onboarding_csar.csar"); - fileNames.add("csars/resource-Spgw-csar-ZTE.csar"); - - for (String fileName : fileNames) { - String fileStr = JToscaImportTest.class.getClassLoader().getResource(fileName).getFile(); - File file = new File(fileStr); - new ToscaTemplate(file.getAbsolutePath(), null, true, null); - List invalidImportErrors = ThreadLocalsHolder.getCollector() - .getValidationIssueReport() - .stream() - .filter(s -> s.contains("JE195")) - .collect(Collectors.toList()); - assertEquals(0, invalidImportErrors.size()); - } - } + @Test + public void testNoMissingTypeValidationError() throws JToscaException { + String fileStr = JToscaImportTest.class.getClassLoader().getResource("csars/sdc-onboarding_csar.csar") + .getFile(); + File file = new File(fileStr); + new ToscaTemplate(file.getAbsolutePath(), null, true, null); + List missingTypeErrors = ThreadLocalsHolder.getCollector().getValidationIssueReport().stream() + .filter(s -> s.contains("JE136")).collect(Collectors.toList()); + assertEquals(0, missingTypeErrors.size()); + } + + @Test + public void testNoStackOverFlowError() { + Exception jte = null; + try { + String fileStr = JToscaImportTest.class.getClassLoader().getResource("csars/sdc-onboarding_csar.csar") + .getFile(); + File file = new File(fileStr); + new ToscaTemplate(file.getAbsolutePath(), null, true, null); + } catch (Exception e) { + jte = e; + } + assertEquals(null, jte); + } + + @Test + public void testNoInvalidImports() throws JToscaException { + List fileNames = new 
ArrayList<>(); + fileNames.add("csars/tmpCSAR_Huawei_vSPGW_fixed.csar"); + fileNames.add("csars/sdc-onboarding_csar.csar"); + fileNames.add("csars/resource-Spgw-csar-ZTE.csar"); + + for (String fileName : fileNames) { + String fileStr = JToscaImportTest.class.getClassLoader().getResource(fileName).getFile(); + File file = new File(fileStr); + new ToscaTemplate(file.getAbsolutePath(), null, true, null); + List invalidImportErrors = ThreadLocalsHolder.getCollector().getValidationIssueReport().stream() + .filter(s -> s.contains("JE195")).collect(Collectors.toList()); + assertEquals(0, invalidImportErrors.size()); + } + } + + @Test + public void testParseAnnotations() throws JToscaException { + + String fileStr = JToscaImportTest.class.getClassLoader().getResource("csars/service-AdiodVmxVpeBvService-csar.csar").getFile(); + File file = new File(fileStr); + ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); + + List inputs = toscaTemplate.getInputs(); + assertNotNull(inputs); + assertTrue(inputs.stream().filter(i -> i.getAnnotations() != null).collect(Collectors.toList()).isEmpty()); + + inputs.forEach(Input::parseAnnotations); + assertTrue(!inputs.stream().filter(i -> i.getAnnotations() != null).collect(Collectors.toList()).isEmpty()); + } + + @Test + public void testGetInputsWithAndWithoutAnnotations() throws JToscaException { + + String fileStr = JToscaImportTest.class.getClassLoader().getResource("csars/service-AdiodVmxVpeBvService-csar.csar").getFile(); + File file = new File(fileStr); + ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); + List inputs = toscaTemplate.getInputs(); + assertNotNull(inputs); + assertTrue(inputs.stream().filter(i -> i.getAnnotations() != null).collect(Collectors.toList()).isEmpty()); + + inputs = toscaTemplate.getInputs(true); + assertNotNull(inputs); + validateInputsAnnotations(inputs); + + inputs = toscaTemplate.getInputs(false); + assertNotNull(inputs); + 
assertTrue(inputs.stream().filter(i -> i.getAnnotations() != null).collect(Collectors.toList()).isEmpty()); + } + + private void validateInputsAnnotations(List inputs) { + List inputsWithAnnotations = inputs.stream().filter(i -> i.getAnnotations() != null) + .collect(Collectors.toList()); + assertTrue(!inputs.isEmpty()); + inputsWithAnnotations.stream().forEach(i -> validateAnnotations(i)); + } + + private void validateAnnotations(Input input) { + assertNotNull(input.getAnnotations()); + assertEquals(input.getAnnotations().size(), 1); + Annotation annotation = input.getAnnotations().get("source"); + assertEquals(annotation.getName(), "source"); + assertEquals(annotation.getType().toLowerCase(), "org.openecomp.annotations.source"); + assertNotNull(annotation.getProperties()); + Optional source_type = annotation.getProperties().stream() + .filter(p -> p.getName().equals("source_type")).findFirst(); + assertTrue(source_type.isPresent()); + assertEquals(source_type.get().getValue(), "HEAT"); + } } diff --git a/src/test/resources/csars/service-AdiodVmxVpeBvService-csar.csar b/src/test/resources/csars/service-AdiodVmxVpeBvService-csar.csar new file mode 100644 index 0000000..28aa6f4 Binary files /dev/null and b/src/test/resources/csars/service-AdiodVmxVpeBvService-csar.csar differ -- cgit 1.2.3-korg From 1e8a85ca9eeacc68f3ee6e6567bb6420a78f04c9 Mon Sep 17 00:00:00 2001 From: Jessica Wagantall Date: Mon, 9 Apr 2018 17:27:44 -0700 Subject: Update INFO.yaml Update INFO.yaml after the changes confirmed by Michael on RT53821 Change-Id: Ic43163460fe6b0d88c788c9aeb73955e755741ae Issue-ID: CIMAN-134 Signed-off-by: Jessica Wagantall --- INFO.yaml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/INFO.yaml b/INFO.yaml index eae0f2a..7c382d0 100644 --- a/INFO.yaml +++ b/INFO.yaml @@ -38,6 +38,11 @@ committers: company: 'ATT' id: 'ys9693' timezone: 'Israel/Aviv' + - name: 'ELI LEVY' + email: 'el489u@intl.att.com' + company: 'ATT' + id: 'el489u' + timezone: 'Israel/Lod' tsc: 
approval: 'https://lists.onap.org/pipermail/onap-tsc' changes: @@ -46,4 +51,5 @@ tsc: name: 'Idan Amit' name: 'Tal Gitelman' name: 'Yuli Shlosberg' + name: 'ELI LEVI' link: 'https://wiki.onap.org/pages/viewpage.action?pageId=25435557' -- cgit 1.2.3-korg From 0af44b3327d47e91a5b0113ff276512c163ce374 Mon Sep 17 00:00:00 2001 From: "Sheshukov, Natalia (ns019t)" Date: Sun, 22 Apr 2018 12:19:24 +0300 Subject: JTosca Parser – support Policies fix MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Change-Id: Idedaa6f4519872eeab390c075d9bde3ca57865de Issue-ID: SDC-1223 Signed-off-by: Sheshukov, Natalia (ns019t) --- src/main/java/org/onap/sdc/toscaparser/api/Policy.java | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/src/main/java/org/onap/sdc/toscaparser/api/Policy.java b/src/main/java/org/onap/sdc/toscaparser/api/Policy.java index 437563e..aeed368 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/Policy.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/Policy.java @@ -67,7 +67,11 @@ public class Policy extends EntityTemplate { return targetsType; } -// public ArrayList getTargetsList() { + public LinkedHashMap getMetaData() { + return metaData; + } + + // public ArrayList getTargetsList() { public ArrayList getTargetsList() { return targetsList; } -- cgit 1.2.3-korg From 718f5b1ab6d4338b6128671d558034eddfd139d8 Mon Sep 17 00:00:00 2001 From: Michael Lando Date: Sat, 26 May 2018 19:58:32 +0300 Subject: update version in master update pom anf version.properties to 1.4.0 Change-Id: Ia745033ce141c6c1866ab78f94232a0f9767fae5 Issue-ID: SDC-1370 Signed-off-by: Michael Lando --- pom.xml | 2 +- version.properties | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/pom.xml b/pom.xml index 5a2d1be..db09499 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ org.onap.sdc.jtosca jtosca - 1.3.5-SNAPSHOT + 1.4.0-SNAPSHOT sdc-jtosca diff --git a/version.properties b/version.properties index 
a24b0ee..d709d86 100644 --- a/version.properties +++ b/version.properties @@ -4,8 +4,8 @@ # because they are used in Jenkins, whose plug-in doesn't support major=1 -minor=3 -patch=5 +minor=4 +patch=0 base_version=${major}.${minor}.${patch} -- cgit 1.2.3-korg From 842633530e7c1615f988cf58fcc9b472fe5b5fe1 Mon Sep 17 00:00:00 2001 From: PriyanshuAgarwal Date: Fri, 15 Jun 2018 10:58:29 +0530 Subject: Extended notation support for interface operation. Support extended notation for "Implementation" in interface operation. Change-Id: I1a45d7bbabea8262a86144f1552534e9f15c9d0f Issue-ID: SDC-1433 Signed-off-by: priyanshu --- pom.xml | 2 +- .../java/org/onap/sdc/toscaparser/api/elements/InterfacesDef.java | 8 ++++---- version.properties | 2 +- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/pom.xml b/pom.xml index db09499..b132eb5 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ org.onap.sdc.jtosca jtosca - 1.4.0-SNAPSHOT + 1.4.1-SNAPSHOT sdc-jtosca diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/InterfacesDef.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/InterfacesDef.java index 86333d6..3edf3b7 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/InterfacesDef.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/InterfacesDef.java @@ -31,7 +31,7 @@ public class InterfacesDef extends StatefulEntityType { private String operationName; private Object operationDef; - private String implementation; + private Object implementation; private LinkedHashMap inputs; private String description; @@ -85,7 +85,7 @@ public class InterfacesDef extends StatefulEntityType { if(ivalue instanceof LinkedHashMap) { for(Map.Entry me: ((LinkedHashMap)ivalue).entrySet()) { if(me.getKey().equals(IMPLEMENTATION)) { - implementation = (String)me.getValue(); + implementation = me.getValue(); } else if(me.getKey().equals(INPUTS)) { inputs = (LinkedHashMap)me.getValue(); @@ -148,11 +148,11 @@ public class InterfacesDef extends 
StatefulEntityType { inputs.put(name, value); } - public String getImplementation(){ + public Object getImplementation(){ return implementation; } - public void setImplementation(String implementation){ + public void setImplementation(Object implementation){ this.implementation = implementation; } diff --git a/version.properties b/version.properties index d709d86..a169fce 100644 --- a/version.properties +++ b/version.properties @@ -5,7 +5,7 @@ major=1 minor=4 -patch=0 +patch=1 base_version=${major}.${minor}.${patch} -- cgit 1.2.3-korg From 7c611567534f5467288bad170b9dff8012a93c12 Mon Sep 17 00:00:00 2001 From: Michael Lando Date: Fri, 3 Aug 2018 00:57:08 +0300 Subject: progress jtosca version after relase update jtosca version Change-Id: Ie805b9aee838df75987c12a28ebf37ace08260d9 Issue-ID: SDC-1595 Signed-off-by: Michael Lando --- pom.xml | 2 +- version.properties | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index b132eb5..d545367 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ org.onap.sdc.jtosca jtosca - 1.4.1-SNAPSHOT + 1.4.2-SNAPSHOT sdc-jtosca diff --git a/version.properties b/version.properties index a169fce..0d94dfd 100644 --- a/version.properties +++ b/version.properties @@ -5,7 +5,7 @@ major=1 minor=4 -patch=1 +patch=2 base_version=${major}.${minor}.${patch} -- cgit 1.2.3-korg From fb8fc907bb763b0a37e70ee3ac8ee624bc922920 Mon Sep 17 00:00:00 2001 From: Yuli Date: Tue, 14 Aug 2018 11:15:28 +0300 Subject: Align code to support types validation Change-Id: I9e08f55f9f4d00ae4ef8e6a1c6214b6426fdd46b Issue-ID: SDC-1646 Signed-off-by: Yuli Shlosberg --- src/main/java/org/onap/sdc/toscaparser/api/ImportsLoader.java | 5 +++-- src/main/java/org/onap/sdc/toscaparser/api/TopologyTemplate.java | 3 +++ src/main/java/org/onap/sdc/toscaparser/api/ToscaTemplate.java | 4 +++- src/main/java/org/onap/sdc/toscaparser/api/functions/GetInput.java | 2 +- src/main/java/org/onap/sdc/toscaparser/api/parameters/Input.java | 4 ++-- 5 files 
changed, 12 insertions(+), 6 deletions(-) diff --git a/src/main/java/org/onap/sdc/toscaparser/api/ImportsLoader.java b/src/main/java/org/onap/sdc/toscaparser/api/ImportsLoader.java index 76800f7..19ec182 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/ImportsLoader.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/ImportsLoader.java @@ -1,5 +1,6 @@ package org.onap.sdc.toscaparser.api; +import com.google.common.base.Charsets; import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; import org.onap.sdc.toscaparser.api.utils.UrlUtils; @@ -372,11 +373,11 @@ public class ImportsLoader { al[0] = al[1] = null; return al; } - try (InputStream input = new FileInputStream(new File(importTemplate));) { + try (BufferedReader br = new BufferedReader(new FileReader(importTemplate));) { al[0] = importTemplate; Yaml yaml = new Yaml(); - al[1] = yaml.load(input); + al[1] = yaml.load(br); return al; } catch(FileNotFoundException e) { diff --git a/src/main/java/org/onap/sdc/toscaparser/api/TopologyTemplate.java b/src/main/java/org/onap/sdc/toscaparser/api/TopologyTemplate.java index e3d3538..4c9a53d 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/TopologyTemplate.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/TopologyTemplate.java @@ -540,6 +540,9 @@ public class TopologyTemplate { public boolean getResolveGetInput() { return resolveGetInput; } + public LinkedHashMap getCustomDefs() { + return customDefs; + } } /*python diff --git a/src/main/java/org/onap/sdc/toscaparser/api/ToscaTemplate.java b/src/main/java/org/onap/sdc/toscaparser/api/ToscaTemplate.java index bfd0716..f236a0a 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/ToscaTemplate.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/ToscaTemplate.java @@ -103,6 +103,7 @@ public class ToscaTemplate extends Object { private int nestingLoopCounter; private LinkedHashMap> metaProperties; private Set processedImports; + 
private LinkedHashMap customDefsFinal = new LinkedHashMap<>(); public ToscaTemplate(String _path, LinkedHashMap _parsedParams, @@ -335,14 +336,15 @@ public class ToscaTemplate extends Object { * @param alImports all imports which needs to be processed * @return the linked hash map containing all import definitions */ + @SuppressWarnings("unchecked") private LinkedHashMap _getAllCustomDefs(Object alImports) { + String types[] = { IMPORTS, NODE_TYPES, CAPABILITY_TYPES, RELATIONSHIP_TYPES, DATA_TYPES, INTERFACE_TYPES, POLICY_TYPES, GROUP_TYPES }; - LinkedHashMap customDefsFinal = new LinkedHashMap<>(); List> imports = (List>) alImports; if (imports != null && !imports.isEmpty()) { diff --git a/src/main/java/org/onap/sdc/toscaparser/api/functions/GetInput.java b/src/main/java/org/onap/sdc/toscaparser/api/functions/GetInput.java index 262d99a..f2df667 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/functions/GetInput.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/functions/GetInput.java @@ -50,7 +50,7 @@ public class GetInput extends Function { String type = (String)ttinpinp.get("type"); Object value = DataEntity.validateDatatype( - type, toscaTpl.getParsedParams().get(getInputName()),null,null,null); + type, toscaTpl.getParsedParams().get(getInputName()),null,toscaTpl.getCustomDefs(),null); //SDC resolving Get Input if (value instanceof ArrayList){ if(args.size() == 2 && args.get(1) instanceof Integer && ((ArrayList) value).size()> (Integer)args.get(1)){ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/parameters/Input.java b/src/main/java/org/onap/sdc/toscaparser/api/parameters/Input.java index e7a1246..d59f406 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/parameters/Input.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/parameters/Input.java @@ -153,7 +153,7 @@ public class Input { String type = getType(); // if it's one of the basic types DON'T look in customDefs if(Arrays.asList(PRIMITIVE_TYPES).contains(type)) { - 
DataEntity.validateDatatype(getType(), value, null, (LinkedHashMap)datatype, null); + DataEntity.validateDatatype(getType(), value, null, customDefs, null); return; } else if(customDefs.get(getType()) != null) { @@ -162,7 +162,7 @@ public class Input { return; } - DataEntity.validateDatatype(getType(), value, null, (LinkedHashMap)datatype, null); + DataEntity.validateDatatype(getType(), value, null, customDefs, null); } public Map getAnnotations() { -- cgit 1.2.3-korg From ee3afa74714e46f57c00c075d01b8808c7caee67 Mon Sep 17 00:00:00 2001 From: Yuli Shlosberg Date: Wed, 15 Aug 2018 14:28:54 +0300 Subject: fix guava security violation Change-Id: Icc12417856638602ac114470d6a8aab8a0bb5280 Issue-ID: SDC-1471 Signed-off-by: Yuli Shlosberg --- pom.xml | 22 +++++++++++++++++----- 1 file changed, 17 insertions(+), 5 deletions(-) diff --git a/pom.xml b/pom.xml index d545367..f0383b1 100644 --- a/pom.xml +++ b/pom.xml @@ -78,11 +78,23 @@ 1.3.2 - - org.reflections - reflections - 0.9.11 - + + org.reflections + reflections + 0.9.11 + + + com.google.guava + guava + + + + + com.google.guava + guava + compile + 25.1-jre + -- cgit 1.2.3-korg From 3aa7e83c4c95e3aaac3e78a4a190c3aa5717069f Mon Sep 17 00:00:00 2001 From: "Ben-kimon, Hofit (hb272c)" Date: Sun, 16 Sep 2018 17:31:41 +0300 Subject: fix parser issue update max hierarchy level from 10 to 20 Change-Id: Ic530b8d0d909be788c2f4af4a3c880fbd795c92c Issue-ID: SDC-1764 Signed-off-by: Ben-kimon, Hofit (hb272c) --- pom.xml | 2 +- src/main/java/org/onap/sdc/toscaparser/api/ToscaTemplate.java | 3 ++- version.properties | 2 +- 3 files changed, 4 insertions(+), 3 deletions(-) diff --git a/pom.xml b/pom.xml index f0383b1..aa95d28 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ org.onap.sdc.jtosca jtosca - 1.4.2-SNAPSHOT + 1.4.3-SNAPSHOT sdc-jtosca diff --git a/src/main/java/org/onap/sdc/toscaparser/api/ToscaTemplate.java b/src/main/java/org/onap/sdc/toscaparser/api/ToscaTemplate.java index f236a0a..b5ae4c4 100644 --- 
a/src/main/java/org/onap/sdc/toscaparser/api/ToscaTemplate.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/ToscaTemplate.java @@ -35,6 +35,7 @@ import org.yaml.snakeyaml.Yaml; public class ToscaTemplate extends Object { + public static final int MAX_LEVELS = 20; private static Logger log = LoggerFactory.getLogger(ToscaTemplate.class.getName()); // TOSCA template key names @@ -582,7 +583,7 @@ public class ToscaTemplate extends Object { // multi level nesting - RECURSIVE @SuppressWarnings("unchecked") private void _handleNestedToscaTemplatesWithTopology(TopologyTemplate tt) { - if(++nestingLoopCounter > 10) { + if(++nestingLoopCounter > MAX_LEVELS) { log.error("ToscaTemplate - _handleNestedToscaTemplatesWithTopology - Nested Topologies Loop: too many levels, aborting"); return; } diff --git a/version.properties b/version.properties index 0d94dfd..a54766f 100644 --- a/version.properties +++ b/version.properties @@ -5,7 +5,7 @@ major=1 minor=4 -patch=2 +patch=3 base_version=${major}.${minor}.${patch} -- cgit 1.2.3-korg From 6fefab672f8ee763b561b0f67681e4f5b09418bc Mon Sep 17 00:00:00 2001 From: "Manzon, Inna (im453s)" Date: Mon, 17 Sep 2018 17:50:41 +0300 Subject: Resolve get_input for list change Change-Id: I53c13b22f74f857395f6f9c40801b0e927562bf5 Issue-ID: SDC-1769 Signed-off-by: Manzon, Inna (im453s) --- pom.xml | 2 +- src/main/java/org/onap/sdc/toscaparser/api/functions/GetInput.java | 3 ++- version.properties | 2 +- 3 files changed, 4 insertions(+), 3 deletions(-) diff --git a/pom.xml b/pom.xml index aa95d28..386b5d7 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ org.onap.sdc.jtosca jtosca - 1.4.3-SNAPSHOT + 1.4.4-SNAPSHOT sdc-jtosca diff --git a/src/main/java/org/onap/sdc/toscaparser/api/functions/GetInput.java b/src/main/java/org/onap/sdc/toscaparser/api/functions/GetInput.java index f2df667..7897495 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/functions/GetInput.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/functions/GetInput.java 
@@ -56,11 +56,12 @@ public class GetInput extends Function { if(args.size() == 2 && args.get(1) instanceof Integer && ((ArrayList) value).size()> (Integer)args.get(1)){ return ((ArrayList) value).get((Integer) args.get(1)); } + /* commented out for network cloud (SDNC) else{ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE273",String.format( "GetInputError: cannot resolve input name \"%s\", the expected structure is an argument with a name of input type list and a second argument with an index in the list", args.get(0)))); return null; - } + }*/ } return value; } diff --git a/version.properties b/version.properties index a54766f..49feeb5 100644 --- a/version.properties +++ b/version.properties @@ -5,7 +5,7 @@ major=1 minor=4 -patch=3 +patch=4 base_version=${major}.${minor}.${patch} -- cgit 1.2.3-korg From 9fb95e3c9e9ab5c239445c1511219686133437de Mon Sep 17 00:00:00 2001 From: "Manzon, Inna (im453s)" Date: Wed, 12 Sep 2018 18:32:18 +0300 Subject: Tosca Parser - property value resolving Change-Id: Ib0d2a0918d8d97d1e4988a8eeb7823f5957fa26f Issue-ID: SDC-1757 Signed-off-by: Manzon, Inna (im453s) --- pom.xml | 2 +- .../org/onap/sdc/toscaparser/api/EntityTemplate.java | 18 +++++++++++++----- .../org/onap/sdc/toscaparser/api/NodeTemplate.java | 14 ++++++++++++++ .../onap/sdc/toscaparser/api/functions/GetInput.java | 17 +++++++++-------- .../onap/sdc/toscaparser/api/JToscaImportTest.java | 19 ++++++++++++++++++- version.properties | 2 +- 6 files changed, 56 insertions(+), 16 deletions(-) diff --git a/pom.xml b/pom.xml index 386b5d7..4177612 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ org.onap.sdc.jtosca jtosca - 1.4.4-SNAPSHOT + 1.4.5-SNAPSHOT sdc-jtosca diff --git a/src/main/java/org/onap/sdc/toscaparser/api/EntityTemplate.java b/src/main/java/org/onap/sdc/toscaparser/api/EntityTemplate.java index 078dc44..637329e 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/EntityTemplate.java +++ 
b/src/main/java/org/onap/sdc/toscaparser/api/EntityTemplate.java @@ -197,16 +197,24 @@ public abstract class EntityTemplate { public LinkedHashMap getProperties() { LinkedHashMap props = new LinkedHashMap<>(); for(Property po: getPropertiesObjects()) { - props.put(((Property)po).getName(),po); + props.put(po.getName(),po); } return props; } public Object getPropertyValue(String name) { - LinkedHashMap props = getProperties(); - Property p = (Property)props.get(name); - return p != null ? p.getValue() : null; - } + LinkedHashMap props = getProperties(); + Property p = props.get(name); + return p != null ? p.getValue() : null; + } + + public String getPropertyType(String name) { + Property property = getProperties().get(name); + if (property != null) { + return property.getType(); + } + return null; + } public ArrayList getInterfaces() { if(_interfaces == null) { diff --git a/src/main/java/org/onap/sdc/toscaparser/api/NodeTemplate.java b/src/main/java/org/onap/sdc/toscaparser/api/NodeTemplate.java index 73b2341..270e908 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/NodeTemplate.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/NodeTemplate.java @@ -1,6 +1,8 @@ package org.onap.sdc.toscaparser.api; import static org.onap.sdc.toscaparser.api.elements.EntityType.TOSCA_DEF; + +import com.google.common.collect.Lists; import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; import java.util.ArrayList; @@ -27,6 +29,7 @@ public class NodeTemplate extends EntityTemplate { private static final String METADATA = "metadata"; + @SuppressWarnings("unchecked") public NodeTemplate(String name, LinkedHashMap ntnodeTemplates, @@ -247,6 +250,17 @@ public class NodeTemplate extends EntityTemplate { } } + public Object getPropertyValueFromTemplatesByName(String propertyName) { + LinkedHashMap nodeObject = (LinkedHashMap) templates.get(name); + if (nodeObject != null) { + LinkedHashMap properties = (LinkedHashMap)nodeObject.get(PROPERTIES); + if (properties != null) 
{ + return properties.get(propertyName); + } + } + return null; + } + private Metadata _metaData() { if(entityTpl.get(METADATA) != null) { return new Metadata((Map)entityTpl.get(METADATA)); diff --git a/src/main/java/org/onap/sdc/toscaparser/api/functions/GetInput.java b/src/main/java/org/onap/sdc/toscaparser/api/functions/GetInput.java index 7897495..24d5a18 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/functions/GetInput.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/functions/GetInput.java @@ -57,11 +57,10 @@ public class GetInput extends Function { return ((ArrayList) value).get((Integer) args.get(1)); } /* commented out for network cloud (SDNC) - else{ - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE273",String.format( + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE273",String.format( "GetInputError: cannot resolve input name \"%s\", the expected structure is an argument with a name of input type list and a second argument with an index in the list", args.get(0)))); - return null; - }*/ + return null; +*/ } return value; } @@ -78,11 +77,13 @@ public class GetInput extends Function { if ( args.get(1) instanceof Integer && ((ArrayList) inputDef.getDefault()).size()> ((Integer)args.get(1)).intValue()) { return ((ArrayList) inputDef.getDefault()).get(((Integer)args.get(1)).intValue()); - }else{ - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE274",(String.format( - "GetInputError: cannot resolve input Def name \"%s\", the expected structure is an argument with a name of input type list and a second argument with an index in the list", args.get(0))))); - return null; } +/* + commented out for network cloud (SDNC) + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE274",(String.format( + "GetInputError: cannot resolve input Def name \"%s\", the expected structure is an argument with a name of input 
type list and a second argument with an index in the list", args.get(0))))); + return null; +*/ } return inputDef.getDefault(); } diff --git a/src/test/java/org/onap/sdc/toscaparser/api/JToscaImportTest.java b/src/test/java/org/onap/sdc/toscaparser/api/JToscaImportTest.java index 7d0c54c..6fc8771 100644 --- a/src/test/java/org/onap/sdc/toscaparser/api/JToscaImportTest.java +++ b/src/test/java/org/onap/sdc/toscaparser/api/JToscaImportTest.java @@ -14,6 +14,7 @@ import java.util.stream.Collectors; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; public class JToscaImportTest { @@ -94,7 +95,23 @@ public class JToscaImportTest { assertTrue(inputs.stream().filter(i -> i.getAnnotations() != null).collect(Collectors.toList()).isEmpty()); } - private void validateInputsAnnotations(List inputs) { + @Test + public void testGetPropertyNameTest() throws JToscaException { + + String fileStr = JToscaImportTest.class.getClassLoader().getResource("csars/service-AdiodVmxVpeBvService-csar.csar").getFile(); + File file = new File(fileStr); + ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); + NodeTemplate nodeTemplate = toscaTemplate.getNodeTemplates().get(0); + + ArrayList valueList = (ArrayList)nodeTemplate.getPropertyValueFromTemplatesByName("vmxvpfe_sriov41_0_port_vlanfilter"); + assertEquals(4, valueList.size()); + + assertEquals("vPE", (String) nodeTemplate.getPropertyValueFromTemplatesByName("nf_role")); + + assertNull(nodeTemplate.getPropertyValueFromTemplatesByName("test")); + } + + private void validateInputsAnnotations(List inputs) { List inputsWithAnnotations = inputs.stream().filter(i -> i.getAnnotations() != null) .collect(Collectors.toList()); assertTrue(!inputs.isEmpty()); diff --git a/version.properties b/version.properties index 49feeb5..d1722d6 100644 --- a/version.properties +++ 
b/version.properties @@ -5,7 +5,7 @@ major=1 minor=4 -patch=4 +patch=5 base_version=${major}.${minor}.${patch} -- cgit 1.2.3-korg From d2ed3bc4effb38ad3cf157483127abeee39c25c1 Mon Sep 17 00:00:00 2001 From: "Manzon, Inna (im453s)" Date: Wed, 12 Sep 2018 18:32:18 +0300 Subject: Tosca Parser - property value resolving Change-Id: I06eb2442dbcae9ea0824cb7d7689b8d3591a56ed Issue-ID: SDC-1757 Signed-off-by: Manzon, Inna (im453s) --- pom.xml | 2 +- version.properties | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index 4177612..c25d8a7 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ org.onap.sdc.jtosca jtosca - 1.4.5-SNAPSHOT + 1.4.6-SNAPSHOT sdc-jtosca diff --git a/version.properties b/version.properties index d1722d6..ed1cd82 100644 --- a/version.properties +++ b/version.properties @@ -5,7 +5,7 @@ major=1 minor=4 -patch=5 +patch=6 base_version=${major}.${minor}.${patch} -- cgit 1.2.3-korg From e794b73443f37d92ea122336324e17709824df9d Mon Sep 17 00:00:00 2001 From: "Tufman, Shay" Date: Tue, 20 Nov 2018 14:48:55 +0200 Subject: fix null pointer Change-Id: If530734b96c32b25b2ed5df847c83dca2c633184 Issue-ID: SDC-1926 Signed-off-by: Tufman, Shay --- pom.xml | 2 +- .../java/org/onap/sdc/toscaparser/api/DataEntity.java | 8 +++++--- .../org/onap/sdc/toscaparser/api/JToscaImportTest.java | 10 ++++++++++ .../csars/service-JennyVtsbcKarunaSvc-csar.csar | Bin 0 -> 145639 bytes version.properties | 2 +- 5 files changed, 17 insertions(+), 5 deletions(-) create mode 100644 src/test/resources/csars/service-JennyVtsbcKarunaSvc-csar.csar diff --git a/pom.xml b/pom.xml index c25d8a7..20a4dc3 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ org.onap.sdc.jtosca jtosca - 1.4.6-SNAPSHOT + 1.4.7-SNAPSHOT sdc-jtosca diff --git a/src/main/java/org/onap/sdc/toscaparser/api/DataEntity.java b/src/main/java/org/onap/sdc/toscaparser/api/DataEntity.java index 2a12a71..1559e66 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/DataEntity.java +++ 
b/src/main/java/org/onap/sdc/toscaparser/api/DataEntity.java @@ -49,9 +49,11 @@ public class DataEntity { else { if(!(value instanceof LinkedHashMap)) { //ERROR under investigation - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE001", String.format( - "TypeMismatchError: \"%s\" is not a map. The type is \"%s\"", - value.toString(),dataType.getType()))); + String checkedVal = value != null ? value.toString() : null; + + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE001", String.format( + "TypeMismatchError: \"%s\" is not a map. The type is \"%s\"", + checkedVal, dataType.getType()))); if (value instanceof List && ((List) value).size() > 0) { value = ((List) value).get(0); diff --git a/src/test/java/org/onap/sdc/toscaparser/api/JToscaImportTest.java b/src/test/java/org/onap/sdc/toscaparser/api/JToscaImportTest.java index 6fc8771..8e587a9 100644 --- a/src/test/java/org/onap/sdc/toscaparser/api/JToscaImportTest.java +++ b/src/test/java/org/onap/sdc/toscaparser/api/JToscaImportTest.java @@ -111,6 +111,16 @@ public class JToscaImportTest { assertNull(nodeTemplate.getPropertyValueFromTemplatesByName("test")); } + @Test + public void testNullValueHasNoNullPointerException() throws JToscaException { + + String fileStr = JToscaImportTest.class.getClassLoader().getResource("csars/service-JennyVtsbcKarunaSvc-csar.csar").getFile(); + File file = new File(fileStr); + ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); + List inputs = toscaTemplate.getInputs(); + assertNotNull(inputs); + } + private void validateInputsAnnotations(List inputs) { List inputsWithAnnotations = inputs.stream().filter(i -> i.getAnnotations() != null) .collect(Collectors.toList()); diff --git a/src/test/resources/csars/service-JennyVtsbcKarunaSvc-csar.csar b/src/test/resources/csars/service-JennyVtsbcKarunaSvc-csar.csar new file mode 100644 index 0000000..3f80621 Binary files /dev/null 
and b/src/test/resources/csars/service-JennyVtsbcKarunaSvc-csar.csar differ diff --git a/version.properties b/version.properties index ed1cd82..442dce4 100644 --- a/version.properties +++ b/version.properties @@ -5,7 +5,7 @@ major=1 minor=4 -patch=6 +patch=7 base_version=${major}.${minor}.${patch} -- cgit 1.2.3-korg From 71ee175c85dba54b9645978a38e767ba4fb5bf70 Mon Sep 17 00:00:00 2001 From: Jessica Wagantall Date: Tue, 27 Nov 2018 13:37:33 -0800 Subject: Update INFO.yaml file Add Ofir Sonsino to the group Change-Id: I3a9a4b708ca2b5ed142ed6afd5c2777d55b190e6 Issue-ID: CIMAN-134 Signed-off-by: Jessica Wagantall --- INFO.yaml | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/INFO.yaml b/INFO.yaml index 7c382d0..3ad9533 100644 --- a/INFO.yaml +++ b/INFO.yaml @@ -43,6 +43,11 @@ committers: company: 'ATT' id: 'el489u' timezone: 'Israel/Lod' + - name: 'Ofir Sonsino' + email: 'ofir.sonsino@intl.att.com' + company: 'ATT' + id: 'os0695' + timezone: 'Israel/Lod' tsc: approval: 'https://lists.onap.org/pipermail/onap-tsc' changes: @@ -53,3 +58,6 @@ tsc: name: 'Yuli Shlosberg' name: 'ELI LEVI' link: 'https://wiki.onap.org/pages/viewpage.action?pageId=25435557' + - type: 'Addition' + name: 'Ofir Sonsino' + link: 'https://wiki.onap.org/pages/viewpage.action?pageId=45305945' -- cgit 1.2.3-korg From 789fe97ea2b934329b2b26323d7e5ffc44cf196f Mon Sep 17 00:00:00 2001 From: "Manzon, Inna (im453s)" Date: Thu, 29 Nov 2018 17:26:34 +0200 Subject: SDC Tosca Parser getEntity API Change-Id: Ifc007335ef57904305458f63b422f2db41a3e694 Issue-ID: SDC-1967 Signed-off-by: Manzon, Inna (im453s) --- pom.xml | 2 +- src/main/java/org/onap/sdc/toscaparser/api/Policy.java | 18 ++++++++++++------ version.properties | 2 +- 3 files changed, 14 insertions(+), 8 deletions(-) diff --git a/pom.xml b/pom.xml index 20a4dc3..07e1c9f 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ org.onap.sdc.jtosca jtosca - 1.4.7-SNAPSHOT + 1.4.8-SNAPSHOT sdc-jtosca diff --git 
a/src/main/java/org/onap/sdc/toscaparser/api/Policy.java b/src/main/java/org/onap/sdc/toscaparser/api/Policy.java index aeed368..9eaacfc 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/Policy.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/Policy.java @@ -6,6 +6,7 @@ import java.util.ArrayList; import java.util.LinkedHashMap; import java.util.Map; +import org.onap.sdc.toscaparser.api.elements.Metadata; import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; import org.onap.sdc.toscaparser.api.utils.ValidateUtils; @@ -20,8 +21,9 @@ public class Policy extends EntityTemplate { private static final String TRIGGERS = "triggers"; private static final String SECTIONS[] = { TYPE, METADATA, DESCRIPTION, PROPERTIES, TARGETS, TRIGGERS}; - - LinkedHashMap metaData; + +// LinkedHashMap metaData; + Metadata metaData; ArrayList targetsList; // *** a list of NodeTemplate OR a list of Group *** String targetsType; ArrayList triggers; @@ -35,10 +37,10 @@ public class Policy extends EntityTemplate { LinkedHashMap _customDef) { super(_name,_policy,"policy_type",_customDef); - metaData = null; if(_policy.get(METADATA) != null) { - metaData = (LinkedHashMap)_policy.get(METADATA); - ValidateUtils.validateMap(metaData); + LinkedHashMap metadataMap = (LinkedHashMap)_policy.get(METADATA); + ValidateUtils.validateMap(metadataMap); + metaData = new Metadata(metadataMap); } targetsList = targetObjects; @@ -67,10 +69,14 @@ public class Policy extends EntityTemplate { return targetsType; } - public LinkedHashMap getMetaData() { + public Metadata getMetaDataObj() { return metaData; } + public LinkedHashMap getMetaData() { + return (LinkedHashMap)metaData.getAllProperties(); + } + // public ArrayList getTargetsList() { public ArrayList getTargetsList() { return targetsList; diff --git a/version.properties b/version.properties index 442dce4..7a72c98 100644 --- a/version.properties +++ b/version.properties @@ -5,7 +5,7 @@ major=1 minor=4 -patch=7 +patch=8 
base_version=${major}.${minor}.${patch} -- cgit 1.2.3-korg From 8e6ae20e9ae0a66c4f518f74d059e8cedf157693 Mon Sep 17 00:00:00 2001 From: Jessica Wagantall Date: Tue, 4 Dec 2018 12:16:48 -0800 Subject: Update INFO.yaml file Remove Michael Lando. Add Ofir Sonsino as PTL Change-Id: Ie33036f6e884a47b3ec898d913d37b8ac37ce52b Issue-ID: CIMAN-134 Signed-off-by: Jessica Wagantall --- INFO.yaml | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/INFO.yaml b/INFO.yaml index 3ad9533..3d21b87 100644 --- a/INFO.yaml +++ b/INFO.yaml @@ -3,10 +3,10 @@ project: 'sdc/jtosca' project_creation_date: '2017-05-26' lifecycle_state: 'Incubation' project_lead: &onap_releng_ptl - name: 'Michael Lando' - email: 'ml636r@att.com' - id: 'ml636r' + name: 'Ofir Sonsino' + email: 'ofir.sonsino@intl.att.com' company: 'ATT' + id: 'os0695' timezone: 'Israel/Lod' primary_contact: *onap_releng_ptl issue_tracking: @@ -43,11 +43,6 @@ committers: company: 'ATT' id: 'el489u' timezone: 'Israel/Lod' - - name: 'Ofir Sonsino' - email: 'ofir.sonsino@intl.att.com' - company: 'ATT' - id: 'os0695' - timezone: 'Israel/Lod' tsc: approval: 'https://lists.onap.org/pipermail/onap-tsc' changes: @@ -61,3 +56,7 @@ tsc: - type: 'Addition' name: 'Ofir Sonsino' link: 'https://wiki.onap.org/pages/viewpage.action?pageId=45305945' + - type: 'Removal' + name: 'Michael Lando' + link: 'https://lists.onap.org/g/onap-tsc/message/4239' + -- cgit 1.2.3-korg From 92208f69ec00a3f9d28a917d6f6a43ef150863dd Mon Sep 17 00:00:00 2001 From: Tal Gitelman Date: Wed, 2 Jan 2019 15:34:04 +0200 Subject: SDC distribution failed bug fix Change-Id: I5659511a6880201399173c941314cfc1d1bb7d22 Issue-ID: SDC-1955 Signed-off-by: Tal Gitelman --- pom.xml | 2 +- .../sdc/toscaparser/api/elements/NodeType.java | 25 +++++++++++++++------- .../sdc/toscaparser/api/utils/ValidateUtils.java | 2 +- version.properties | 2 +- 4 files changed, 20 insertions(+), 11 deletions(-) diff --git a/pom.xml b/pom.xml index 07e1c9f..8e6c0f8 100644 
--- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ org.onap.sdc.jtosca jtosca - 1.4.8-SNAPSHOT + 1.4.9-SNAPSHOT sdc-jtosca diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/NodeType.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/NodeType.java index 07b3a87..a6d7f81 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/NodeType.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/NodeType.java @@ -101,24 +101,33 @@ public class NodeType extends StatefulEntityType { keyword = "node"; } else { - // If value is a dict and has a type key + String getRelation = null; + // If nodeTypeByCap is a dict and has a type key // we need to lookup the node type using // the capability type String captype = (String)req.get("capability"); - String value = _getNodeTypeByCap(captype); - String getRelation = _getRelation(key,value); + nodeType = _getNodeTypeByCap(captype); + if (nodeType != null){ + getRelation = _getRelation(key, nodeType); + } else { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE11", String.format( + "NodeTypeForCapabilityNotFoundError: Node type for capability type \"%s\" is not found",captype))); + } if (getRelation != null) { relation = getRelation; } keyword = key; - nodeType = value; } } - } - RelationshipType rtype = new RelationshipType(relation, keyword, customDef); - NodeType relatednode = new NodeType(nodeType, customDef); - relationship.put(rtype, relatednode); + if(relation == null || nodeType == null){ + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE11", String.format( + "NodeTypeForRelationNotFound: Node type \"%s\" with relationship type \"%s\" is not found",nodeType, relation))); + } else { + RelationshipType rtype = new RelationshipType(relation, keyword, customDef); + NodeType relatednode = new NodeType(nodeType, customDef); + relationship.put(rtype, relatednode); + } } } return relationship; diff --git 
a/src/main/java/org/onap/sdc/toscaparser/api/utils/ValidateUtils.java b/src/main/java/org/onap/sdc/toscaparser/api/utils/ValidateUtils.java index 6c26f18..9623258 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/utils/ValidateUtils.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/utils/ValidateUtils.java @@ -12,7 +12,7 @@ public class ValidateUtils { public static Object strToNum(Object value) { // Convert a string representation of a number into a numeric type - // tODO(TBD) we should not allow numeric values in, input should be str + // TODO(TBD) we should not allow numeric values in, input should be str if(value instanceof Number) { return value; } diff --git a/version.properties b/version.properties index 7a72c98..61bae76 100644 --- a/version.properties +++ b/version.properties @@ -5,7 +5,7 @@ major=1 minor=4 -patch=8 +patch=9 base_version=${major}.${minor}.${patch} -- cgit 1.2.3-korg From 9220c7ac558a42f65c2df48337dfc8f03d6b129c Mon Sep 17 00:00:00 2001 From: "Manzon, Inna (im453s)" Date: Thu, 29 Nov 2018 17:26:34 +0200 Subject: Retrieve leaf property value by path Change-Id: I556f6bfaa3d7c96f9d1f26ae0fcba199d23800c7 Issue-ID: SDC-1982 Signed-off-by: Manzon, Inna (im453s) --- pom.xml | 2 +- .../onap/sdc/toscaparser/api/EntityTemplate.java | 19 +- .../java/org/onap/sdc/toscaparser/api/Group.java | 12 +- .../org/onap/sdc/toscaparser/api/NodeTemplate.java | 16 +- .../java/org/onap/sdc/toscaparser/api/Policy.java | 14 +- .../org/onap/sdc/toscaparser/api/Property.java | 212 ++++++++++++++++++++- .../sdc/toscaparser/api/RelationshipTemplate.java | 12 +- .../onap/sdc/toscaparser/api/TopologyTemplate.java | 41 ++-- .../api/elements/constraints/Schema.java | 10 + .../onap/sdc/toscaparser/api/JToscaImportTest.java | 33 ++++ .../api/elements/CalculatePropertyByPathTest.java | 147 ++++++++++++++ .../service-NetworkCloudVnfServiceMock-csar.csar | Bin 0 -> 60223 bytes version.properties | 2 +- 13 files changed, 474 insertions(+), 46 deletions(-) create mode 
100644 src/test/java/org/onap/sdc/toscaparser/api/elements/CalculatePropertyByPathTest.java create mode 100644 src/test/resources/csars/service-NetworkCloudVnfServiceMock-csar.csar diff --git a/pom.xml b/pom.xml index 8e6c0f8..c75ce9b 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ org.onap.sdc.jtosca jtosca - 1.4.9-SNAPSHOT + 1.4.10-SNAPSHOT sdc-jtosca diff --git a/src/main/java/org/onap/sdc/toscaparser/api/EntityTemplate.java b/src/main/java/org/onap/sdc/toscaparser/api/EntityTemplate.java index 637329e..2178be3 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/EntityTemplate.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/EntityTemplate.java @@ -5,6 +5,7 @@ import org.onap.sdc.toscaparser.api.elements.*; import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; +import javax.annotation.Nullable; import java.util.ArrayList; import java.util.LinkedHashMap; import java.util.Map; @@ -51,16 +52,27 @@ public abstract class EntityTemplate { private ArrayList _requirements; private ArrayList _capabilities; + @Nullable + private NodeTemplate _parentNodeTemplate; + // dummy constructor for subclasses that don't want super public EntityTemplate() { return; } + public EntityTemplate(String _name, + LinkedHashMap _template, + String _entityName, + LinkedHashMap _customDef) { + this(_name, _template, _entityName, _customDef, null); + } + @SuppressWarnings("unchecked") public EntityTemplate(String _name, LinkedHashMap _template, String _entityName, - LinkedHashMap _customDef) { + LinkedHashMap _customDef, + NodeTemplate parentNodeTemplate) { name = _name; entityTpl = _template; customDef = _customDef; @@ -111,8 +123,13 @@ public abstract class EntityTemplate { _interfaces = null; _requirements = null; _capabilities = null; + _parentNodeTemplate = parentNodeTemplate; } + public NodeTemplate getParentNodeTemplate() { + return _parentNodeTemplate; + } + public String getType() { if(typeDefinition != null) { String clType = 
typeDefinition.getClass().getSimpleName(); diff --git a/src/main/java/org/onap/sdc/toscaparser/api/Group.java b/src/main/java/org/onap/sdc/toscaparser/api/Group.java index de031e6..15ddfb1 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/Group.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/Group.java @@ -25,12 +25,18 @@ public class Group extends EntityTemplate { ArrayList memberNodes; LinkedHashMap customDef; Metadata metaData; - + + + public Group(String _name, LinkedHashMap _templates, + ArrayList _memberNodes, + LinkedHashMap _customDef){ + this(_name, _templates, _memberNodes, _customDef, null); + } public Group(String _name, LinkedHashMap _templates, ArrayList _memberNodes, - LinkedHashMap _customDef) { - super(_name, _templates, "group_type", _customDef); + LinkedHashMap _customDef, NodeTemplate parentNodeTemplate) { + super(_name, _templates, "group_type", _customDef, parentNodeTemplate); name = _name; tpl = _templates; diff --git a/src/main/java/org/onap/sdc/toscaparser/api/NodeTemplate.java b/src/main/java/org/onap/sdc/toscaparser/api/NodeTemplate.java index 270e908..eaa650b 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/NodeTemplate.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/NodeTemplate.java @@ -29,15 +29,25 @@ public class NodeTemplate extends EntityTemplate { private static final String METADATA = "metadata"; + public NodeTemplate(String name, + LinkedHashMap ntnodeTemplates, + LinkedHashMap ntcustomDef, + ArrayList ntavailableRelTpls, + LinkedHashMap ntavailableRelTypes) { + this( name, ntnodeTemplates, ntcustomDef, ntavailableRelTpls, + ntavailableRelTypes, null); + } @SuppressWarnings("unchecked") public NodeTemplate(String name, LinkedHashMap ntnodeTemplates, LinkedHashMap ntcustomDef, ArrayList ntavailableRelTpls, - LinkedHashMap ntavailableRelTypes) { + LinkedHashMap ntavailableRelTypes, + NodeTemplate parentNodeTemplate) { - super(name, (LinkedHashMap)ntnodeTemplates.get(name), "node_type", ntcustomDef); + 
super(name, (LinkedHashMap)ntnodeTemplates.get(name), + "node_type", ntcustomDef, parentNodeTemplate); templates = ntnodeTemplates; _validateFields((LinkedHashMap)templates.get(name)); @@ -209,7 +219,7 @@ public class NodeTemplate extends EntityTemplate { LinkedHashMap req = new LinkedHashMap<>(); req.put("relationship", CopyUtils.copyLhmOrAl(requirement.getRelationship())); req.put("type",rtype); - RelationshipTemplate tpl = new RelationshipTemplate(req, rtype, customDef, this, source); + RelationshipTemplate tpl = new RelationshipTemplate(req, rtype, customDef, this, source, getParentNodeTemplate()); relationshipTpl.add(tpl); } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/Policy.java b/src/main/java/org/onap/sdc/toscaparser/api/Policy.java index 9eaacfc..5945532 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/Policy.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/Policy.java @@ -28,14 +28,22 @@ public class Policy extends EntityTemplate { String targetsType; ArrayList triggers; LinkedHashMap properties; - + public Policy(String _name, LinkedHashMap _policy, -// ArrayList targetObjects, ArrayList targetObjects, String _targetsType, LinkedHashMap _customDef) { - super(_name,_policy,"policy_type",_customDef); + this(_name, _policy, targetObjects, _targetsType, _customDef, null); + } + + public Policy(String _name, + LinkedHashMap _policy, +// ArrayList targetObjects, + ArrayList targetObjects, + String _targetsType, + LinkedHashMap _customDef, NodeTemplate parentNodeTemplate) { + super(_name,_policy,"policy_type",_customDef, parentNodeTemplate); if(_policy.get(METADATA) != null) { LinkedHashMap metadataMap = (LinkedHashMap)_policy.get(METADATA); diff --git a/src/main/java/org/onap/sdc/toscaparser/api/Property.java b/src/main/java/org/onap/sdc/toscaparser/api/Property.java index 6d05af0..0ef9dd1 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/Property.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/Property.java @@ -1,29 +1,38 
@@ package org.onap.sdc.toscaparser.api; -import java.util.ArrayList; -import java.util.LinkedHashMap; -import java.util.Map; - -import org.onap.sdc.toscaparser.api.elements.PropertyDef; -import org.onap.sdc.toscaparser.api.elements.StatefulEntityType; +import com.google.common.collect.Lists; import org.onap.sdc.toscaparser.api.elements.constraints.Constraint; import org.onap.sdc.toscaparser.api.elements.constraints.Schema; import org.onap.sdc.toscaparser.api.functions.Function; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.stream.Collectors; public class Property { // TOSCA built-in Property type + private static final Logger logger = LoggerFactory.getLogger(Property.class.getName()); private static final String TYPE = "type"; private static final String REQUIRED = "required"; private static final String DESCRIPTION = "description"; private static final String DEFAULT = "default"; private static final String CONSTRAINTS = "constraints"; - + private static String ENTRY_SCHEMA = "entry_schema"; + private static String DATA_TYPE = "datatypes"; + private static final String[] PROPERTY_KEYS = { TYPE, REQUIRED, DESCRIPTION, DEFAULT, CONSTRAINTS}; private static final String ENTRYTYPE = "type"; private static final String ENTRYPROPERTIES = "properties"; + private static final String PATH_DELIMITER = "#"; private static final String[] ENTRY_SCHEMA_KEYS = { ENTRYTYPE, ENTRYPROPERTIES}; @@ -117,6 +126,195 @@ public class Property { ", customDef=" + customDef + '}'; } + + /** + * Retrieves property value as list of strings if
+ * - the value is simple
+ * - the value is list of simple values
+ * - the provided path refers to a simple property inside a data type
+ * @param propertyPath valid name of property for search.
+ * If a name refers to a simple field inside a datatype, the property name should be defined with # delimiter.
+ * + * @return List of property values. If not found, empty list will be returned.
+ * If property value is a list either of simple fields or of simple fields inside a datatype, all values from the list should be returned + */ + public List getLeafPropertyValue(String propertyPath) { + List propertyValueList = Collections.emptyList(); + + if (logger.isDebugEnabled()) { + logger.debug("getLeafPropertyValue=> A new request: propertyPath: {}, value: {}", propertyPath, getValue()); + } + if (propertyPath == null || getValue() == null || + //if entry_schema disappears, it is datatype, + // otherwise it is map of simple types - should be ignored + isValueMapOfSimpleTypes()) { + logger.error("It is a wrong request - ignoring! propertyPath: {}, value: {}", propertyPath, getValue()); + return propertyValueList; + } + String[] path = propertyPath.split(PATH_DELIMITER); + + if (Schema.isRequestedTypeSimple(getPropertyTypeByPath(path))) { + //the internal property type in the path is either simple or list of simple types + if (isValueInsideDataType()) { + if (logger.isDebugEnabled()) { + logger.debug("The requested is an internal simple property inside of a data type"); + } + //requested value is an internal simple property inside of a data type + propertyValueList = getSimplePropertyValueForComplexType(path); + } + else { + if (logger.isDebugEnabled()) { + logger.debug("The requested property has simple type or list of simple types"); + } + //the requested property is simple type or list of simple types + propertyValueList = getSimplePropertyValueForSimpleType(); + } + } + return propertyValueList; + } + + private boolean isValueMapOfSimpleTypes() { + if (getValue() instanceof Map && getEntrySchema() != null) { + logger.warn("This property value is a map of simple types"); + return true; + } + return false; + } + + private boolean isValueInsideDataType() { + //value is either a list of values for data type + //or data type + return (Schema.LIST.equals(getType()) && isDataTypeInEntrySchema()) + || (getEntrySchema() == null && getType().contains(DATA_TYPE)); 
+ } + + private Object getSimpleValueFromComplexObject(Object current, String[] path) { + if (current == null) { + return null; + } + int index = 0; + + if (path.length > index) { + for (int i = index; i < path.length; i++) { + if (current instanceof Map) { + current = ((Map) current).get(path[i]); + } else if (current instanceof List) { + current = ((List) current).get(0); + i--; + } + else { + return null; + } + } + } + if (current != null) { + return current; + } + return null; + } + + private List getSimplePropertyValueForSimpleType() { + if (getValue() instanceof List || getValue() instanceof Map) { + return getSimplePropertyValueForComplexType(null); + } + return Lists.newArrayList(String.valueOf(value)); + } + + private List getSimplePropertyValueForComplexType(String[] path) { + if (getValue() instanceof List ) { + return ((List) getValue()).stream() + .map(v -> { + if (path != null) { + return getSimpleValueFromComplexObject(v, path); + } else { + return v; + } + }) + //it might be null when get_input can't be resolved + // e.g.: + // - get_input has two parameters: 1. list and 2. 
index in this list + //and list has no value + // - neither value no default is defined for get_input + .filter(Objects::nonNull) + .map(String::valueOf) + .collect(Collectors.toList()); + } + //it is data type + List valueList = Lists.newArrayList(); + String valueString = String.valueOf(getSimpleValueFromComplexObject(getValue(), path)); + if (Objects.nonNull(valueString)) { + valueList.add(valueString); + } + return valueList; + } + + private String getPropertyTypeByPath(String[] path) { + String propertyType = calculatePropertyType(); + + if (path.length > 0 && !path[0].isEmpty()) { + return getInternalPropertyType(propertyType, path, 0); + } + return propertyType; + } + + private String calculatePropertyType() { + String propertyType = getType(); + if (Schema.LIST.equals(propertyType)) { + //if it is list, return entry schema type + return (String)getEntrySchema().get(ENTRYTYPE); + } + return propertyType; + } + + private String calculatePropertyType(LinkedHashMap property) { + String type = (String) property.get(TYPE); + if (Schema.LIST.equals(type)) { + //it might be a data type + return getEntrySchemaType(property); + } + return type; + } + + private String getInternalPropertyType(String dataTypeName, String[] path, int index) { + if (path.length > index) { + LinkedHashMap complexProperty = (LinkedHashMap)customDef.get(dataTypeName); + if (complexProperty != null) { + LinkedHashMap dataTypeProperties = (LinkedHashMap) complexProperty.get(ENTRYPROPERTIES); + return getPropertyTypeFromCustomDefDeeply(path, index, dataTypeProperties); + } + } + //stop searching - seems as wrong flow: the path is finished but the value is not found yet + return null; + } + + private String getEntrySchemaType(LinkedHashMap property) { + LinkedHashMap entrySchema = (LinkedHashMap)property.get(ENTRY_SCHEMA); + if (entrySchema != null) { + return (String) entrySchema.get(TYPE); + } + return null; + } + + private String getPropertyTypeFromCustomDefDeeply(String[] path, int index, 
LinkedHashMap properties) { + if (properties != null) { + LinkedHashMap foundProperty = (LinkedHashMap) (properties).get(path[index]); + if (foundProperty != null) { + String propertyType = calculatePropertyType(foundProperty); + if (propertyType == null || index == path.length - 1){ + return propertyType; + } + return getInternalPropertyType(propertyType, path, index + 1); + } + } + return null; + } + + private boolean isDataTypeInEntrySchema() { + String entrySchemaType = (String)getEntrySchema().get(ENTRYTYPE); + return entrySchemaType != null && entrySchemaType.contains(DATA_TYPE); + } + + } /*python diff --git a/src/main/java/org/onap/sdc/toscaparser/api/RelationshipTemplate.java b/src/main/java/org/onap/sdc/toscaparser/api/RelationshipTemplate.java index a94caed..79bf83b 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/RelationshipTemplate.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/RelationshipTemplate.java @@ -24,13 +24,21 @@ public class RelationshipTemplate extends EntityTemplate { private NodeTemplate target; private NodeTemplate source; private ArrayList _properties; - + public RelationshipTemplate(LinkedHashMap rtrelationshipTemplate, String rtname, LinkedHashMap rtcustomDef, NodeTemplate rttarget, NodeTemplate rtsource) { - super(rtname,rtrelationshipTemplate,"relationship_type",rtcustomDef); + this(rtrelationshipTemplate, rtname, rtcustomDef, rttarget, rtsource, null); + } + + public RelationshipTemplate(LinkedHashMap rtrelationshipTemplate, + String rtname, + LinkedHashMap rtcustomDef, + NodeTemplate rttarget, + NodeTemplate rtsource, NodeTemplate parentNodeTemplate) { + super(rtname,rtrelationshipTemplate,"relationship_type",rtcustomDef, parentNodeTemplate); name = rtname; target = rttarget; diff --git a/src/main/java/org/onap/sdc/toscaparser/api/TopologyTemplate.java b/src/main/java/org/onap/sdc/toscaparser/api/TopologyTemplate.java index 4c9a53d..0b1dfcd 100644 --- 
a/src/main/java/org/onap/sdc/toscaparser/api/TopologyTemplate.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/TopologyTemplate.java @@ -71,6 +71,7 @@ public class TopologyTemplate { description = _tplDescription(); inputs = _inputs(); relationshipTemplates =_relationshipTemplates(); + //todo: pass subMappedNodeTemplate to ET constractor nodeTemplates = _nodeTemplates(); outputs = _outputs(); if(nodeTemplates != null) { @@ -128,7 +129,8 @@ public class TopologyTemplate { tpls, customDefs, relationshipTemplates, - relTypes); + relTypes, + subMappedNodeTemplate); if(tpl.getTypeDefinition() != null) { boolean b = NodeType.TOSCA_DEF.get(tpl.getType()) != null; if(b || (tpl.getCustomDef() != null && !tpl.getCustomDef().isEmpty())) { @@ -148,7 +150,7 @@ public class TopologyTemplate { if(tpls != null) { for(String name: tpls.keySet()) { RelationshipTemplate tpl = new RelationshipTemplate( - (LinkedHashMap)tpls.get(name),name,customDefs,null,null); + (LinkedHashMap)tpls.get(name),name,customDefs,null,null, subMappedNodeTemplate); alRelationshipTemplates.add(tpl); } @@ -216,7 +218,8 @@ public class TopologyTemplate { policyTpl, targetObjects, targetsType, - customDefs); + customDefs, + subMappedNodeTemplate); alPolicies.add(policyObj); } return alPolicies; @@ -244,7 +247,7 @@ public class TopologyTemplate { Group group = new Group(groupName, groupTpl, memberNodes, - customDefs); + customDefs, subMappedNodeTemplate); groups.add(group); } return groups; @@ -314,9 +317,7 @@ public class TopologyTemplate { if(tpl.get(INPUTS) != null) { return (LinkedHashMap)tpl.get(INPUTS); } - else { - return new LinkedHashMap(); - } + return new LinkedHashMap(); } @SuppressWarnings("unchecked") @@ -329,19 +330,15 @@ public class TopologyTemplate { if(tpl.get(RELATIONSHIP_TEMPLATES) != null) { return (LinkedHashMap)tpl.get(RELATIONSHIP_TEMPLATES); } - else { - return new LinkedHashMap(); - } + return new LinkedHashMap(); } @SuppressWarnings("unchecked") private LinkedHashMap 
_tplOutputs() { - if(tpl.get(OUTPUTS) != null) { - return (LinkedHashMap)tpl.get(OUTPUTS); - } - else { - return new LinkedHashMap(); - } + if(tpl.get(OUTPUTS) != null) { + return (LinkedHashMap)tpl.get(OUTPUTS); + } + return new LinkedHashMap(); } @SuppressWarnings("unchecked") @@ -349,9 +346,7 @@ public class TopologyTemplate { if(tpl.get(SUBSTITUTION_MAPPINGS) != null) { return (LinkedHashMap)tpl.get(SUBSTITUTION_MAPPINGS); } - else { - return new LinkedHashMap(); - } + return new LinkedHashMap(); } @SuppressWarnings("unchecked") @@ -359,9 +354,7 @@ public class TopologyTemplate { if(tpl.get(GROUPS) != null) { return (LinkedHashMap)tpl.get(GROUPS); } - else { - return new LinkedHashMap(); - } + return new LinkedHashMap(); } @SuppressWarnings("unchecked") @@ -369,9 +362,7 @@ public class TopologyTemplate { if(tpl.get(POLICIES) != null) { return (LinkedHashMap)tpl.get(POLICIES); } - else { - return new LinkedHashMap<>(); - } + return new LinkedHashMap<>(); } private void _validateField() { diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Schema.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Schema.java index 73a63ef..c0ed6bc 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Schema.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Schema.java @@ -4,6 +4,7 @@ import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; import java.util.ArrayList; +import java.util.Arrays; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.Map; @@ -42,6 +43,11 @@ public class Schema { INTEGER, STRING, BOOLEAN, FLOAT, RANGE,NUMBER, TIMESTAMP, LIST, MAP, SCALAR_UNIT_SIZE, SCALAR_UNIT_FREQUENCY, SCALAR_UNIT_TIME, VERSION, PORTDEF, PORTSPEC, JSON}; + + public static final String SIMPLE_PROPERTY_TYPES[] = { + INTEGER, STRING, BOOLEAN, FLOAT, RANGE,NUMBER, TIMESTAMP, + SCALAR_UNIT_SIZE, 
SCALAR_UNIT_FREQUENCY, SCALAR_UNIT_TIME, + VERSION}; @SuppressWarnings("unused") private static final String SCALAR_UNIT_SIZE_DEFAULT = "B"; @@ -107,6 +113,10 @@ public class Schema { return (String)schema.getOrDefault(STATUS,""); } + public static boolean isRequestedTypeSimple(String type) { + return Arrays.stream(SIMPLE_PROPERTY_TYPES).anyMatch(t->t.equals(type)); + } + @SuppressWarnings("unchecked") public ArrayList getConstraints() { if(constraintsList.size() == 0) { diff --git a/src/test/java/org/onap/sdc/toscaparser/api/JToscaImportTest.java b/src/test/java/org/onap/sdc/toscaparser/api/JToscaImportTest.java index 8e587a9..c660153 100644 --- a/src/test/java/org/onap/sdc/toscaparser/api/JToscaImportTest.java +++ b/src/test/java/org/onap/sdc/toscaparser/api/JToscaImportTest.java @@ -13,6 +13,7 @@ import java.util.Optional; import java.util.stream.Collectors; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; @@ -111,6 +112,38 @@ public class JToscaImportTest { assertNull(nodeTemplate.getPropertyValueFromTemplatesByName("test")); } + @Test + public void testGetParentNodeTemplateTest() throws JToscaException { + + String fileStr = JToscaImportTest.class.getClassLoader().getResource("csars/service-AdiodVmxVpeBvService-csar.csar").getFile(); + File file = new File(fileStr); + ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); + NodeTemplate nodeTemplate = toscaTemplate.getNodeTemplates().get(0); + //parent of this VF is service (null) + assertNull(nodeTemplate.getParentNodeTemplate()); + List children = nodeTemplate.getSubMappingToscaTemplate().getNodeTemplates(); + assertFalse(children.isEmpty()); + NodeTemplate cVFC = children.get(4); + //parent is the VF above + assertEquals("2017-488_ADIOD-vPE 0", cVFC.getParentNodeTemplate().getName()); + List children1 
= cVFC.getSubMappingToscaTemplate().getNodeTemplates(); + assertFalse(children1.isEmpty()); + //parent is the CVFC above + assertEquals(cVFC, children1.get(0).getParentNodeTemplate()); + +/* + + TopologyTemplate tt = nodeTemplate.getOriginComponentTemplate(); + List groups = tt.getGroups(); + List policies = tt.getPolicies(); + + TopologyTemplate tt1 = cVFC.getOriginComponentTemplate(); + groups = tt.getGroups(); + policies = tt.getPolicies(); +*/ + + } + @Test public void testNullValueHasNoNullPointerException() throws JToscaException { diff --git a/src/test/java/org/onap/sdc/toscaparser/api/elements/CalculatePropertyByPathTest.java b/src/test/java/org/onap/sdc/toscaparser/api/elements/CalculatePropertyByPathTest.java new file mode 100644 index 0000000..59c8445 --- /dev/null +++ b/src/test/java/org/onap/sdc/toscaparser/api/elements/CalculatePropertyByPathTest.java @@ -0,0 +1,147 @@ +package org.onap.sdc.toscaparser.api.elements; + +import org.junit.BeforeClass; +import org.junit.Test; +import org.onap.sdc.toscaparser.api.JToscaImportTest; +import org.onap.sdc.toscaparser.api.NodeTemplate; +import org.onap.sdc.toscaparser.api.Property; +import org.onap.sdc.toscaparser.api.ToscaTemplate; +import org.onap.sdc.toscaparser.api.common.JToscaException; + +import java.io.File; +import java.net.URL; +import java.util.List; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +public class CalculatePropertyByPathTest { + private static ToscaTemplate toscaTemplate; + + @BeforeClass + public static void setUpClass() throws JToscaException { + URL scarUrl = JToscaImportTest.class.getClassLoader().getResource("csars/service-NetworkCloudVnfServiceMock-csar.csar"); + if (scarUrl != null) { + File file = new File(scarUrl.getFile()); + toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); + } + + } + + @Test + public void testGetPropertyWhenPropertyHasListOfDataTypesAndPathIsNotEmpty() throws JToscaException { + 
NodeTemplate cp = toscaTemplate.getNodeTemplates().get(0) //Network Cloud VNF MOCK 0 + .getSubMappingToscaTemplate().getNodeTemplates().get(0) //abstract_testVM + .getSubMappingToscaTemplate().getNodeTemplates().get(0); //testVM_testVM_SRIOVtrunk1_port + + Property property = cp.getProperties().get("related_networks"); + List propertyValueList = property.getLeafPropertyValue("related_network_role"); + assertEquals(3, propertyValueList.size()); + assertTrue(propertyValueList.contains("cor_direct_2")); + assertTrue(propertyValueList.contains("sgi_direct_2")); + assertTrue(propertyValueList.contains("int_imbl_2")); + } + + @Test + public void testGetPropertyWhenPropertyHasDataTypeAndPathIsEmpty() { + NodeTemplate cp = toscaTemplate.getNodeTemplates().get(0) //Network Cloud VNF MOCK 0 + .getSubMappingToscaTemplate().getNodeTemplates().get(0) //abstract_testVM + .getSubMappingToscaTemplate().getNodeTemplates().get(1); //testVM_testVM_SRIOVNonTrunk0_port + + Property property = cp.getProperties().get("exCP_naming"); + List propertyValueList = property.getLeafPropertyValue(""); + assertTrue(propertyValueList.isEmpty()); + } + + @Test + public void testGetPropertyWhenPropertyHasSimpleTypeAndValueAsGetInputIsNotResolvedCorrectlyAndPathIsEmpty() { + NodeTemplate cp = toscaTemplate.getNodeTemplates().get(0) //Network Cloud VNF MOCK 0 + .getSubMappingToscaTemplate().getNodeTemplates().get(0) //abstract_testVM + .getSubMappingToscaTemplate().getNodeTemplates().get(1); //testVM_testVM_SRIOVNonTrunk0_port + + Property property = cp.getProperties().get("network"); + List propertyValueList = property.getLeafPropertyValue(""); + assertTrue(propertyValueList.isEmpty()); + } + + @Test + public void testGetPropertyWhenPropertyHasSimpleTypeAndPathIsEmpty() { + NodeTemplate cp = toscaTemplate.getNodeTemplates().get(0) //Network Cloud VNF MOCK 0 + .getSubMappingToscaTemplate().getNodeTemplates().get(0) //abstract_testVM + .getSubMappingToscaTemplate().getNodeTemplates().get(1); 
//testVM_testVM_SRIOVNonTrunk0_port + + Property property = cp.getProperties().get("subinterface_indicator"); + List propertyValueList = property.getLeafPropertyValue(""); + assertEquals(1, propertyValueList.size()); + assertEquals("false", propertyValueList.get(0)); + } + + + @Test + public void testGetPropertyWhenPropertyHasDataTypeAndPathIsNotEmpty() { + NodeTemplate cp = toscaTemplate.getNodeTemplates().get(0) //Network Cloud VNF MOCK 0 + .getSubMappingToscaTemplate().getNodeTemplates().get(0) //abstract_testVM + .getSubMappingToscaTemplate().getNodeTemplates().get(2); //testVM_testVM_OVS_port + + Property property = cp.getProperties().get("ip_requirements"); + List propertyValueList = property.getLeafPropertyValue("ip_version"); + assertEquals(1, propertyValueList.size()); + assertEquals("4", propertyValueList.get(0)); + } + + @Test + public void testGetPropertyWhenPropertyHasListOfDataTypesAndPathIsNull() { + NodeTemplate cp = toscaTemplate.getNodeTemplates().get(0) //Network Cloud VNF MOCK 0 + .getSubMappingToscaTemplate().getNodeTemplates().get(0) //abstract_testVM + .getSubMappingToscaTemplate().getNodeTemplates().get(2); //testVM_testVM_OVS_port + + Property property = cp.getProperties().get("ip_requirements"); + assertTrue(property.getLeafPropertyValue(null).isEmpty()); + } + + @Test + public void testGetPropertyWhenPropertyHasListOfDataTypesAndPathIsComplex() { + NodeTemplate cp = toscaTemplate.getNodeTemplates().get(0) //Network Cloud VNF MOCK 0 + .getSubMappingToscaTemplate().getNodeTemplates().get(0) //abstract_testVM + .getSubMappingToscaTemplate().getNodeTemplates().get(0); //testVM_testVM_SRIOVtrunk1_port + + Property property = cp.getProperties().get("ip_requirements"); + List propertyValueList = property.getLeafPropertyValue("ip_count_required#is_required"); + assertEquals(1, propertyValueList.size()); + assertEquals("false", propertyValueList.get(0)); + } + + @Test + public void testGetPropertyWhenPropertyHasListOfDataTypesAndPathIsWrong() { + 
NodeTemplate cp = toscaTemplate.getNodeTemplates().get(0) //Network Cloud VNF MOCK 0 + .getSubMappingToscaTemplate().getNodeTemplates().get(0) //abstract_testVM + .getSubMappingToscaTemplate().getNodeTemplates().get(0); //testVM_testVM_SRIOVtrunk1_port + + Property property = cp.getProperties().get("ip_requirements"); + List propertyValueList = property.getLeafPropertyValue("ip_count_required#is_required_1"); + assertEquals(0, propertyValueList.size()); + } + + @Test + public void testGetPropertyWhenPropertyHasDataTypeWithoutSchemaAndComplexPath() { + NodeTemplate cp = toscaTemplate.getNodeTemplates().get(0) //Network Cloud VNF MOCK 0 + .getSubMappingToscaTemplate().getNodeTemplates().get(0) //abstract_testVM + .getSubMappingToscaTemplate().getNodeTemplates().get(0); //testVM_testVM_SRIOVtrunk1_port + + Property property = cp.getProperties().get("mac_requirements"); + List propertyValueList = property.getLeafPropertyValue("mac_count_required#is_required"); + assertEquals(1, propertyValueList.size()); + assertEquals("false", propertyValueList.get(0)); + } + + @Test + public void testGetPropertyWhenPropertyHasDataTypeWithoutSchemaAndSimplePath() { + NodeTemplate cp = toscaTemplate.getNodeTemplates().get(0) //Network Cloud VNF MOCK 0 + .getSubMappingToscaTemplate().getNodeTemplates().get(0) //abstract_testVM + .getSubMappingToscaTemplate().getNodeTemplates().get(0); //testVM_testVM_SRIOVtrunk1_port + + Property property = cp.getProperties().get("mac_requirements"); + List propertyValueList = property.getLeafPropertyValue("mac_count_required"); + assertEquals(0, propertyValueList.size()); + } +} diff --git a/src/test/resources/csars/service-NetworkCloudVnfServiceMock-csar.csar b/src/test/resources/csars/service-NetworkCloudVnfServiceMock-csar.csar new file mode 100644 index 0000000..aabf83c Binary files /dev/null and b/src/test/resources/csars/service-NetworkCloudVnfServiceMock-csar.csar differ diff --git a/version.properties b/version.properties index 61bae76..3a44f58 
100644 --- a/version.properties +++ b/version.properties @@ -5,7 +5,7 @@ major=1 minor=4 -patch=9 +patch=10 base_version=${major}.${minor}.${patch} -- cgit 1.2.3-korg From f500065cc4edf1e51657d701c487cad90930c86e Mon Sep 17 00:00:00 2001 From: Tal Gitelman Date: Tue, 8 Jan 2019 17:31:28 +0200 Subject: New version change Change-Id: Icc6e5c4113d84f6ee9e5d7e34cbb32c079f8e67f Issue-ID: SDC-1955 Signed-off-by: Tal Gitelman --- pom.xml | 2 +- version.properties | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/pom.xml b/pom.xml index c75ce9b..7497f2d 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ org.onap.sdc.jtosca jtosca - 1.4.10-SNAPSHOT + 1.5.0-SNAPSHOT sdc-jtosca diff --git a/version.properties b/version.properties index 3a44f58..011dc3c 100644 --- a/version.properties +++ b/version.properties @@ -4,8 +4,8 @@ # because they are used in Jenkins, whose plug-in doesn't support major=1 -minor=4 -patch=10 +minor=5 +patch=0 base_version=${major}.${minor}.${patch} -- cgit 1.2.3-korg From 8b7eb2c7c60a8a8be540e810967bff8542c45cd1 Mon Sep 17 00:00:00 2001 From: "Manzon, Inna (im453s)" Date: Mon, 21 Jan 2019 16:21:36 +0200 Subject: SDC Tosca Parser getEntity API Change-Id: I237ea812e7ff4e12900776de79cfb6b154464974 Issue-ID: SDC-1967 Signed-off-by: Manzon, Inna (im453s) --- .../java/org/onap/sdc/toscaparser/api/Policy.java | 24 ++++++++++---------- .../onap/sdc/toscaparser/api/JToscaImportTest.java | 26 ++++++++++++++++++++++ 2 files changed, 38 insertions(+), 12 deletions(-) diff --git a/src/main/java/org/onap/sdc/toscaparser/api/Policy.java b/src/main/java/org/onap/sdc/toscaparser/api/Policy.java index 5945532..563ea25 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/Policy.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/Policy.java @@ -13,17 +13,17 @@ import org.onap.sdc.toscaparser.api.utils.ValidateUtils; public class Policy extends EntityTemplate { - private static final String TYPE = "type"; - private static final String METADATA = 
"metadata"; - private static final String DESCRIPTION = "description"; - private static final String PROPERTIES = "properties"; - private static final String TARGETS = "targets"; + static final String TYPE = "type"; + static final String METADATA = "metadata"; + static final String DESCRIPTION = "description"; + static final String PROPERTIES = "properties"; + static final String TARGETS = "targets"; private static final String TRIGGERS = "triggers"; private static final String SECTIONS[] = { TYPE, METADATA, DESCRIPTION, PROPERTIES, TARGETS, TRIGGERS}; -// LinkedHashMap metaData; - Metadata metaData; + Metadata metaDataObject; + LinkedHashMap metaData = null; ArrayList targetsList; // *** a list of NodeTemplate OR a list of Group *** String targetsType; ArrayList triggers; @@ -46,9 +46,9 @@ public class Policy extends EntityTemplate { super(_name,_policy,"policy_type",_customDef, parentNodeTemplate); if(_policy.get(METADATA) != null) { - LinkedHashMap metadataMap = (LinkedHashMap)_policy.get(METADATA); - ValidateUtils.validateMap(metadataMap); - metaData = new Metadata(metadataMap); + metaData = (LinkedHashMap)_policy.get(METADATA); + ValidateUtils.validateMap(metaData); + metaDataObject = new Metadata(metaData); } targetsList = targetObjects; @@ -78,11 +78,11 @@ public class Policy extends EntityTemplate { } public Metadata getMetaDataObj() { - return metaData; + return metaDataObject; } public LinkedHashMap getMetaData() { - return (LinkedHashMap)metaData.getAllProperties(); + return metaData; } // public ArrayList getTargetsList() { diff --git a/src/test/java/org/onap/sdc/toscaparser/api/JToscaImportTest.java b/src/test/java/org/onap/sdc/toscaparser/api/JToscaImportTest.java index c660153..ff03aed 100644 --- a/src/test/java/org/onap/sdc/toscaparser/api/JToscaImportTest.java +++ b/src/test/java/org/onap/sdc/toscaparser/api/JToscaImportTest.java @@ -154,6 +154,31 @@ public class JToscaImportTest { assertNotNull(inputs); } + @Test + public void 
testGetPolicyMetadata() throws JToscaException { + String fileStr = JToscaImportTest.class.getClassLoader().getResource("csars/service-NetworkCloudVnfServiceMock-csar.csar").getFile(); + File file = new File(fileStr); + ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); + ArrayList policies = toscaTemplate.getPolicies(); + assertNotNull(policies); + assertEquals(1, policies.size()); + assertEquals("org.openecomp.policies.External", policies.get(0).getType()); + assertEquals("adf03496-bf87-43cf-b20a-450e47cb44bd", policies.get(0).getMetaData().getOrDefault("UUID", "").toString()); + assertTrue(policies.get(0).getMetaData().getOrDefault("UUID_test", "").toString().isEmpty()); + } + + @Test + public void testGetPolicyMetadataObj() throws JToscaException { + String fileStr = JToscaImportTest.class.getClassLoader().getResource("csars/service-NetworkCloudVnfServiceMock-csar.csar").getFile(); + File file = new File(fileStr); + ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); + ArrayList policies = toscaTemplate.getPolicies(); + assertNotNull(policies); + assertEquals(1, policies.size()); + assertEquals("adf03496-bf87-43cf-b20a-450e47cb44bd", policies.get(0).getMetaDataObj().getAllProperties().getOrDefault("UUID", "").toString()); + assertTrue(policies.get(0).getMetaDataObj().getAllProperties().getOrDefault("name_test", "").toString().isEmpty()); + } + private void validateInputsAnnotations(List inputs) { List inputsWithAnnotations = inputs.stream().filter(i -> i.getAnnotations() != null) .collect(Collectors.toList()); @@ -174,4 +199,5 @@ public class JToscaImportTest { assertEquals(source_type.get().getValue(), "HEAT"); } + } -- cgit 1.2.3-korg From 1003aeb5392947fec84ee70301e649f0e13c57b4 Mon Sep 17 00:00:00 2001 From: Tal Gitelman Date: Thu, 10 Jan 2019 17:44:52 +0200 Subject: yaml and json validation maven plug in Change-Id: I62355149010d042c6d33dd81dd111f15cf4e0d85 Issue-ID: SDC-1955 
Signed-off-by: Tal Gitelman --- pom.xml | 30 ++++++++++++++++++++++++++++++ 1 file changed, 30 insertions(+) diff --git a/pom.xml b/pom.xml index 7497f2d..02b4861 100644 --- a/pom.xml +++ b/pom.xml @@ -197,6 +197,36 @@ sonar-maven-plugin 3.0.2 + + com.github.sylvainlaurent.maven + yaml-json-validator-maven-plugin + 1.0.1 + + + validate + validate + + validate + + + + + + src/main/resources/**/*.y*ml + src/test/resources/**/*.y*ml + + + + + src/main/resources/**/*.json + src/test/resources/**/*.json + + + + + + + -- cgit 1.2.3-korg From 7aa89e21b6c5ec0206071ec6fc1e671721426aec Mon Sep 17 00:00:00 2001 From: Tal Gitelman Date: Wed, 23 Jan 2019 18:37:06 +0200 Subject: SDC distribution failed bug fix Change-Id: Ia80d03ed2ee9554b315a5db3bc253b4580bdcfb9 Issue-ID: SDC-1955 Signed-off-by: Tal Gitelman --- src/main/java/org/onap/sdc/toscaparser/api/elements/NodeType.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/NodeType.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/NodeType.java index a6d7f81..5ba6622 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/NodeType.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/NodeType.java @@ -111,7 +111,7 @@ public class NodeType extends StatefulEntityType { getRelation = _getRelation(key, nodeType); } else { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE11", String.format( - "NodeTypeForCapabilityNotFoundError: Node type for capability type \"%s\" is not found",captype))); + "NodeTypeRequirementForCapabilityUnfulfilled: Node type: \"%s\" with requrement \"%s\" for node type with capability type \"%s\" is not found\\unfulfilled", this.ntype, key, captype))); } if (getRelation != null) { relation = getRelation; @@ -122,7 +122,7 @@ public class NodeType extends StatefulEntityType { } if(relation == null || nodeType == null){ ThreadLocalsHolder.getCollector().appendValidationIssue(new 
JToscaValidationIssue("JE11", String.format( - "NodeTypeForRelationNotFound: Node type \"%s\" with relationship type \"%s\" is not found",nodeType, relation))); + "NodeTypeForRelationUnfulfilled: Node type \"%s\" - relationship type \"%s\" is unfulfilled", this.ntype, relation))); } else { RelationshipType rtype = new RelationshipType(relation, keyword, customDef); NodeType relatednode = new NodeType(nodeType, customDef); -- cgit 1.2.3-korg From afdb5f5ac3ebc05008f616d3370f9e1d90e6747a Mon Sep 17 00:00:00 2001 From: Tal Gitelman Date: Wed, 23 Jan 2019 18:37:06 +0200 Subject: Fix sonar issues Change-Id: Ic998107754287db593c00cd632b43321d1923c78 Issue-ID: SDC-1895 Signed-off-by: Manzon, Inna (im453s) --- .../csars/service-JennyVtsbcKarunaSvc-csar.csar | Bin 145639 -> 145576 bytes 1 file changed, 0 insertions(+), 0 deletions(-) diff --git a/src/test/resources/csars/service-JennyVtsbcKarunaSvc-csar.csar b/src/test/resources/csars/service-JennyVtsbcKarunaSvc-csar.csar index 3f80621..ee01780 100644 Binary files a/src/test/resources/csars/service-JennyVtsbcKarunaSvc-csar.csar and b/src/test/resources/csars/service-JennyVtsbcKarunaSvc-csar.csar differ -- cgit 1.2.3-korg From e85cec5f2344bbb802ad5ce8085efa9e34d9eaf1 Mon Sep 17 00:00:00 2001 From: Marco Platania Date: Tue, 9 Apr 2019 10:11:16 -0400 Subject: Add onap.policies.* to TOSCA definition - Add policies in the onap.policies domain to TOSCA definition for correct CSAR parsing in CLAMP Change-Id: I62c71d6d72e70f39b0aaecc4ebfa89b2e591e61b Issue-ID: SDC-2230 Signed-off-by: Marco Platania --- src/main/resources/TOSCA_definition_1_0.yaml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/main/resources/TOSCA_definition_1_0.yaml b/src/main/resources/TOSCA_definition_1_0.yaml index c5a4d0f..d80ed17 100644 --- a/src/main/resources/TOSCA_definition_1_0.yaml +++ b/src/main/resources/TOSCA_definition_1_0.yaml @@ -953,6 +953,10 @@ policy_types: description: The TOSCA Policy Type definition that is used to declare 
performance requirements for TOSCA nodes or groups of nodes. + onap.policies.Monitoring: + derived_from: tosca.policies.Root + description: The ONAP Policy Type definition for DCAE uS component monitoring policies. + ########################################################################## # Group Type. # Group Type represents logical grouping of TOSCA nodes that have an -- cgit 1.2.3-korg From a0244589b50f80bc510b5d8dd4df02bb5870056e Mon Sep 17 00:00:00 2001 From: "Sonsino, Ofir (os0695)" Date: Wed, 17 Apr 2019 17:15:57 +0300 Subject: Bump jtosca version Change-Id: I52608df18dd96e63676634a1d29dcd117f5c8737 Issue-ID: SDC-2244 Signed-off-by: Sonsino, Ofir (os0695) --- pom.xml | 2 +- version.properties | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index 02b4861..5dd141f 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ org.onap.sdc.jtosca jtosca - 1.5.0-SNAPSHOT + 1.5.1-SNAPSHOT sdc-jtosca diff --git a/version.properties b/version.properties index 011dc3c..81c5c8f 100644 --- a/version.properties +++ b/version.properties @@ -5,7 +5,7 @@ major=1 minor=5 -patch=0 +patch=1 base_version=${major}.${minor}.${patch} -- cgit 1.2.3-korg From 3b9778c4cf72580e3f87616fd677cb965a55a334 Mon Sep 17 00:00:00 2001 From: "Sonsino, Ofir (os0695)" Date: Tue, 7 May 2019 18:46:27 +0300 Subject: Bump jtosca version Change-Id: I033ddd7e6aee86dc469c00e1f7e5d6ea1c9a6a73 Issue-ID: SDC-2244 Signed-off-by: Sonsino, Ofir (os0695) --- pom.xml | 2 +- version.properties | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/pom.xml b/pom.xml index 5dd141f..631f678 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ org.onap.sdc.jtosca jtosca - 1.5.1-SNAPSHOT + 1.6.0-SNAPSHOT sdc-jtosca diff --git a/version.properties b/version.properties index 81c5c8f..0f0fb2b 100644 --- a/version.properties +++ b/version.properties @@ -4,8 +4,8 @@ # because they are used in Jenkins, whose plug-in doesn't support major=1 -minor=5 -patch=1 +minor=6 +patch=0 
base_version=${major}.${minor}.${patch} -- cgit 1.2.3-korg From 6018fb047963d151d77bf03f6f84446866a30899 Mon Sep 17 00:00:00 2001 From: Toshimichi Fukuda Date: Fri, 19 Apr 2019 17:57:30 +0900 Subject: Change for TOSCA v1.3 get_input Change-Id: I39c8917c8c984896769e08a39302a98bca94e282 Issue-ID: SDC-2046 Signed-off-by: Toshimichi Fukuda --- .../onap/sdc/toscaparser/api/ToscaTemplate.java | 54 ++++++++- .../sdc/toscaparser/api/functions/GetInput.java | 82 ++++++++++++-- .../onap/sdc/toscaparser/api/parameters/Input.java | 5 + .../onap/sdc/toscaparser/api/JToscaImportTest.java | 122 +++++++++++++++++++-- .../toscaparser/api/functions/GetInputTest.java | 96 ++++++++++++++++ .../resources/csars/dataTypes-test-service.csar | Bin 0 -> 46307 bytes src/test/resources/csars/listed_input.csar | Bin 0 -> 46229 bytes src/test/resources/csars/listed_input_ng.csar | Bin 0 -> 46232 bytes 8 files changed, 342 insertions(+), 17 deletions(-) create mode 100644 src/test/java/org/onap/sdc/toscaparser/api/functions/GetInputTest.java create mode 100644 src/test/resources/csars/dataTypes-test-service.csar create mode 100644 src/test/resources/csars/listed_input.csar create mode 100644 src/test/resources/csars/listed_input_ng.csar diff --git a/src/main/java/org/onap/sdc/toscaparser/api/ToscaTemplate.java b/src/main/java/org/onap/sdc/toscaparser/api/ToscaTemplate.java index b5ae4c4..6edc291 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/ToscaTemplate.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/ToscaTemplate.java @@ -1,3 +1,22 @@ +/*- + * ============LICENSE_START======================================================= + * Copyright (c) 2017 AT&T Intellectual Property. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * Modifications copyright (c) 2019 Fujitsu Limited. + * ================================================================================ + */ package org.onap.sdc.toscaparser.api; import java.io.File; @@ -22,6 +41,7 @@ import org.onap.sdc.toscaparser.api.common.JToscaException; import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; import org.onap.sdc.toscaparser.api.common.ValidationIssueCollector; import org.onap.sdc.toscaparser.api.elements.EntityType; +import org.onap.sdc.toscaparser.api.elements.DataType; import org.onap.sdc.toscaparser.api.elements.Metadata; import org.onap.sdc.toscaparser.api.extensions.ExtTools; import org.onap.sdc.toscaparser.api.parameters.Input; @@ -105,6 +125,7 @@ public class ToscaTemplate extends Object { private LinkedHashMap> metaProperties; private Set processedImports; private LinkedHashMap customDefsFinal = new LinkedHashMap<>(); + private HashSet dataTypes; public ToscaTemplate(String _path, LinkedHashMap _parsedParams, @@ -214,7 +235,8 @@ public class ToscaTemplate extends Object { this.metaData = _tplMetaData(); this.relationshipTypes = _tplRelationshipTypes(); this.description = _tplDescription(); - this.topologyTemplate = _topologyTemplate(); + this.dataTypes = getTopologyDataTypes(); + this.topologyTemplate = _topologyTemplate(); this.repositories = _tplRepositories(); if(topologyTemplate.getTpl() != null) { this.inputs = _inputs(); @@ -325,6 +347,27 @@ public class ToscaTemplate extends Object { return 
topologyTemplate.getGroups(); } + /** + * Read datatypes field + * @return return list of datatypes. + */ + @SuppressWarnings("unchecked") + private HashSet getTopologyDataTypes(){ + LinkedHashMap value = + (LinkedHashMap)tpl.get(DATA_TYPES); + HashSet datatypes = new HashSet<>(); + if(value != null) { + customDefsFinal.putAll(value); + for(Map.Entry me: value.entrySet()) { + DataType datatype = new DataType(me.getKey(), value); + datatypes.add(datatype); + } + } + + + return datatypes; + } + /** * This method is used to get consolidated custom definitions from all imports * It is logically divided in two parts to handle imports; map and list formats. @@ -855,6 +898,14 @@ public class ToscaTemplate extends Object { return nestedToscaTplsWithTopology; } + /** + * Get datatypes. + * @return return list of datatypes. + */ + public HashSet getDataTypes() { + return dataTypes; + } + @Override public String toString() { return "ToscaTemplate{" + @@ -883,6 +934,7 @@ public class ToscaTemplate extends Object { ", graph=" + graph + ", csarTempDir='" + csarTempDir + '\'' + ", nestingLoopCounter=" + nestingLoopCounter + + ", dataTypes=" + dataTypes + '}'; } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/functions/GetInput.java b/src/main/java/org/onap/sdc/toscaparser/api/functions/GetInput.java index 24d5a18..026113e 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/functions/GetInput.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/functions/GetInput.java @@ -1,3 +1,22 @@ +/*- + * ============LICENSE_START======================================================= + * Copyright (c) 2017 AT&T Intellectual Property. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * Modifications copyright (c) 2019 Fujitsu Limited. + * ================================================================================ + */ package org.onap.sdc.toscaparser.api.functions; import org.onap.sdc.toscaparser.api.DataEntity; @@ -10,7 +29,13 @@ import java.util.ArrayList; import java.util.LinkedHashMap; public class GetInput extends Function { - + + public static final String INDEX = "INDEX"; + public static final String INPUTS = "inputs"; + public static final String TYPE = "type"; + public static final String PROPERTIES = "properties"; + public static final String ENTRY_SCHEMA = "entry_schema"; + public GetInput(TopologyTemplate toscaTpl, Object context, String name, ArrayList _args) { super(toscaTpl,context,name,_args); @@ -18,17 +43,13 @@ public class GetInput extends Function { @Override void validate() { + // if(args.size() != 1) { // //PA - changed to WARNING from CRITICAL after talking to Renana, 22/05/2017 // ThreadLocalsHolder.getCollector().appendWarning(String.format( // "ValueError: Expected one argument for function \"get_input\" but received \"%s\"", // args.toString())); // } - if(args.size() > 2) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE009", String.format( - "ValueError: Expected max 2 arguments for function \"get_input\" but received \"%s\"", - args.size()))); - } boolean bFound = false; for(Input inp: toscaTpl.getInputs()) { if(inp.getName().equals(args.get(0))) { @@ -40,12 +61,48 @@ 
public class GetInput extends Function { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE158", String.format( "UnknownInputError: Unknown input \"%s\"",args.get(0)))); } + else if(args.size() > 2){ + LinkedHashMap inputs = (LinkedHashMap)toscaTpl.getTpl().get(INPUTS); + LinkedHashMap data = (LinkedHashMap)inputs.get(getInputName()); + String type ; + + for(int argumentNumber=1;argumentNumber schema = (LinkedHashMap)data.get(ENTRY_SCHEMA); + dataTypeName=(String)schema.get(TYPE); + }else{ + dataTypeName=type; + } + //check property name + LinkedHashMap dataType = (LinkedHashMap)toscaTpl.getCustomDefs().get(dataTypeName); + if(dataType != null) { + LinkedHashMap props = (LinkedHashMap) dataType.get(PROPERTIES); + data = (LinkedHashMap)props.get(args.get(argumentNumber).toString()); + if(data != null) { + bFound = true; + } + } + } + if(!bFound){ + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE282", String.format( + "UnknownDataType: Unknown data type \"%s\"",args.get(argumentNumber)))); + } + } + } } public Object result() { if(toscaTpl.getParsedParams() != null && toscaTpl.getParsedParams().get(getInputName()) != null) { - LinkedHashMap ttinp = (LinkedHashMap)toscaTpl.getTpl().get("inputs"); + LinkedHashMap ttinp = (LinkedHashMap)toscaTpl.getTpl().get(INPUTS); LinkedHashMap ttinpinp = (LinkedHashMap)ttinp.get(getInputName()); String type = (String)ttinpinp.get("type"); @@ -94,6 +151,15 @@ public class GetInput extends Function { return (String)args.get(0); } + public LinkedHashMap getEntrySchema() { + LinkedHashMap inputs = (LinkedHashMap)toscaTpl.getTpl().get(INPUTS); + LinkedHashMap inputValue = (LinkedHashMap)inputs.get(getInputName()); + return (LinkedHashMap)inputValue.get(ENTRY_SCHEMA); + } + + public ArrayList getArguments(){ + return args; + } } /*python @@ -136,4 +202,4 @@ def result(self): def input_name(self): return self.args[0] -*/ \ No newline at end of file +*/ diff --git 
a/src/main/java/org/onap/sdc/toscaparser/api/parameters/Input.java b/src/main/java/org/onap/sdc/toscaparser/api/parameters/Input.java index d59f406..5a6eb73 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/parameters/Input.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/parameters/Input.java @@ -176,4 +176,9 @@ public class Input { public void resetAnnotaions(){ annotations = null; } + + public LinkedHashMap getEntrySchema() { + return schema.getEntrySchema(); + } + } diff --git a/src/test/java/org/onap/sdc/toscaparser/api/JToscaImportTest.java b/src/test/java/org/onap/sdc/toscaparser/api/JToscaImportTest.java index ff03aed..13e17ce 100644 --- a/src/test/java/org/onap/sdc/toscaparser/api/JToscaImportTest.java +++ b/src/test/java/org/onap/sdc/toscaparser/api/JToscaImportTest.java @@ -1,22 +1,41 @@ +/*- + * ============LICENSE_START======================================================= + * Copyright (c) 2017 AT&T Intellectual Property. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * Modifications copyright (c) 2019 Fujitsu Limited. 
+ * ================================================================================ + */ package org.onap.sdc.toscaparser.api; import org.junit.Test; import org.onap.sdc.toscaparser.api.common.JToscaException; +import org.onap.sdc.toscaparser.api.elements.DataType; +import org.onap.sdc.toscaparser.api.elements.PropertyDef; +import org.onap.sdc.toscaparser.api.elements.constraints.Schema; import org.onap.sdc.toscaparser.api.parameters.Annotation; import org.onap.sdc.toscaparser.api.parameters.Input; import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; import java.io.File; -import java.util.ArrayList; -import java.util.List; -import java.util.Optional; +import java.util.*; import java.util.stream.Collectors; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; +import static org.hamcrest.CoreMatchers.containsString; +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.core.IsNull.notNullValue; +import static org.junit.Assert.*; public class JToscaImportTest { @@ -199,5 +218,92 @@ public class JToscaImportTest { assertEquals(source_type.get().getValue(), "HEAT"); } + private static final String TEST_DATATYPE_FILENAME ="csars/dataTypes-test-service.csar"; + private static final String TEST_DATATYPE_TEST1 = "TestType1"; + private static final String TEST_DATATYPE_TEST2 = "TestType2"; + private static final String TEST_DATATYPE_PROPERTY_STR = "strdata"; + private static final String TEST_DATATYPE_PROPERTY_INT = "intdata"; + private static final String TEST_DATATYPE_PROPERTY_LIST = "listdata"; + private static final String TEST_DATATYPE_PROPERTY_TYPE = "type"; + private static final String TEST_DATATYPE_PROPERTY_ENTRY_SCHEMA = "entry_schema"; + private static final String TEST_DATATYPE_TOSTRING = "data_types="; + @Test + public void testGetDataType() throws JToscaException 
{ + String fileStr = JToscaImportTest.class.getClassLoader().getResource(TEST_DATATYPE_FILENAME).getFile(); + File file = new File(fileStr); + ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); + HashSet dataTypes = toscaTemplate.getDataTypes(); + assertThat(dataTypes,notNullValue()); + assertThat(dataTypes.size(),is(2)); + + for(DataType dataType: dataTypes){ + LinkedHashMap properties; + PropertyDef property; + if(dataType.getType().equals(TEST_DATATYPE_TEST1)){ + properties = dataType.getAllProperties(); + property = properties.get(TEST_DATATYPE_PROPERTY_STR); + assertThat(property,notNullValue()); + assertThat(property.getName(),is(TEST_DATATYPE_PROPERTY_STR)); + assertThat( property.getSchema().get(TEST_DATATYPE_PROPERTY_TYPE),is(Schema.STRING)); + } + if(dataType.getType().equals(TEST_DATATYPE_TEST2)){ + properties = dataType.getAllProperties(); + property = properties.get(TEST_DATATYPE_PROPERTY_INT); + assertThat(property,notNullValue()); + assertThat(property.getName(),is(TEST_DATATYPE_PROPERTY_INT)); + assertThat(property.getSchema().get(TEST_DATATYPE_PROPERTY_TYPE),is(Schema.INTEGER)); + + property = properties.get(TEST_DATATYPE_PROPERTY_LIST); + assertThat(property,notNullValue()); + assertThat(property.getName(),is(TEST_DATATYPE_PROPERTY_LIST)); + assertThat(property.getSchema().get(TEST_DATATYPE_PROPERTY_TYPE),is(Schema.LIST)); + assertThat(property.getSchema().get(TEST_DATATYPE_PROPERTY_ENTRY_SCHEMA),is(TEST_DATATYPE_TEST1)); + + assertThat((LinkedHashMap) toscaTemplate.getTopologyTemplate().getCustomDefs().get(TEST_DATATYPE_TEST1),notNullValue()); + assertThat((LinkedHashMap) toscaTemplate.getTopologyTemplate().getCustomDefs().get(TEST_DATATYPE_TEST2),notNullValue()); + assertThat(toscaTemplate.toString(),containsString(TEST_DATATYPE_TOSTRING)); + } + } + + } + + @Test + public void testGetInputValidate() throws JToscaException { + String fileStr = 
JToscaImportTest.class.getClassLoader().getResource(TEST_DATATYPE_FILENAME).getFile(); + File file = new File(fileStr); + ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); + HashSet dataTypes = toscaTemplate.getDataTypes(); + assertThat(dataTypes,notNullValue()); + assertThat(dataTypes.size(),is(2)); + + for(DataType dataType: dataTypes) { + LinkedHashMap properties; + PropertyDef property; + if(dataType.getType().equals(TEST_DATATYPE_TEST1)) { + properties = dataType.getAllProperties(); + property = properties.get(TEST_DATATYPE_PROPERTY_STR); + assertThat(property,notNullValue()); + assertThat(property.getName(),is(TEST_DATATYPE_PROPERTY_STR)); + assertThat(property.getSchema().get(TEST_DATATYPE_PROPERTY_TYPE),is(Schema.STRING)); + } + if(dataType.getType().equals(TEST_DATATYPE_TEST2)) { + properties = dataType.getAllProperties(); + property = properties.get(TEST_DATATYPE_PROPERTY_INT); + assertThat(property,notNullValue()); + assertThat(property.getName(),is(TEST_DATATYPE_PROPERTY_INT)); + assertThat(property.getSchema().get(TEST_DATATYPE_PROPERTY_TYPE),is(Schema.INTEGER)); + + property = properties.get(TEST_DATATYPE_PROPERTY_LIST); + assertThat(property,notNullValue()); + assertThat(property.getName(),is(TEST_DATATYPE_PROPERTY_LIST)); + assertThat(property.getSchema().get(TEST_DATATYPE_PROPERTY_TYPE),is(Schema.LIST)); + assertThat(property.getSchema().get(TEST_DATATYPE_PROPERTY_ENTRY_SCHEMA),is(TEST_DATATYPE_TEST1)); + + assertThat((LinkedHashMap) toscaTemplate.getTopologyTemplate().getCustomDefs().get(TEST_DATATYPE_TEST1),notNullValue()); + assertThat((LinkedHashMap) toscaTemplate.getTopologyTemplate().getCustomDefs().get(TEST_DATATYPE_TEST2),notNullValue()); + assertThat(toscaTemplate.toString(),containsString(TEST_DATATYPE_TOSTRING)); + } + } + } } diff --git a/src/test/java/org/onap/sdc/toscaparser/api/functions/GetInputTest.java b/src/test/java/org/onap/sdc/toscaparser/api/functions/GetInputTest.java new file mode 
100644 index 0000000..577fb17 --- /dev/null +++ b/src/test/java/org/onap/sdc/toscaparser/api/functions/GetInputTest.java @@ -0,0 +1,96 @@ +/*- + * ============LICENSE_START======================================================= + * Copyright (c) 2019 Fujitsu Limited. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ +package org.onap.sdc.toscaparser.api.functions; + +import org.junit.Test; +import org.onap.sdc.toscaparser.api.*; +import org.onap.sdc.toscaparser.api.common.JToscaException; +import org.onap.sdc.toscaparser.api.elements.constraints.Schema; +import org.onap.sdc.toscaparser.api.parameters.Input; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +import java.io.File; +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.List; + +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.CoreMatchers.notNullValue; +import static org.junit.Assert.*; + +public class GetInputTest { + + private static final String TEST_FILENAME = "csars/listed_input.csar"; + private static final String TEST_FILENAME_NG = "csars/listed_input_ng.csar"; + private static final String TEST_PROPERTY_ROLE = "role"; + private static final String TEST_PROPERTY_LONGITUDE = "longitude"; + private static final String TEST_DEFAULT_VALUE = "dsvpn-hub"; + private 
static final String TEST_DESCRIPTION_VALUE = "This is used for SDWAN only"; + private static final String TEST_INPUT_TYPE="type"; + private static final String TEST_INPUT_SCHEMA_TYPE="tosca.datatypes.siteresource.site"; + private static final String TEST_TOSTRING = "get_input:[sites, 1, longitude]"; + private static final String TEST_INPUT_SITES= "sites"; + + @Test + public void validate() throws JToscaException { + String fileStr = JToscaImportTest.class.getClassLoader().getResource(TEST_FILENAME).getFile(); + File file = new File(fileStr); + ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null, false); + NodeTemplate nodeTemplate = toscaTemplate.getNodeTemplates().get(1).getSubMappingToscaTemplate().getNodeTemplates().get(0); + ArrayList inputs = toscaTemplate.getNodeTemplates().get(1).getSubMappingToscaTemplate().getInputs(); + LinkedHashMap properties = nodeTemplate.getProperties(); + assertThat(properties,notNullValue()); + assertThat(properties.size(),is(14)); + + Property property = properties.get(TEST_PROPERTY_ROLE); + assertThat(properties,notNullValue()); + assertThat(property.getName(),is(TEST_PROPERTY_ROLE)); + assertThat(property.getType(),is(Schema.STRING)); + assertThat(property.getDefault(),is(TEST_DEFAULT_VALUE)); + assertThat(property.getDescription(),is(TEST_DESCRIPTION_VALUE)); + GetInput getInput= (GetInput)property.getValue(); + assertThat(getInput.getEntrySchema().get(TEST_INPUT_TYPE).toString(),is(TEST_INPUT_SCHEMA_TYPE)); + + property = properties.get(TEST_PROPERTY_LONGITUDE); + assertThat(properties,notNullValue()); + assertThat(property.getName(), is(TEST_PROPERTY_LONGITUDE)); + assertThat(property.getValue().toString(),is(TEST_TOSTRING)); + getInput= (GetInput)property.getValue(); + ArrayList getInputArguments = getInput.getArguments(); + assertThat(getInputArguments.size(),is(3)); + assertThat(getInputArguments.get(0).toString(), is(TEST_INPUT_SITES)); + assertThat(getInputArguments.get(1).toString(), 
is("1")); + assertThat(getInputArguments.get(2).toString(), is(TEST_PROPERTY_LONGITUDE)); + + Input in = inputs.get(10); + assertThat(in.getEntrySchema().get(TEST_INPUT_TYPE), is(TEST_INPUT_SCHEMA_TYPE)); + assertThat(in.getName(),is(TEST_INPUT_SITES)); + assertThat(in.getType(),is(Input.LIST)); + } + + @Test + public void validate_ng() throws JToscaException { + //invalid file + String fileStr = JToscaImportTest.class.getClassLoader().getResource(TEST_FILENAME_NG).getFile(); + File file = new File(fileStr); + ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null,false); + + List issues = ThreadLocalsHolder.getCollector().getValidationIssueReport(); + assertTrue(issues.stream().anyMatch(x -> x.contains("JE282"))); + } + } diff --git a/src/test/resources/csars/dataTypes-test-service.csar b/src/test/resources/csars/dataTypes-test-service.csar new file mode 100644 index 0000000..b4de177 Binary files /dev/null and b/src/test/resources/csars/dataTypes-test-service.csar differ diff --git a/src/test/resources/csars/listed_input.csar b/src/test/resources/csars/listed_input.csar new file mode 100644 index 0000000..445b91a Binary files /dev/null and b/src/test/resources/csars/listed_input.csar differ diff --git a/src/test/resources/csars/listed_input_ng.csar b/src/test/resources/csars/listed_input_ng.csar new file mode 100644 index 0000000..6b3402e Binary files /dev/null and b/src/test/resources/csars/listed_input_ng.csar differ -- cgit 1.2.3-korg From 787ef0b09708ba160efa81e507fd8d0bc6719f43 Mon Sep 17 00:00:00 2001 From: "michal.banka" Date: Tue, 9 Jul 2019 14:06:30 +0200 Subject: Set Oparent as parent and fix Checkstyle errors in sdc/jtosca Change-Id: I5e6ca46a2d0989dcd46b47a0318655e62bf829d5 Issue-ID: SDC-2434 Signed-off-by: michal.banka --- checkstyle-suppressions.xml | 38 ++++++++++ pom.xml | 81 +++++++++++++--------- .../sdc/toscaparser/api/CapabilityAssignment.java | 20 ++++++ .../sdc/toscaparser/api/CapabilityAssignments.java | 20 
++++++ .../org/onap/sdc/toscaparser/api/DataEntity.java | 22 +++++- .../onap/sdc/toscaparser/api/EntityTemplate.java | 22 +++++- .../java/org/onap/sdc/toscaparser/api/Group.java | 22 +++++- .../onap/sdc/toscaparser/api/ImportsLoader.java | 22 +++++- .../org/onap/sdc/toscaparser/api/NodeTemplate.java | 22 +++++- .../java/org/onap/sdc/toscaparser/api/Policy.java | 22 +++++- .../org/onap/sdc/toscaparser/api/Property.java | 20 ++++++ .../sdc/toscaparser/api/RelationshipTemplate.java | 22 +++++- .../org/onap/sdc/toscaparser/api/Repository.java | 22 +++++- .../sdc/toscaparser/api/RequirementAssignment.java | 20 ++++++ .../toscaparser/api/RequirementAssignments.java | 20 ++++++ .../sdc/toscaparser/api/SubstitutionMappings.java | 22 +++++- .../onap/sdc/toscaparser/api/TopologyTemplate.java | 22 +++++- .../org/onap/sdc/toscaparser/api/ToscaGraph.java | 22 +++++- .../org/onap/sdc/toscaparser/api/Triggers.java | 22 +++++- .../onap/sdc/toscaparser/api/UnsupportedType.java | 22 +++++- .../toscaparser/api/common/JToscaException.java | 20 ++++++ .../api/common/JToscaValidationIssue.java | 20 ++++++ .../sdc/toscaparser/api/common/TOSCAException.java | 20 ++++++ .../api/common/ValidationIssueCollector.java | 20 ++++++ .../toscaparser/api/elements/ArtifactTypeDef.java | 22 +++++- .../sdc/toscaparser/api/elements/AttributeDef.java | 22 +++++- .../api/elements/CapabilityTypeDef.java | 20 ++++++ .../sdc/toscaparser/api/elements/DataType.java | 22 +++++- .../sdc/toscaparser/api/elements/EntityType.java | 20 ++++++ .../sdc/toscaparser/api/elements/GroupType.java | 22 +++++- .../toscaparser/api/elements/InterfacesDef.java | 22 +++++- .../sdc/toscaparser/api/elements/Metadata.java | 20 ++++++ .../sdc/toscaparser/api/elements/NodeType.java | 22 +++++- .../sdc/toscaparser/api/elements/PolicyType.java | 22 +++++- .../sdc/toscaparser/api/elements/PortSpec.java | 22 +++++- .../sdc/toscaparser/api/elements/PropertyDef.java | 20 ++++++ .../toscaparser/api/elements/RelationshipType.java | 22 
+++++- .../sdc/toscaparser/api/elements/ScalarUnit.java | 22 +++++- .../api/elements/ScalarUnitFrequency.java | 20 ++++++ .../toscaparser/api/elements/ScalarUnitSize.java | 20 ++++++ .../toscaparser/api/elements/ScalarUnitTime.java | 20 ++++++ .../api/elements/StatefulEntityType.java | 22 +++++- .../toscaparser/api/elements/TypeValidation.java | 20 ++++++ .../api/elements/constraints/Constraint.java | 20 ++++++ .../api/elements/constraints/Equal.java | 22 +++++- .../api/elements/constraints/GreaterOrEqual.java | 22 +++++- .../api/elements/constraints/GreaterThan.java | 20 ++++++ .../api/elements/constraints/InRange.java | 20 ++++++ .../api/elements/constraints/Length.java | 22 +++++- .../api/elements/constraints/LessOrEqual.java | 22 +++++- .../api/elements/constraints/LessThan.java | 22 +++++- .../api/elements/constraints/MaxLength.java | 22 +++++- .../api/elements/constraints/MinLength.java | 22 +++++- .../api/elements/constraints/Pattern.java | 22 +++++- .../api/elements/constraints/Schema.java | 22 +++++- .../api/elements/constraints/ValidValues.java | 22 +++++- .../api/elements/enums/ToscaElementNames.java | 20 ++++++ .../sdc/toscaparser/api/extensions/ExtTools.java | 22 +++++- .../onap/sdc/toscaparser/api/functions/Concat.java | 22 +++++- .../sdc/toscaparser/api/functions/Function.java | 20 ++++++ .../toscaparser/api/functions/GetAttribute.java | 22 +++++- .../api/functions/GetOperationOutput.java | 22 +++++- .../sdc/toscaparser/api/functions/GetProperty.java | 20 ++++++ .../onap/sdc/toscaparser/api/functions/Token.java | 22 +++++- .../sdc/toscaparser/api/parameters/Annotation.java | 20 ++++++ .../onap/sdc/toscaparser/api/parameters/Input.java | 20 ++++++ .../sdc/toscaparser/api/parameters/Output.java | 20 ++++++ .../org/onap/sdc/toscaparser/api/prereq/CSAR.java | 20 ++++++ .../onap/sdc/toscaparser/api/utils/CopyUtils.java | 20 ++++++ .../onap/sdc/toscaparser/api/utils/DumpUtils.java | 22 +++++- .../toscaparser/api/utils/JToscaErrorCodes.java | 22 +++++- 
.../api/utils/TOSCAVersionProperty.java | 22 +++++- .../toscaparser/api/utils/ThreadLocalsHolder.java | 20 ++++++ .../onap/sdc/toscaparser/api/utils/UrlUtils.java | 22 +++++- .../sdc/toscaparser/api/utils/ValidateUtils.java | 22 +++++- .../sdc/toscaparser/api/GetValidationIssues.java | 20 ++++++ .../sdc/toscaparser/api/JToscaMetadataParse.java | 20 ++++++ .../api/elements/CalculatePropertyByPathTest.java | 20 ++++++ .../toscaparser/api/elements/EntityTypeTest.java | 22 +++++- 79 files changed, 1672 insertions(+), 77 deletions(-) create mode 100644 checkstyle-suppressions.xml diff --git a/checkstyle-suppressions.xml b/checkstyle-suppressions.xml new file mode 100644 index 0000000..2920ca2 --- /dev/null +++ b/checkstyle-suppressions.xml @@ -0,0 +1,38 @@ + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/pom.xml b/pom.xml index 631f678..6d3d412 100644 --- a/pom.xml +++ b/pom.xml @@ -1,11 +1,19 @@ + xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> 4.0.0 org.onap.sdc.jtosca jtosca 1.6.0-SNAPSHOT sdc-jtosca + + + org.onap.oparent + oparent + 2.0.0 + + + @@ -54,8 +62,8 @@ 1.7.25 - + junit @@ -63,43 +71,44 @@ 4.12 test - + - com.opencsv - opencsv - 3.10 - test + com.opencsv + opencsv + 3.10 + test - + - - org.apache.commons - commons-io - 1.3.2 - + + org.apache.commons + commons-io + 1.3.2 + - - org.reflections - reflections - 0.9.11 - - - com.google.guava - guava - - - - - com.google.guava - guava + + org.reflections + reflections + 0.9.11 + + + com.google.guava + guava + + + + + com.google.guava + guava compile - 25.1-jre - - + 25.1-jre + + + org.apache.maven.plugins maven-javadoc-plugin @@ -121,6 +130,14 @@ + + maven-checkstyle-plugin + 2.17 + + checkstyle-suppressions.xml + checkstyle.suppressions.file + + org.apache.maven.plugins maven-site-plugin @@ -179,7 +196,7 @@ org.apache.maven.plugins maven-javadoc-plugin 2.10.3 - + diff --git 
a/src/main/java/org/onap/sdc/toscaparser/api/CapabilityAssignment.java b/src/main/java/org/onap/sdc/toscaparser/api/CapabilityAssignment.java index 8f18cc3..126c858 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/CapabilityAssignment.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/CapabilityAssignment.java @@ -1,3 +1,23 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + package org.onap.sdc.toscaparser.api; import java.util.ArrayList; diff --git a/src/main/java/org/onap/sdc/toscaparser/api/CapabilityAssignments.java b/src/main/java/org/onap/sdc/toscaparser/api/CapabilityAssignments.java index d0c6a7f..b960e77 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/CapabilityAssignments.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/CapabilityAssignments.java @@ -1,3 +1,23 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. 
All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + package org.onap.sdc.toscaparser.api; import java.util.ArrayList; diff --git a/src/main/java/org/onap/sdc/toscaparser/api/DataEntity.java b/src/main/java/org/onap/sdc/toscaparser/api/DataEntity.java index 1559e66..75802a3 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/DataEntity.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/DataEntity.java @@ -1,3 +1,23 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + package org.onap.sdc.toscaparser.api; import java.util.ArrayList; @@ -448,4 +468,4 @@ class DataEntity(object): for constraint in schema.constraints: constraint.validate(v) return value -*/ \ No newline at end of file +*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/EntityTemplate.java b/src/main/java/org/onap/sdc/toscaparser/api/EntityTemplate.java index 2178be3..b0540be 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/EntityTemplate.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/EntityTemplate.java @@ -1,3 +1,23 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + package org.onap.sdc.toscaparser.api; import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; @@ -878,4 +898,4 @@ class EntityTemplate(object): caps = self.get_capabilities() if caps and name in caps.keys(): return caps[name] -*/ \ No newline at end of file +*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/Group.java b/src/main/java/org/onap/sdc/toscaparser/api/Group.java index 15ddfb1..299ba01 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/Group.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/Group.java @@ -1,3 +1,23 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + package org.onap.sdc.toscaparser.api; import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; @@ -147,4 +167,4 @@ class Group(EntityTemplate): ValidationIssueCollector.appendException( UnknownFieldError(what='Groups "%s"' % self.name, field=key)) -*/ \ No newline at end of file +*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/ImportsLoader.java b/src/main/java/org/onap/sdc/toscaparser/api/ImportsLoader.java index 19ec182..5ef639b 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/ImportsLoader.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/ImportsLoader.java @@ -1,3 +1,23 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + package org.onap.sdc.toscaparser.api; import com.google.common.base.Charsets; @@ -744,4 +764,4 @@ class ImportsLoader(object): % {'n_uri': repo_url, 'tpl': import_name}) log.error(msg) ValidationIssueCollector.appendException(ImportError(msg)) -*/ \ No newline at end of file +*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/NodeTemplate.java b/src/main/java/org/onap/sdc/toscaparser/api/NodeTemplate.java index eaa650b..6a2e9f6 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/NodeTemplate.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/NodeTemplate.java @@ -1,3 +1,23 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + package org.onap.sdc.toscaparser.api; import static org.onap.sdc.toscaparser.api.elements.EntityType.TOSCA_DEF; @@ -808,4 +828,4 @@ class NodeTemplate(EntityTemplate): if name not in self.SECTIONS and name not in self.SPECIAL_SECTIONS: ValidationIssueCollector.appendException( UnknownFieldError(what='Node template "%s"' % self.name, - field=name))*/ \ No newline at end of file + field=name))*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/Policy.java b/src/main/java/org/onap/sdc/toscaparser/api/Policy.java index 563ea25..392a528 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/Policy.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/Policy.java @@ -1,3 +1,23 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + package org.onap.sdc.toscaparser.api; import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; @@ -209,4 +229,4 @@ class Policy(EntityTemplate): ValidationIssueCollector.appendException( UnknownFieldError(what='Policy "%s"' % self.name, field=key)) -*/ \ No newline at end of file +*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/Property.java b/src/main/java/org/onap/sdc/toscaparser/api/Property.java index 0ef9dd1..743262a 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/Property.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/Property.java @@ -1,3 +1,23 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + package org.onap.sdc.toscaparser.api; import com.google.common.collect.Lists; diff --git a/src/main/java/org/onap/sdc/toscaparser/api/RelationshipTemplate.java b/src/main/java/org/onap/sdc/toscaparser/api/RelationshipTemplate.java index 79bf83b..1b5d58a 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/RelationshipTemplate.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/RelationshipTemplate.java @@ -1,3 +1,23 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + package org.onap.sdc.toscaparser.api; import java.util.ArrayList; @@ -204,4 +224,4 @@ class RelationshipTemplate(EntityTemplate): return props def validate(self): - self._validate_properties(self.entity_tpl, self.type_definition)*/ \ No newline at end of file + self._validate_properties(self.entity_tpl, self.type_definition)*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/Repository.java b/src/main/java/org/onap/sdc/toscaparser/api/Repository.java index 5bed453..2fff7f6 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/Repository.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/Repository.java @@ -1,3 +1,23 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + package org.onap.sdc.toscaparser.api; import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; @@ -114,4 +134,4 @@ class Repository(object): ValidationIssueCollector.appendException( URLException(what=_('repsositories "%s" Invalid Url') % self.keyname)) -*/ \ No newline at end of file +*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/RequirementAssignment.java b/src/main/java/org/onap/sdc/toscaparser/api/RequirementAssignment.java index 1b4e243..f980e0c 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/RequirementAssignment.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/RequirementAssignment.java @@ -1,3 +1,23 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + package org.onap.sdc.toscaparser.api; import java.util.Map; diff --git a/src/main/java/org/onap/sdc/toscaparser/api/RequirementAssignments.java b/src/main/java/org/onap/sdc/toscaparser/api/RequirementAssignments.java index b6b9ea4..1425f6c 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/RequirementAssignments.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/RequirementAssignments.java @@ -1,3 +1,23 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + package org.onap.sdc.toscaparser.api; import java.util.ArrayList; diff --git a/src/main/java/org/onap/sdc/toscaparser/api/SubstitutionMappings.java b/src/main/java/org/onap/sdc/toscaparser/api/SubstitutionMappings.java index a87ea6c..1dec80a 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/SubstitutionMappings.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/SubstitutionMappings.java @@ -1,3 +1,23 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + package org.onap.sdc.toscaparser.api; import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; @@ -516,4 +536,4 @@ class SubstitutionMappings(object): UnknownOutputError( where=_('SubstitutionMappings with node_type ') + self.node_type, - output_name=output.name))*/ \ No newline at end of file + output_name=output.name))*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/TopologyTemplate.java b/src/main/java/org/onap/sdc/toscaparser/api/TopologyTemplate.java index 0b1dfcd..4c4afd3 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/TopologyTemplate.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/TopologyTemplate.java @@ -1,3 +1,23 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + package org.onap.sdc.toscaparser.api; import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; @@ -846,4 +866,4 @@ class TopologyTemplate(object): if topology_tpl and isinstance(topology_tpl, dict): submap_tpl = topology_tpl.get(SUBSTITUION_MAPPINGS) return SubstitutionMappings.get_node_type(submap_tpl) -*/ \ No newline at end of file +*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/ToscaGraph.java b/src/main/java/org/onap/sdc/toscaparser/api/ToscaGraph.java index fa371c3..1799f2e 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/ToscaGraph.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/ToscaGraph.java @@ -1,3 +1,23 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + package org.onap.sdc.toscaparser.api; import java.util.ArrayList; @@ -106,4 +126,4 @@ class ToscaGraph(object): if tpl.name == nodetpls.name: self._create_edge(node, tpl, rel) self._create_vertex(node) -*/ \ No newline at end of file +*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/Triggers.java b/src/main/java/org/onap/sdc/toscaparser/api/Triggers.java index cfe0138..91545c2 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/Triggers.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/Triggers.java @@ -1,3 +1,23 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + package org.onap.sdc.toscaparser.api; import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; @@ -180,4 +200,4 @@ class Triggers(EntityTemplate): ValidationIssueCollector.appendException( UnknownFieldError(what='Triggers "%s"' % self.name, field=key)) -*/ \ No newline at end of file +*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/UnsupportedType.java b/src/main/java/org/onap/sdc/toscaparser/api/UnsupportedType.java index 1117190..b7adfa4 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/UnsupportedType.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/UnsupportedType.java @@ -1,3 +1,23 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + package org.onap.sdc.toscaparser.api; import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; @@ -75,4 +95,4 @@ class UnsupportedType(object): return True else: return False -*/ \ No newline at end of file +*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/common/JToscaException.java b/src/main/java/org/onap/sdc/toscaparser/api/common/JToscaException.java index ffe936d..b96399b 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/common/JToscaException.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/common/JToscaException.java @@ -1,3 +1,23 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + package org.onap.sdc.toscaparser.api.common; public class JToscaException extends Exception { diff --git a/src/main/java/org/onap/sdc/toscaparser/api/common/JToscaValidationIssue.java b/src/main/java/org/onap/sdc/toscaparser/api/common/JToscaValidationIssue.java index d31735f..9eb8f54 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/common/JToscaValidationIssue.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/common/JToscaValidationIssue.java @@ -1,3 +1,23 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + package org.onap.sdc.toscaparser.api.common; public class JToscaValidationIssue { diff --git a/src/main/java/org/onap/sdc/toscaparser/api/common/TOSCAException.java b/src/main/java/org/onap/sdc/toscaparser/api/common/TOSCAException.java index efc834a..2769c1a 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/common/TOSCAException.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/common/TOSCAException.java @@ -1,3 +1,23 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + package org.onap.sdc.toscaparser.api.common; import java.util.IllegalFormatException; diff --git a/src/main/java/org/onap/sdc/toscaparser/api/common/ValidationIssueCollector.java b/src/main/java/org/onap/sdc/toscaparser/api/common/ValidationIssueCollector.java index 81e3e33..25bb854 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/common/ValidationIssueCollector.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/common/ValidationIssueCollector.java @@ -1,3 +1,23 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + package org.onap.sdc.toscaparser.api.common; import java.util.*; diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/ArtifactTypeDef.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/ArtifactTypeDef.java index e5cbf90..3dce5e6 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/ArtifactTypeDef.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/ArtifactTypeDef.java @@ -1,3 +1,23 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + package org.onap.sdc.toscaparser.api.elements; import java.util.LinkedHashMap; @@ -102,4 +122,4 @@ class ArtifactTypeDef(StatefulEntityType): '''Return the definition of an artifact field by name.''' if name in self.defs: return self.defs[name] -*/ \ No newline at end of file +*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/AttributeDef.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/AttributeDef.java index 702094f..2070c50 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/AttributeDef.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/AttributeDef.java @@ -1,3 +1,23 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + package org.onap.sdc.toscaparser.api.elements; import java.util.LinkedHashMap; @@ -37,4 +57,4 @@ class AttributeDef(object): self.name = name self.value = value self.schema = schema -*/ \ No newline at end of file +*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/CapabilityTypeDef.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/CapabilityTypeDef.java index e64f1b8..9f9610e 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/CapabilityTypeDef.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/CapabilityTypeDef.java @@ -1,3 +1,23 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + package org.onap.sdc.toscaparser.api.elements; import java.util.ArrayList; diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/DataType.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/DataType.java index 17f1ad4..4b6451d 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/DataType.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/DataType.java @@ -1,3 +1,23 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + package org.onap.sdc.toscaparser.api.elements; import java.util.ArrayList; @@ -113,4 +133,4 @@ class DataType(StatefulEntityType): props_def = self.get_all_properties() if props_def and name in props_def.key(): return props_def[name].value -*/ \ No newline at end of file +*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/EntityType.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/EntityType.java index e2ad766..62f51d2 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/EntityType.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/EntityType.java @@ -1,3 +1,23 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + package org.onap.sdc.toscaparser.api.elements; import java.io.IOException; diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/GroupType.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/GroupType.java index 2f8c1e0..cbcb6f6 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/GroupType.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/GroupType.java @@ -1,3 +1,23 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + package org.onap.sdc.toscaparser.api.elements; import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; @@ -241,4 +261,4 @@ class GroupType(StatefulEntityType): 'metadata "%s"' % (entry_schema_type.get('type'), entry_schema))) -*/ \ No newline at end of file +*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/InterfacesDef.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/InterfacesDef.java index 3edf3b7..ceb8fb9 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/InterfacesDef.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/InterfacesDef.java @@ -1,3 +1,23 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + package org.onap.sdc.toscaparser.api.elements; import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; @@ -264,4 +284,4 @@ class InterfacesDef(StatefulEntityType): for name in list(self.defs.keys()): ops.append(name) return ops -*/ \ No newline at end of file +*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/Metadata.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/Metadata.java index 6b818f5..dd914d4 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/Metadata.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/Metadata.java @@ -1,3 +1,23 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + package org.onap.sdc.toscaparser.api.elements; import java.util.AbstractMap; diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/NodeType.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/NodeType.java index 5ba6622..918c629 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/NodeType.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/NodeType.java @@ -1,3 +1,23 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + package org.onap.sdc.toscaparser.api.elements; import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; @@ -531,4 +551,4 @@ class NodeType(StatefulEntityType): ValidationIssueCollector.appendException( UnknownFieldError(what='Nodetype"%s"' % self.ntype, field=key)) -*/ \ No newline at end of file +*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/PolicyType.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/PolicyType.java index 0a36a35..e4d1dd6 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/PolicyType.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/PolicyType.java @@ -1,3 +1,23 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + package org.onap.sdc.toscaparser.api.elements; import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; @@ -288,4 +308,4 @@ class PolicyType(StatefulEntityType): 'metadata "%s"' % (entry_schema_type.get('type'), entry_schema))) -*/ \ No newline at end of file +*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/PortSpec.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/PortSpec.java index 8fb65df..65304dd 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/PortSpec.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/PortSpec.java @@ -1,3 +1,23 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + package org.onap.sdc.toscaparser.api.elements; import org.onap.sdc.toscaparser.api.DataEntity; @@ -157,4 +177,4 @@ class PortSpec(object): % {'value': properties, 'type': PortSpec.SHORTNAME} ValidationIssueCollector.appendException( ValueError(msg)) -*/ \ No newline at end of file +*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/PropertyDef.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/PropertyDef.java index e37603d..6e1fe61 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/PropertyDef.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/PropertyDef.java @@ -1,3 +1,23 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + package org.onap.sdc.toscaparser.api.elements; import java.util.LinkedHashMap; diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/RelationshipType.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/RelationshipType.java index 0197d54..17f420d 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/RelationshipType.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/RelationshipType.java @@ -1,3 +1,23 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + package org.onap.sdc.toscaparser.api.elements; import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; @@ -98,4 +118,4 @@ class RelationshipType(StatefulEntityType): ValidationIssueCollector.appendException( UnknownFieldError(what='Relationshiptype "%s"' % self.type, field=key)) -*/ \ No newline at end of file +*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnit.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnit.java index f7f2a8a..eeaa07c 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnit.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnit.java @@ -1,3 +1,23 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + package org.onap.sdc.toscaparser.api.elements; import java.util.HashMap; @@ -259,4 +279,4 @@ def get_scalarunit_value(type, value, unit=None): else: ValidationIssueCollector.appendException( TypeError(_('"%s" is not a valid scalar-unit type.') % type)) -*/ \ No newline at end of file +*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitFrequency.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitFrequency.java index 6c05c43..59664ca 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitFrequency.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitFrequency.java @@ -1,3 +1,23 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + package org.onap.sdc.toscaparser.api.elements; public class ScalarUnitFrequency extends ScalarUnit { diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitSize.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitSize.java index c788c32..d29d8a2 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitSize.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitSize.java @@ -1,3 +1,23 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + package org.onap.sdc.toscaparser.api.elements; public class ScalarUnitSize extends ScalarUnit { diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitTime.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitTime.java index 274fbf0..45848af 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitTime.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitTime.java @@ -1,3 +1,23 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + package org.onap.sdc.toscaparser.api.elements; public class ScalarUnitTime extends ScalarUnit { diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/StatefulEntityType.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/StatefulEntityType.java index b9ce6c8..ef9159f 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/StatefulEntityType.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/StatefulEntityType.java @@ -1,3 +1,23 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + package org.onap.sdc.toscaparser.api.elements; import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; @@ -215,4 +235,4 @@ class StatefulEntityType(EntityType): attrs_def = self.get_attributes_def() if attrs_def and name in attrs_def.keys(): return attrs_def[name].value -*/ \ No newline at end of file +*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/TypeValidation.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/TypeValidation.java index 3376c69..9321064 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/TypeValidation.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/TypeValidation.java @@ -1,3 +1,23 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + package org.onap.sdc.toscaparser.api.elements; import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Constraint.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Constraint.java index fb183f8..82f6718 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Constraint.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Constraint.java @@ -1,3 +1,23 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + package org.onap.sdc.toscaparser.api.elements.constraints; import java.util.ArrayList; diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Equal.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Equal.java index c9a66d9..16e379a 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Equal.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Equal.java @@ -1,3 +1,23 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + package org.onap.sdc.toscaparser.api.elements.constraints; public class Equal extends Constraint { @@ -58,4 +78,4 @@ def _err_msg(self, value): dict(pname=self.property_name, pvalue=self.value_msg, cvalue=self.constraint_value_msg)) -*/ \ No newline at end of file +*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/GreaterOrEqual.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/GreaterOrEqual.java index f9275a5..4d6b1cf 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/GreaterOrEqual.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/GreaterOrEqual.java @@ -1,3 +1,23 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + package org.onap.sdc.toscaparser.api.elements.constraints; import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; @@ -111,4 +131,4 @@ def _err_msg(self, value): cvalue=self.constraint_value_msg)) -*/ \ No newline at end of file +*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/GreaterThan.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/GreaterThan.java index 1ffe3f1..c716821 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/GreaterThan.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/GreaterThan.java @@ -1,3 +1,23 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + package org.onap.sdc.toscaparser.api.elements.constraints; import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/InRange.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/InRange.java index 829bc1f..32719fa 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/InRange.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/InRange.java @@ -1,3 +1,23 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + package org.onap.sdc.toscaparser.api.elements.constraints; import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Length.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Length.java index db0eaac..1abdcfd 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Length.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Length.java @@ -1,3 +1,23 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + package org.onap.sdc.toscaparser.api.elements.constraints; import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; @@ -76,4 +96,4 @@ public class Length extends Constraint { dict(pname=self.property_name, pvalue=value, cvalue=self.constraint_value)) -*/ \ No newline at end of file +*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/LessOrEqual.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/LessOrEqual.java index 7ea333d..9f1cd65 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/LessOrEqual.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/LessOrEqual.java @@ -1,3 +1,23 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + package org.onap.sdc.toscaparser.api.elements.constraints; import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; @@ -104,4 +124,4 @@ class LessOrEqual(Constraint): dict(pname=self.property_name, pvalue=self.value_msg, cvalue=self.constraint_value_msg)) -*/ \ No newline at end of file +*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/LessThan.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/LessThan.java index 428f10c..b893fea 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/LessThan.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/LessThan.java @@ -1,3 +1,23 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + package org.onap.sdc.toscaparser.api.elements.constraints; import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; @@ -101,4 +121,4 @@ def _err_msg(self, value): dict(pname=self.property_name, pvalue=self.value_msg, cvalue=self.constraint_value_msg)) -*/ \ No newline at end of file +*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/MaxLength.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/MaxLength.java index 7ac7df9..2cb20eb 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/MaxLength.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/MaxLength.java @@ -1,3 +1,23 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + package org.onap.sdc.toscaparser.api.elements.constraints; import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; @@ -88,4 +108,4 @@ class MaxLength(Constraint): dict(pname=self.property_name, pvalue=value, cvalue=self.constraint_value)) -*/ \ No newline at end of file +*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/MinLength.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/MinLength.java index fa1fbe2..e7d0a9d 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/MinLength.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/MinLength.java @@ -1,3 +1,23 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + package org.onap.sdc.toscaparser.api.elements.constraints; import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; @@ -88,4 +108,4 @@ class MinLength(Constraint): dict(pname=self.property_name, pvalue=value, cvalue=self.constraint_value)) -*/ \ No newline at end of file +*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Pattern.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Pattern.java index cf3b856..f1b374e 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Pattern.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Pattern.java @@ -1,3 +1,23 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + package org.onap.sdc.toscaparser.api.elements.constraints; import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; @@ -94,4 +114,4 @@ class Pattern(Constraint): dict(pname=self.property_name, pvalue=value, cvalue=self.constraint_value)) -*/ \ No newline at end of file +*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Schema.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Schema.java index c0ed6bc..06a9cd0 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Schema.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Schema.java @@ -1,3 +1,23 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + package org.onap.sdc.toscaparser.api.elements.constraints; import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; @@ -285,4 +305,4 @@ def __len__(self): if self._len is None: self._len = len(list(iter(self))) return self._len -*/ \ No newline at end of file +*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/ValidValues.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/ValidValues.java index 23f25ed..d09caae 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/ValidValues.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/ValidValues.java @@ -1,3 +1,23 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + package org.onap.sdc.toscaparser.api.elements.constraints; import java.util.ArrayList; @@ -81,4 +101,4 @@ def _err_msg(self, value): cvalue=allowed)) -*/ \ No newline at end of file +*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/enums/ToscaElementNames.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/enums/ToscaElementNames.java index 0ee201c..715123b 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/enums/ToscaElementNames.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/enums/ToscaElementNames.java @@ -1,3 +1,23 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + package org.onap.sdc.toscaparser.api.elements.enums; public enum ToscaElementNames { diff --git a/src/main/java/org/onap/sdc/toscaparser/api/extensions/ExtTools.java b/src/main/java/org/onap/sdc/toscaparser/api/extensions/ExtTools.java index 4a8309e..8e0915e 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/extensions/ExtTools.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/extensions/ExtTools.java @@ -1,3 +1,23 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + package org.onap.sdc.toscaparser.api.extensions; import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; @@ -189,4 +209,4 @@ class ExtTools(object): return versiondata.get('defs_file') else: return None -*/ \ No newline at end of file +*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/functions/Concat.java b/src/main/java/org/onap/sdc/toscaparser/api/functions/Concat.java index 0b09c73..d47fd57 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/functions/Concat.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/functions/Concat.java @@ -1,3 +1,23 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + package org.onap.sdc.toscaparser.api.functions; import org.onap.sdc.toscaparser.api.TopologyTemplate; @@ -74,4 +94,4 @@ def validate(self): def result(self): return self -*/ \ No newline at end of file +*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/functions/Function.java b/src/main/java/org/onap/sdc/toscaparser/api/functions/Function.java index cb40c4c..2b4759f 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/functions/Function.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/functions/Function.java @@ -1,3 +1,23 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + package org.onap.sdc.toscaparser.api.functions; import java.util.*; diff --git a/src/main/java/org/onap/sdc/toscaparser/api/functions/GetAttribute.java b/src/main/java/org/onap/sdc/toscaparser/api/functions/GetAttribute.java index 8648e4e..aa85eb2 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/functions/GetAttribute.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/functions/GetAttribute.java @@ -1,3 +1,23 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + package org.onap.sdc.toscaparser.api.functions; import org.onap.sdc.toscaparser.api.*; @@ -521,4 +541,4 @@ def node_template_name(self): @property def attribute_name(self): return self.args[1] -*/ \ No newline at end of file +*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/functions/GetOperationOutput.java b/src/main/java/org/onap/sdc/toscaparser/api/functions/GetOperationOutput.java index 342e18a..2acc79a 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/functions/GetOperationOutput.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/functions/GetOperationOutput.java @@ -1,3 +1,23 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + package org.onap.sdc.toscaparser.api.functions; import org.onap.sdc.toscaparser.api.EntityTemplate; @@ -226,4 +246,4 @@ def _find_node_template(self, node_template_name): def result(self): return self -*/ \ No newline at end of file +*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/functions/GetProperty.java b/src/main/java/org/onap/sdc/toscaparser/api/functions/GetProperty.java index fca5f7f..2da57ef 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/functions/GetProperty.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/functions/GetProperty.java @@ -1,3 +1,23 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + package org.onap.sdc.toscaparser.api.functions; import org.onap.sdc.toscaparser.api.*; diff --git a/src/main/java/org/onap/sdc/toscaparser/api/functions/Token.java b/src/main/java/org/onap/sdc/toscaparser/api/functions/Token.java index 771345b..e8e160e 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/functions/Token.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/functions/Token.java @@ -1,3 +1,23 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + package org.onap.sdc.toscaparser.api.functions; import org.onap.sdc.toscaparser.api.TopologyTemplate; @@ -108,4 +128,4 @@ def validate(self): def result(self): return self -*/ \ No newline at end of file +*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/parameters/Annotation.java b/src/main/java/org/onap/sdc/toscaparser/api/parameters/Annotation.java index 74b738f..397c637 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/parameters/Annotation.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/parameters/Annotation.java @@ -1,3 +1,23 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + package org.onap.sdc.toscaparser.api.parameters; import java.util.ArrayList; diff --git a/src/main/java/org/onap/sdc/toscaparser/api/parameters/Input.java b/src/main/java/org/onap/sdc/toscaparser/api/parameters/Input.java index 5a6eb73..106fe94 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/parameters/Input.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/parameters/Input.java @@ -1,3 +1,23 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + package org.onap.sdc.toscaparser.api.parameters; import java.util.ArrayList; diff --git a/src/main/java/org/onap/sdc/toscaparser/api/parameters/Output.java b/src/main/java/org/onap/sdc/toscaparser/api/parameters/Output.java index 093c6cf..df122f0 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/parameters/Output.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/parameters/Output.java @@ -1,3 +1,23 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + package org.onap.sdc.toscaparser.api.parameters; import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; diff --git a/src/main/java/org/onap/sdc/toscaparser/api/prereq/CSAR.java b/src/main/java/org/onap/sdc/toscaparser/api/prereq/CSAR.java index 98625e0..92d5194 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/prereq/CSAR.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/prereq/CSAR.java @@ -1,3 +1,23 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + package org.onap.sdc.toscaparser.api.prereq; import org.onap.sdc.toscaparser.api.ImportsLoader; diff --git a/src/main/java/org/onap/sdc/toscaparser/api/utils/CopyUtils.java b/src/main/java/org/onap/sdc/toscaparser/api/utils/CopyUtils.java index 55e9ba1..a15afe4 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/utils/CopyUtils.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/utils/CopyUtils.java @@ -1,3 +1,23 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + package org.onap.sdc.toscaparser.api.utils; import java.util.ArrayList; diff --git a/src/main/java/org/onap/sdc/toscaparser/api/utils/DumpUtils.java b/src/main/java/org/onap/sdc/toscaparser/api/utils/DumpUtils.java index f23e1c6..d87103b 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/utils/DumpUtils.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/utils/DumpUtils.java @@ -1,3 +1,23 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + package org.onap.sdc.toscaparser.api.utils; import java.util.ArrayList; @@ -52,4 +72,4 @@ public class DumpUtils { System.out.println("Exception!! 
" + e.getMessage()); } } -} \ No newline at end of file +} diff --git a/src/main/java/org/onap/sdc/toscaparser/api/utils/JToscaErrorCodes.java b/src/main/java/org/onap/sdc/toscaparser/api/utils/JToscaErrorCodes.java index 3abd3b1..3515ed0 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/utils/JToscaErrorCodes.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/utils/JToscaErrorCodes.java @@ -1,3 +1,23 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + package org.onap.sdc.toscaparser.api.utils; @@ -29,4 +49,4 @@ public enum JToscaErrorCodes { } return null; } -} \ No newline at end of file +} diff --git a/src/main/java/org/onap/sdc/toscaparser/api/utils/TOSCAVersionProperty.java b/src/main/java/org/onap/sdc/toscaparser/api/utils/TOSCAVersionProperty.java index caba044..838fb07 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/utils/TOSCAVersionProperty.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/utils/TOSCAVersionProperty.java @@ -1,3 +1,23 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + package org.onap.sdc.toscaparser.api.utils; import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; @@ -179,4 +199,4 @@ class TOSCAVersionProperty(object): def get_version(self): return self.version -*/ \ No newline at end of file +*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/utils/ThreadLocalsHolder.java b/src/main/java/org/onap/sdc/toscaparser/api/utils/ThreadLocalsHolder.java index 8a04c0d..2ea8d08 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/utils/ThreadLocalsHolder.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/utils/ThreadLocalsHolder.java @@ -1,3 +1,23 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + package org.onap.sdc.toscaparser.api.utils; import org.onap.sdc.toscaparser.api.common.ValidationIssueCollector; diff --git a/src/main/java/org/onap/sdc/toscaparser/api/utils/UrlUtils.java b/src/main/java/org/onap/sdc/toscaparser/api/utils/UrlUtils.java index 3eb156d..72e5122 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/utils/UrlUtils.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/utils/UrlUtils.java @@ -1,3 +1,23 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + package org.onap.sdc.toscaparser.api.utils; import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; @@ -120,4 +140,4 @@ class UrlUtils(object): Otherwise, returns false. 
""" return urllib2.urlopen(url).getcode() == 200 -*/ \ No newline at end of file +*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/utils/ValidateUtils.java b/src/main/java/org/onap/sdc/toscaparser/api/utils/ValidateUtils.java index 9623258..a9786ae 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/utils/ValidateUtils.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/utils/ValidateUtils.java @@ -1,3 +1,23 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + package org.onap.sdc.toscaparser.api.utils; import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; @@ -422,4 +442,4 @@ def validate_timestamp(value): {'val': value, 'msg': original_err_msg})) return -*/ \ No newline at end of file +*/ diff --git a/src/test/java/org/onap/sdc/toscaparser/api/GetValidationIssues.java b/src/test/java/org/onap/sdc/toscaparser/api/GetValidationIssues.java index a5afa6b..3902219 100644 --- a/src/test/java/org/onap/sdc/toscaparser/api/GetValidationIssues.java +++ b/src/test/java/org/onap/sdc/toscaparser/api/GetValidationIssues.java @@ -1,3 +1,23 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + package org.onap.sdc.toscaparser.api; import com.opencsv.CSVWriter; diff --git a/src/test/java/org/onap/sdc/toscaparser/api/JToscaMetadataParse.java b/src/test/java/org/onap/sdc/toscaparser/api/JToscaMetadataParse.java index 37c6d18..f8295d7 100644 --- a/src/test/java/org/onap/sdc/toscaparser/api/JToscaMetadataParse.java +++ b/src/test/java/org/onap/sdc/toscaparser/api/JToscaMetadataParse.java @@ -1,3 +1,23 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + package org.onap.sdc.toscaparser.api; import static org.junit.Assert.assertEquals; diff --git a/src/test/java/org/onap/sdc/toscaparser/api/elements/CalculatePropertyByPathTest.java b/src/test/java/org/onap/sdc/toscaparser/api/elements/CalculatePropertyByPathTest.java index 59c8445..eaf182e 100644 --- a/src/test/java/org/onap/sdc/toscaparser/api/elements/CalculatePropertyByPathTest.java +++ b/src/test/java/org/onap/sdc/toscaparser/api/elements/CalculatePropertyByPathTest.java @@ -1,3 +1,23 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + package org.onap.sdc.toscaparser.api.elements; import org.junit.BeforeClass; diff --git a/src/test/java/org/onap/sdc/toscaparser/api/elements/EntityTypeTest.java b/src/test/java/org/onap/sdc/toscaparser/api/elements/EntityTypeTest.java index 2a88c2b..271eb59 100644 --- a/src/test/java/org/onap/sdc/toscaparser/api/elements/EntityTypeTest.java +++ b/src/test/java/org/onap/sdc/toscaparser/api/elements/EntityTypeTest.java @@ -1,3 +1,23 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + package org.onap.sdc.toscaparser.api.elements; import org.junit.After; @@ -52,4 +72,4 @@ public class EntityTypeTest { EntityType.TOSCA_DEF = (LinkedHashMap) origMap; } -} \ No newline at end of file +} -- cgit 1.2.3-korg From cad897335db0c7912782482fa71932cc2b4ce421 Mon Sep 17 00:00:00 2001 From: Tomasz Golabek Date: Fri, 12 Jul 2019 12:32:12 +0200 Subject: Maven staging plugin removed Required by global-jjb Change-Id: I7788b72d0aa3bc0a0a7d92caf3e5c23d59c668e1 Issue-ID: SDC-2444 Signed-off-by: Tomasz Golabek --- pom.xml | 14 -------------- 1 file changed, 14 deletions(-) diff --git a/pom.xml b/pom.xml index 6d3d412..c619031 100644 --- a/pom.xml +++ b/pom.xml @@ -34,7 +34,6 @@ /content/sites/site/org/onap/sdc/jtosca/${project.version} snapshots releases - 176c31dfe190a ${project.build.sourceEncoding} true @@ -169,19 +168,6 @@ - - - org.sonatype.plugins - nexus-staging-maven-plugin - 1.6.7 - true - - ${nexus.proxy} - ${staging.profile.id} - onap-staging - - - org.apache.maven.plugins maven-compiler-plugin -- cgit 1.2.3-korg From d0d5690f13b9c794044bfe6bd7ac87557dd3dcea Mon Sep 17 00:00:00 2001 From: "michal.banka" Date: Wed, 10 Jul 2019 16:14:27 +0200 Subject: Fix checkstyle violations in sdc/jtosca Number of checkstyle violations has decreased from about 8200 to 450. 
Change-Id: I31f763d7f51fa66958aab68d094280189c612417 Issue-ID: SDC-2434 Signed-off-by: michal.banka --- .../sdc/toscaparser/api/CapabilityAssignment.java | 182 +-- .../sdc/toscaparser/api/CapabilityAssignments.java | 13 +- .../org/onap/sdc/toscaparser/api/DataEntity.java | 368 +++-- .../onap/sdc/toscaparser/api/EntityTemplate.java | 862 ++++++----- .../java/org/onap/sdc/toscaparser/api/Group.java | 177 +-- .../onap/sdc/toscaparser/api/ImportsLoader.java | 709 +++++---- .../org/onap/sdc/toscaparser/api/NodeTemplate.java | 943 ++++++------ .../java/org/onap/sdc/toscaparser/api/Policy.java | 250 ++-- .../org/onap/sdc/toscaparser/api/Property.java | 369 +++-- .../sdc/toscaparser/api/RelationshipTemplate.java | 228 +-- .../org/onap/sdc/toscaparser/api/Repository.java | 124 +- .../sdc/toscaparser/api/RequirementAssignment.java | 12 +- .../toscaparser/api/RequirementAssignments.java | 6 +- .../sdc/toscaparser/api/SubstitutionMappings.java | 440 +++--- .../onap/sdc/toscaparser/api/TopologyTemplate.java | 900 ++++++------ .../org/onap/sdc/toscaparser/api/ToscaGraph.java | 114 +- .../onap/sdc/toscaparser/api/ToscaTemplate.java | 1552 ++++++++++---------- .../org/onap/sdc/toscaparser/api/Triggers.java | 232 ++- .../onap/sdc/toscaparser/api/UnsupportedType.java | 29 +- .../toscaparser/api/common/JToscaException.java | 40 +- .../api/common/JToscaValidationIssue.java | 52 +- .../sdc/toscaparser/api/common/TOSCAException.java | 67 +- .../api/common/ValidationIssueCollector.java | 16 +- .../toscaparser/api/elements/ArtifactTypeDef.java | 102 +- .../sdc/toscaparser/api/elements/AttributeDef.java | 24 +- .../api/elements/CapabilityTypeDef.java | 228 ++- .../sdc/toscaparser/api/elements/DataType.java | 88 +- .../sdc/toscaparser/api/elements/EntityType.java | 354 +++-- .../sdc/toscaparser/api/elements/GroupType.java | 263 ++-- .../toscaparser/api/elements/InterfacesDef.java | 322 ++-- .../sdc/toscaparser/api/elements/Metadata.java | 59 +- .../sdc/toscaparser/api/elements/NodeType.java 
| 513 ++++--- .../sdc/toscaparser/api/elements/PolicyType.java | 286 ++-- .../sdc/toscaparser/api/elements/PortSpec.java | 55 +- .../sdc/toscaparser/api/elements/PropertyDef.java | 206 ++- .../toscaparser/api/elements/RelationshipType.java | 92 +- .../sdc/toscaparser/api/elements/ScalarUnit.java | 281 ++-- .../api/elements/ScalarUnitFrequency.java | 25 +- .../toscaparser/api/elements/ScalarUnitSize.java | 36 +- .../toscaparser/api/elements/ScalarUnitTime.java | 26 +- .../api/elements/StatefulEntityType.java | 216 ++- .../toscaparser/api/elements/TypeValidation.java | 126 +- .../api/elements/constraints/Constraint.java | 336 +++-- .../api/elements/constraints/Equal.java | 56 +- .../api/elements/constraints/GreaterOrEqual.java | 126 +- .../api/elements/constraints/GreaterThan.java | 109 +- .../api/elements/constraints/InRange.java | 139 +- .../api/elements/constraints/Length.java | 71 +- .../api/elements/constraints/LessOrEqual.java | 117 +- .../api/elements/constraints/LessThan.java | 107 +- .../api/elements/constraints/MaxLength.java | 93 +- .../api/elements/constraints/MinLength.java | 108 +- .../api/elements/constraints/Pattern.java | 105 +- .../api/elements/constraints/Schema.java | 249 ++-- .../api/elements/constraints/ValidValues.java | 65 +- .../toscaparser/api/elements/enums/FileSize.java | 32 + .../api/elements/enums/ToscaElementNames.java | 34 +- .../sdc/toscaparser/api/extensions/ExtTools.java | 188 ++- .../onap/sdc/toscaparser/api/functions/Concat.java | 40 +- .../sdc/toscaparser/api/functions/Function.java | 318 ++-- .../toscaparser/api/functions/GetAttribute.java | 534 +++---- .../sdc/toscaparser/api/functions/GetInput.java | 222 ++- .../api/functions/GetOperationOutput.java | 246 ++-- .../sdc/toscaparser/api/functions/GetProperty.java | 663 +++++---- .../onap/sdc/toscaparser/api/functions/Token.java | 59 +- .../sdc/toscaparser/api/parameters/Annotation.java | 140 +- .../onap/sdc/toscaparser/api/parameters/Input.java | 315 ++-- 
.../sdc/toscaparser/api/parameters/Output.java | 135 +- .../org/onap/sdc/toscaparser/api/prereq/CSAR.java | 765 +++++----- .../onap/sdc/toscaparser/api/utils/CopyUtils.java | 41 +- .../onap/sdc/toscaparser/api/utils/DumpUtils.java | 91 +- .../toscaparser/api/utils/JToscaErrorCodes.java | 12 +- .../api/utils/TOSCAVersionProperty.java | 157 +- .../toscaparser/api/utils/ThreadLocalsHolder.java | 17 +- .../onap/sdc/toscaparser/api/utils/UrlUtils.java | 88 +- .../sdc/toscaparser/api/utils/ValidateUtils.java | 502 ++++--- .../sdc/toscaparser/api/GetValidationIssues.java | 13 +- .../onap/sdc/toscaparser/api/JToscaImportTest.java | 478 +++--- .../sdc/toscaparser/api/JToscaMetadataParse.java | 26 +- .../api/elements/CalculatePropertyByPathTest.java | 4 +- .../toscaparser/api/elements/EntityTypeTest.java | 72 +- .../toscaparser/api/functions/GetInputTest.java | 44 +- 82 files changed, 8921 insertions(+), 8983 deletions(-) create mode 100644 src/main/java/org/onap/sdc/toscaparser/api/elements/enums/FileSize.java diff --git a/src/main/java/org/onap/sdc/toscaparser/api/CapabilityAssignment.java b/src/main/java/org/onap/sdc/toscaparser/api/CapabilityAssignment.java index 126c858..bb7b47d 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/CapabilityAssignment.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/CapabilityAssignment.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -28,97 +28,103 @@ import org.onap.sdc.toscaparser.api.elements.CapabilityTypeDef; import org.onap.sdc.toscaparser.api.elements.PropertyDef; public class CapabilityAssignment { - - private String name; - private LinkedHashMap _properties; - private CapabilityTypeDef _definition; - private LinkedHashMap _customDef; - - public CapabilityAssignment(String cname, - LinkedHashMap cproperties, - CapabilityTypeDef cdefinition, LinkedHashMap customDef) { - name = cname; - _properties = cproperties; - _definition = cdefinition; - _customDef = customDef; - } - - /** - * Get the properties list for capability - * @return list of property objects for capability - */ - public ArrayList getPropertiesObjects() { - // Return a list of property objects - ArrayList properties = new ArrayList(); - LinkedHashMap props = _properties; - if(props != null) { - for(Map.Entry me: props.entrySet()) { - String pname = me.getKey(); - Object pvalue = me.getValue(); - - LinkedHashMap propsDef = _definition.getPropertiesDef(); - if(propsDef != null) { - PropertyDef pd = (PropertyDef)propsDef.get(pname); - if(pd != null) { - properties.add(new Property(pname,pvalue,pd.getSchema(), _customDef)); - } - } - } - } - return properties; - } - - /** - * Get the map of properties - * @return map of all properties contains dictionary of property name and property object - */ - public LinkedHashMap getProperties() { + + private String name; + private LinkedHashMap _properties; + private CapabilityTypeDef _definition; + private LinkedHashMap _customDef; + + public CapabilityAssignment(String cname, + LinkedHashMap cproperties, + CapabilityTypeDef cdefinition, LinkedHashMap customDef) { + name = cname; + _properties = cproperties; + _definition = cdefinition; + _customDef = customDef; + } + + /** + * Get the properties list for capability + * + * @return list of property objects for capability + */ + public ArrayList getPropertiesObjects() { + // Return a list of property objects + ArrayList properties = 
new ArrayList(); + LinkedHashMap props = _properties; + if (props != null) { + for (Map.Entry me : props.entrySet()) { + String pname = me.getKey(); + Object pvalue = me.getValue(); + + LinkedHashMap propsDef = _definition.getPropertiesDef(); + if (propsDef != null) { + PropertyDef pd = (PropertyDef) propsDef.get(pname); + if (pd != null) { + properties.add(new Property(pname, pvalue, pd.getSchema(), _customDef)); + } + } + } + } + return properties; + } + + /** + * Get the map of properties + * + * @return map of all properties contains dictionary of property name and property object + */ + public LinkedHashMap getProperties() { // Return a dictionary of property name-object pairs - LinkedHashMap npps = new LinkedHashMap<>(); - for(Property p: getPropertiesObjects()) { - npps.put(p.getName(),p); - } - return npps; - } - - /** - * Get the property value by name - * @param pname - the property name for capability - * @return the property value for this name - */ - public Object getPropertyValue(String pname) { + LinkedHashMap npps = new LinkedHashMap<>(); + for (Property p : getPropertiesObjects()) { + npps.put(p.getName(), p); + } + return npps; + } + + /** + * Get the property value by name + * + * @param pname - the property name for capability + * @return the property value for this name + */ + public Object getPropertyValue(String pname) { // Return the value of a given property name - LinkedHashMap props = getProperties(); - if(props != null && props.get(pname) != null) { + LinkedHashMap props = getProperties(); + if (props != null && props.get(pname) != null) { return props.get(name).getValue(); } return null; - } - - /** - * Get the name for capability - * @return the name for capability - */ - public String getName() { - return name; - } - - /** - * Get the definition for capability - * @return CapabilityTypeDef - contain definition for capability - */ - public CapabilityTypeDef getDefinition() { - return _definition; - } - - /** - * Set the property for 
capability - * @param pname - the property name for capability to set - * @param pvalue - the property valiue for capability to set - */ - public void setProperty(String pname,Object pvalue) { - _properties.put(pname,pvalue); - } + } + + /** + * Get the name for capability + * + * @return the name for capability + */ + public String getName() { + return name; + } + + /** + * Get the definition for capability + * + * @return CapabilityTypeDef - contain definition for capability + */ + public CapabilityTypeDef getDefinition() { + return _definition; + } + + /** + * Set the property for capability + * + * @param pname - the property name for capability to set + * @param pvalue - the property valiue for capability to set + */ + public void setProperty(String pname, Object pvalue) { + _properties.put(pname, pvalue); + } @Override public String toString() { diff --git a/src/main/java/org/onap/sdc/toscaparser/api/CapabilityAssignments.java b/src/main/java/org/onap/sdc/toscaparser/api/CapabilityAssignments.java index b960e77..28ada96 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/CapabilityAssignments.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/CapabilityAssignments.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -28,15 +28,16 @@ import java.util.stream.Collectors; public class CapabilityAssignments { - private Map capabilityAssignments; + private Map capabilityAssignments; - public CapabilityAssignments(Map capabilityAssignments) { + public CapabilityAssignments(Map capabilityAssignments) { this.capabilityAssignments = capabilityAssignments != null ? new HashMap<>(capabilityAssignments) : new HashMap<>(); } /** * Get all capability assignments for node template.
* This object can be either the original one, holding all capability assignments for this node template,or a filtered one, holding a filtered subset.
+ * * @return list of capability assignments for the node template.
* If there are no capability assignments, empty list is returned. */ @@ -46,12 +47,13 @@ public class CapabilityAssignments { /** * Filter capability assignments by capability tosca type. + * * @param type - The tosca type of capability assignments. * @return CapabilityAssignments object, containing capability assignments of this type.
* If no such found, filtering will result in an empty collection. */ public CapabilityAssignments getCapabilitiesByType(String type) { - Map capabilityAssignmentsMap = capabilityAssignments.entrySet().stream() + Map capabilityAssignmentsMap = capabilityAssignments.entrySet().stream() .filter(cap -> cap.getValue().getDefinition().getType().equals(type)).collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); return new CapabilityAssignments(capabilityAssignmentsMap); @@ -59,6 +61,7 @@ public class CapabilityAssignments { /** * Get capability assignment by capability name. + * * @param name - The name of capability assignment * @return capability assignment with this name, or null if no such capability assignment was found. */ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/DataEntity.java b/src/main/java/org/onap/sdc/toscaparser/api/DataEntity.java index 75802a3..e95fe72 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/DataEntity.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/DataEntity.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -20,12 +20,13 @@ package org.onap.sdc.toscaparser.api; -import java.util.ArrayList; -import java.util.LinkedHashMap; -import java.util.List; - import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; -import org.onap.sdc.toscaparser.api.elements.*; +import org.onap.sdc.toscaparser.api.elements.DataType; +import org.onap.sdc.toscaparser.api.elements.PortSpec; +import org.onap.sdc.toscaparser.api.elements.PropertyDef; +import org.onap.sdc.toscaparser.api.elements.ScalarUnitFrequency; +import org.onap.sdc.toscaparser.api.elements.ScalarUnitSize; +import org.onap.sdc.toscaparser.api.elements.ScalarUnitTime; import org.onap.sdc.toscaparser.api.elements.constraints.Constraint; import org.onap.sdc.toscaparser.api.elements.constraints.Schema; import org.onap.sdc.toscaparser.api.functions.Function; @@ -33,132 +34,134 @@ import org.onap.sdc.toscaparser.api.utils.TOSCAVersionProperty; import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; import org.onap.sdc.toscaparser.api.utils.ValidateUtils; +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.List; + public class DataEntity { // A complex data value entity - - private LinkedHashMap customDef; - private DataType dataType; - private LinkedHashMap schema; - private Object value; - private String propertyName; - - public DataEntity(String _dataTypeName,Object _valueDict, - LinkedHashMap _customDef,String _propName) { - + + private LinkedHashMap customDef; + private DataType dataType; + private LinkedHashMap schema; + private Object value; + private String propertyName; + + public DataEntity(String _dataTypeName, Object _valueDict, + LinkedHashMap _customDef, String _propName) { + customDef = _customDef; - dataType = new DataType(_dataTypeName,_customDef); + dataType = new DataType(_dataTypeName, _customDef); schema = dataType.getAllProperties(); value = _valueDict; propertyName = _propName; - } - - @SuppressWarnings("unchecked") - public Object validate() { - // Validate the value 
by the definition of the datatype + } + + @SuppressWarnings("unchecked") + public Object validate() { + // Validate the value by the definition of the datatype // A datatype can not have both 'type' and 'properties' definitions. // If the datatype has 'type' definition - if(dataType.getValueType() != null) { - value = DataEntity.validateDatatype(dataType.getValueType(),value,null,customDef,null); - Schema schemaCls = new Schema(propertyName,dataType.getDefs()); - for(Constraint constraint: schemaCls.getConstraints()) { + if (dataType.getValueType() != null) { + value = DataEntity.validateDatatype(dataType.getValueType(), value, null, customDef, null); + Schema schemaCls = new Schema(propertyName, dataType.getDefs()); + for (Constraint constraint : schemaCls.getConstraints()) { constraint.validate(value); } } // If the datatype has 'properties' definition else { - if(!(value instanceof LinkedHashMap)) { - //ERROR under investigation - String checkedVal = value != null ? value.toString() : null; + if (!(value instanceof LinkedHashMap)) { + //ERROR under investigation + String checkedVal = value != null ? value.toString() : null; - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE001", String.format( - "TypeMismatchError: \"%s\" is not a map. The type is \"%s\"", - checkedVal, dataType.getType()))); - - if (value instanceof List && ((List) value).size() > 0) { - value = ((List) value).get(0); - } + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE001", String.format( + "TypeMismatchError: \"%s\" is not a map. 
The type is \"%s\"", + checkedVal, dataType.getType()))); - if (!(value instanceof LinkedHashMap)) { - return value; - } - } + if (value instanceof List && ((List) value).size() > 0) { + value = ((List) value).get(0); + } + if (!(value instanceof LinkedHashMap)) { + return value; + } + } - LinkedHashMap valueDict = (LinkedHashMap)value; + LinkedHashMap valueDict = (LinkedHashMap) value; ArrayList allowedProps = new ArrayList<>(); ArrayList requiredProps = new ArrayList<>(); - LinkedHashMap defaultProps = new LinkedHashMap<>(); - if(schema != null) { - allowedProps.addAll(schema.keySet()); - for(String name: schema.keySet()) { - PropertyDef propDef = schema.get(name); - if(propDef.isRequired()) { - requiredProps.add(name); - } - if(propDef.getDefault() != null) { - defaultProps.put(name,propDef.getDefault()); - } - } + LinkedHashMap defaultProps = new LinkedHashMap<>(); + if (schema != null) { + allowedProps.addAll(schema.keySet()); + for (String name : schema.keySet()) { + PropertyDef propDef = schema.get(name); + if (propDef.isRequired()) { + requiredProps.add(name); + } + if (propDef.getDefault() != null) { + defaultProps.put(name, propDef.getDefault()); + } + } } - + // check allowed field - for(String valueKey: valueDict.keySet()) { - //1710 devlop JSON validation - if(!("json").equals(dataType.getType()) && !allowedProps.contains(valueKey)) { + for (String valueKey : valueDict.keySet()) { + //1710 devlop JSON validation + if (!("json").equals(dataType.getType()) && !allowedProps.contains(valueKey)) { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE100", String.format( "UnknownFieldError: Data value of type \"%s\" contains unknown field \"%s\"", - dataType.getType(),valueKey))); - } + dataType.getType(), valueKey))); + } } // check default field - for(String defKey: defaultProps.keySet()) { - Object defValue = defaultProps.get(defKey); - if(valueDict.get(defKey) == null) { - valueDict.put(defKey, defValue); - } - + for 
(String defKey : defaultProps.keySet()) { + Object defValue = defaultProps.get(defKey); + if (valueDict.get(defKey) == null) { + valueDict.put(defKey, defValue); + } + } - + // check missing field ArrayList missingProp = new ArrayList<>(); - for(String reqKey: requiredProps) { - if(!valueDict.keySet().contains(reqKey)) { + for (String reqKey : requiredProps) { + if (!valueDict.keySet().contains(reqKey)) { missingProp.add(reqKey); } } - if(missingProp.size() > 0) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE003",String.format( - "MissingRequiredFieldError: Data value of type \"%s\" is missing required field(s) \"%s\"", - dataType.getType(),missingProp.toString()))); + if (missingProp.size() > 0) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE003", String.format( + "MissingRequiredFieldError: Data value of type \"%s\" is missing required field(s) \"%s\"", + dataType.getType(), missingProp.toString()))); } - + // check every field - for(String vname: valueDict.keySet()) { - Object vvalue = valueDict.get(vname); - LinkedHashMap schemaName = _findSchema(vname); - if(schemaName == null) { - continue; - } - Schema propSchema = new Schema(vname,schemaName); + for (String vname : valueDict.keySet()) { + Object vvalue = valueDict.get(vname); + LinkedHashMap schemaName = _findSchema(vname); + if (schemaName == null) { + continue; + } + Schema propSchema = new Schema(vname, schemaName); // check if field value meets type defined - DataEntity.validateDatatype(propSchema.getType(), - vvalue, - propSchema.getEntrySchema(), - customDef, - null); - + DataEntity.validateDatatype(propSchema.getType(), + vvalue, + propSchema.getEntrySchema(), + customDef, + null); + // check if field value meets constraints defined - if(propSchema.getConstraints() != null) { - for(Constraint constraint: propSchema.getConstraints()) { - if(vvalue instanceof ArrayList) { - for(Object val: (ArrayList)vvalue) { + if 
(propSchema.getConstraints() != null) { + for (Constraint constraint : propSchema.getConstraints()) { + if (vvalue instanceof ArrayList) { + for (Object val : (ArrayList) vvalue) { constraint.validate(val); } - } - else { + } else { constraint.validate(vvalue); } } @@ -166,134 +169,117 @@ public class DataEntity { } } return value; - } - - private LinkedHashMap _findSchema(String name) { - if(schema != null && schema.get(name) != null) { - return schema.get(name).getSchema(); - } - return null; - } - - public static Object validateDatatype(String type, - Object value, - LinkedHashMap entrySchema, - LinkedHashMap customDef, - String propName) { - // Validate value with given type + } + + private LinkedHashMap _findSchema(String name) { + if (schema != null && schema.get(name) != null) { + return schema.get(name).getSchema(); + } + return null; + } + + public static Object validateDatatype(String type, + Object value, + LinkedHashMap entrySchema, + LinkedHashMap customDef, + String propName) { + // Validate value with given type // If type is list or map, validate its entry by entry_schema(if defined) // If type is a user-defined complex datatype, custom_def is required. 
- if(Function.isFunction(value)) { - return value; - } - else if (type == null) { - //NOT ANALYZED - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE002", String.format( - "MissingType: Type is missing for value \"%s\"", - value.toString()))); - return value; - } - else if(type.equals(Schema.STRING)) { + if (Function.isFunction(value)) { + return value; + } else if (type == null) { + //NOT ANALYZED + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE002", String.format( + "MissingType: Type is missing for value \"%s\"", + value.toString()))); + return value; + } else if (type.equals(Schema.STRING)) { return ValidateUtils.validateString(value); - } - else if(type.equals(Schema.INTEGER)) { + } else if (type.equals(Schema.INTEGER)) { return ValidateUtils.validateInteger(value); - } - else if(type.equals(Schema.FLOAT)) { + } else if (type.equals(Schema.FLOAT)) { return ValidateUtils.validateFloat(value); - } - else if(type.equals(Schema.NUMBER)) { + } else if (type.equals(Schema.NUMBER)) { return ValidateUtils.validateNumeric(value); - } - else if(type.equals(Schema.BOOLEAN)) { + } else if (type.equals(Schema.BOOLEAN)) { return ValidateUtils.validateBoolean(value); - } - else if(type.equals(Schema.RANGE)) { + } else if (type.equals(Schema.RANGE)) { return ValidateUtils.validateRange(value); - } - else if(type.equals(Schema.TIMESTAMP)) { + } else if (type.equals(Schema.TIMESTAMP)) { ValidateUtils.validateTimestamp(value); return value; - } - else if(type.equals(Schema.LIST)) { + } else if (type.equals(Schema.LIST)) { ValidateUtils.validateList(value); - if(entrySchema != null) { - DataEntity.validateEntry(value,entrySchema,customDef); + if (entrySchema != null) { + DataEntity.validateEntry(value, entrySchema, customDef); } return value; - } - else if(type.equals(Schema.SCALAR_UNIT_SIZE)) { + } else if (type.equals(Schema.SCALAR_UNIT_SIZE)) { return (new ScalarUnitSize(value)).validateScalarUnit(); - 
} - else if(type.equals(Schema.SCALAR_UNIT_FREQUENCY)) { + } else if (type.equals(Schema.SCALAR_UNIT_FREQUENCY)) { return (new ScalarUnitFrequency(value)).validateScalarUnit(); - } - else if(type.equals(Schema.SCALAR_UNIT_TIME)) { + } else if (type.equals(Schema.SCALAR_UNIT_TIME)) { return (new ScalarUnitTime(value)).validateScalarUnit(); - } - else if(type.equals(Schema.VERSION)) { - return (new TOSCAVersionProperty(value)).getVersion(); - } - else if(type.equals(Schema.MAP)) { + } else if (type.equals(Schema.VERSION)) { + return (new TOSCAVersionProperty(value.toString())).getVersion(); + } else if (type.equals(Schema.MAP)) { ValidateUtils.validateMap(value); - if(entrySchema != null) { - DataEntity.validateEntry(value,entrySchema,customDef); + if (entrySchema != null) { + DataEntity.validateEntry(value, entrySchema, customDef); } return value; - } - else if(type.equals(Schema.PORTSPEC)) { + } else if (type.equals(Schema.PORTSPEC)) { // tODO(TBD) bug 1567063, validate source & target as PortDef type // as complex types not just as integers - PortSpec.validateAdditionalReq(value,propName,customDef); - } - else { - DataEntity data = new DataEntity(type,value,customDef,null); + PortSpec.validateAdditionalReq(value, propName, customDef); + } else { + DataEntity data = new DataEntity(type, value, customDef, null); return data.validate(); } - - return value; - } - - @SuppressWarnings("unchecked") - public static Object validateEntry(Object value, - LinkedHashMap entrySchema, - LinkedHashMap customDef) { - + + return value; + } + + @SuppressWarnings("unchecked") + public static Object validateEntry(Object value, + LinkedHashMap entrySchema, + LinkedHashMap customDef) { + // Validate entries for map and list - Schema schema = new Schema(null,entrySchema); + Schema schema = new Schema(null, entrySchema); Object valueob = value; ArrayList valueList = null; - if(valueob instanceof LinkedHashMap) { - valueList = new ArrayList(((LinkedHashMap)valueob).values()); - } - else 
if(valueob instanceof ArrayList) { - valueList = (ArrayList)valueob; + if (valueob instanceof LinkedHashMap) { + valueList = new ArrayList(((LinkedHashMap) valueob).values()); + } else if (valueob instanceof ArrayList) { + valueList = (ArrayList) valueob; } - if(valueList != null) { - for(Object v: valueList) { - DataEntity.validateDatatype(schema.getType(),v,schema.getEntrySchema(),customDef,null); - if(schema.getConstraints() != null) { - for(Constraint constraint: schema.getConstraints()) { - constraint.validate(v); - } - } - } + if (valueList != null) { + for (Object v : valueList) { + DataEntity.validateDatatype(schema.getType(), v, schema.getEntrySchema(), customDef, null); + if (schema.getConstraints() != null) { + for (Constraint constraint : schema.getConstraints()) { + constraint.validate(v); + } + } + } } - return value; - } - - @Override - public String toString() { - return "DataEntity{" + - "customDef=" + customDef + - ", dataType=" + dataType + - ", schema=" + schema + - ", value=" + value + - ", propertyName='" + propertyName + '\'' + - '}'; - } + return value; + } + + @Override + public String toString() { + return "DataEntity{" + + "customDef=" + customDef + + ", dataType=" + dataType + + ", schema=" + schema + + ", value=" + value + + ", propertyName='" + propertyName + '\'' + + '}'; + } } /*python diff --git a/src/main/java/org/onap/sdc/toscaparser/api/EntityTemplate.java b/src/main/java/org/onap/sdc/toscaparser/api/EntityTemplate.java index b0540be..93bfe2b 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/EntityTemplate.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/EntityTemplate.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -33,111 +33,107 @@ import java.util.Map; public abstract class EntityTemplate { // Base class for TOSCA templates - protected static final String DERIVED_FROM = "derived_from"; - protected static final String PROPERTIES = "properties"; - protected static final String REQUIREMENTS = "requirements"; - protected static final String INTERFACES = "interfaces"; - protected static final String CAPABILITIES = "capabilities"; - protected static final String TYPE = "type"; - protected static final String DESCRIPTION = "description"; - protected static final String DIRECTIVES = "directives"; - protected static final String ATTRIBUTES = "attributes"; - protected static final String ARTIFACTS = "artifacts"; - protected static final String NODE_FILTER = "node_filter"; - protected static final String COPY = "copy"; - - protected static final String SECTIONS[] = { - DERIVED_FROM, PROPERTIES, REQUIREMENTS,INTERFACES, - CAPABILITIES, TYPE, DESCRIPTION, DIRECTIVES, - ATTRIBUTES, ARTIFACTS, NODE_FILTER, COPY}; - - private static final String NODE = "node"; - private static final String CAPABILITY = "capability"; - private static final String RELATIONSHIP = "relationship"; - private static final String OCCURRENCES = "occurrences"; - - protected static final String REQUIREMENTS_SECTION[] = { - NODE, CAPABILITY, RELATIONSHIP, OCCURRENCES, NODE_FILTER}; - - //# Special key names - private static final String METADATA = "metadata"; - protected static final String SPECIAL_SECTIONS[] = {METADATA}; - - protected String name; - protected LinkedHashMap entityTpl; - protected LinkedHashMap customDef; - protected StatefulEntityType typeDefinition; - private ArrayList _properties; - 
private ArrayList _interfaces; - private ArrayList _requirements; - private ArrayList _capabilities; - - @Nullable - private NodeTemplate _parentNodeTemplate; - - // dummy constructor for subclasses that don't want super - public EntityTemplate() { - return; - } + protected static final String DERIVED_FROM = "derived_from"; + protected static final String PROPERTIES = "properties"; + protected static final String REQUIREMENTS = "requirements"; + protected static final String INTERFACES = "interfaces"; + protected static final String CAPABILITIES = "capabilities"; + protected static final String TYPE = "type"; + protected static final String DESCRIPTION = "description"; + protected static final String DIRECTIVES = "directives"; + protected static final String ATTRIBUTES = "attributes"; + protected static final String ARTIFACTS = "artifacts"; + protected static final String NODE_FILTER = "node_filter"; + protected static final String COPY = "copy"; + + protected static final String SECTIONS[] = { + DERIVED_FROM, PROPERTIES, REQUIREMENTS, INTERFACES, + CAPABILITIES, TYPE, DESCRIPTION, DIRECTIVES, + ATTRIBUTES, ARTIFACTS, NODE_FILTER, COPY}; + + private static final String NODE = "node"; + private static final String CAPABILITY = "capability"; + private static final String RELATIONSHIP = "relationship"; + private static final String OCCURRENCES = "occurrences"; + + protected static final String REQUIREMENTS_SECTION[] = { + NODE, CAPABILITY, RELATIONSHIP, OCCURRENCES, NODE_FILTER}; + + //# Special key names + private static final String METADATA = "metadata"; + protected static final String SPECIAL_SECTIONS[] = {METADATA}; + + protected String name; + protected LinkedHashMap entityTpl; + protected LinkedHashMap customDef; + protected StatefulEntityType typeDefinition; + private ArrayList _properties; + private ArrayList _interfaces; + private ArrayList _requirements; + private ArrayList _capabilities; + + @Nullable + private NodeTemplate _parentNodeTemplate; + + // 
dummy constructor for subclasses that don't want super + public EntityTemplate() { + return; + } public EntityTemplate(String _name, - LinkedHashMap _template, + LinkedHashMap _template, String _entityName, - LinkedHashMap _customDef) { - this(_name, _template, _entityName, _customDef, null); + LinkedHashMap _customDef) { + this(_name, _template, _entityName, _customDef, null); } @SuppressWarnings("unchecked") - public EntityTemplate(String _name, - LinkedHashMap _template, - String _entityName, - LinkedHashMap _customDef, - NodeTemplate parentNodeTemplate) { + public EntityTemplate(String _name, + LinkedHashMap _template, + String _entityName, + LinkedHashMap _customDef, + NodeTemplate parentNodeTemplate) { name = _name; entityTpl = _template; customDef = _customDef; _validateField(entityTpl); - String type = (String)entityTpl.get("type"); - UnsupportedType.validateType(type); - if(_entityName.equals("node_type")) { - if(type != null) { - typeDefinition = new NodeType(type, customDef); - } - else { - typeDefinition = null; - } + String type = (String) entityTpl.get("type"); + UnsupportedType.validateType(type); + if (_entityName.equals("node_type")) { + if (type != null) { + typeDefinition = new NodeType(type, customDef); + } else { + typeDefinition = null; + } } - if(_entityName.equals("relationship_type")) { - Object relationship = _template.get("relationship"); + if (_entityName.equals("relationship_type")) { + Object relationship = _template.get("relationship"); type = null; - if(relationship != null && relationship instanceof LinkedHashMap) { - type = (String)((LinkedHashMap)relationship).get("type"); - } - else if(relationship instanceof String) { - type = (String)entityTpl.get("relationship"); - } - else { - type = (String)entityTpl.get("type"); + if (relationship != null && relationship instanceof LinkedHashMap) { + type = (String) ((LinkedHashMap) relationship).get("type"); + } else if (relationship instanceof String) { + type = (String) 
entityTpl.get("relationship"); + } else { + type = (String) entityTpl.get("type"); } UnsupportedType.validateType(type); - typeDefinition = new RelationshipType(type,null, customDef); + typeDefinition = new RelationshipType(type, null, customDef); } - if(_entityName.equals("policy_type")) { - if(type == null) { + if (_entityName.equals("policy_type")) { + if (type == null) { //msg = (_('Policy definition of "%(pname)s" must have' // ' a "type" ''attribute.') % dict(pname=name)) ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE140", String.format( - "ValidationError: Policy definition of \"%s\" must have a \"type\" attribute",name))); + "ValidationError: Policy definition of \"%s\" must have a \"type\" attribute", name))); } typeDefinition = new PolicyType(type, customDef); } - if(_entityName.equals("group_type")) { - if(type != null) { - typeDefinition = new GroupType(type, customDef); - } - else { + if (_entityName.equals("group_type")) { + if (type != null) { + typeDefinition = new GroupType(type, customDef); + } else { typeDefinition = null; - } + } } _properties = null; _interfaces = null; @@ -146,451 +142,439 @@ public abstract class EntityTemplate { _parentNodeTemplate = parentNodeTemplate; } - public NodeTemplate getParentNodeTemplate() { - return _parentNodeTemplate; - } + public NodeTemplate getParentNodeTemplate() { + return _parentNodeTemplate; + } public String getType() { - if(typeDefinition != null) { - String clType = typeDefinition.getClass().getSimpleName(); - if(clType.equals("NodeType")) { - return (String)((NodeType)typeDefinition).getType(); - } - else if(clType.equals("PolicyType")) { - return (String)((PolicyType)typeDefinition).getType(); - } - else if(clType.equals("GroupType")) { - return (String)((GroupType)typeDefinition).getType(); - } - else if(clType.equals("RelationshipType")) { - return (String)((RelationshipType)typeDefinition).getType(); - } - } - return null; + if (typeDefinition != null) { + 
String clType = typeDefinition.getClass().getSimpleName(); + if (clType.equals("NodeType")) { + return (String) ((NodeType) typeDefinition).getType(); + } else if (clType.equals("PolicyType")) { + return (String) ((PolicyType) typeDefinition).getType(); + } else if (clType.equals("GroupType")) { + return (String) ((GroupType) typeDefinition).getType(); + } else if (clType.equals("RelationshipType")) { + return (String) ((RelationshipType) typeDefinition).getType(); + } + } + return null; } public Object getParentType() { - if(typeDefinition != null) { - String clType = typeDefinition.getClass().getSimpleName(); - if(clType.equals("NodeType")) { - return ((NodeType)typeDefinition).getParentType(); - } - else if(clType.equals("PolicyType")) { - return ((PolicyType)typeDefinition).getParentType(); - } - else if(clType.equals("GroupType")) { - return ((GroupType)typeDefinition).getParentType(); - } - else if(clType.equals("RelationshipType")) { - return ((RelationshipType)typeDefinition).getParentType(); - } - } - return null; + if (typeDefinition != null) { + String clType = typeDefinition.getClass().getSimpleName(); + if (clType.equals("NodeType")) { + return ((NodeType) typeDefinition).getParentType(); + } else if (clType.equals("PolicyType")) { + return ((PolicyType) typeDefinition).getParentType(); + } else if (clType.equals("GroupType")) { + return ((GroupType) typeDefinition).getParentType(); + } else if (clType.equals("RelationshipType")) { + return ((RelationshipType) typeDefinition).getParentType(); + } + } + return null; } - - @SuppressWarnings("unchecked") - public RequirementAssignments getRequirements() { - if(_requirements == null) { - _requirements = _createRequirements(); - } - return new RequirementAssignments(_requirements); + + @SuppressWarnings("unchecked") + public RequirementAssignments getRequirements() { + if (_requirements == null) { + _requirements = _createRequirements(); + } + return new RequirementAssignments(_requirements); } private 
ArrayList _createRequirements() { - ArrayList reqs = new ArrayList<>(); - ArrayList> requirements = (ArrayList>) - typeDefinition.getValue(REQUIREMENTS,entityTpl,false); - if(requirements == null) { - requirements = new ArrayList<>(); - } - for (Map req: requirements) { - for(String reqName: req.keySet()) { - Object reqItem = req.get(reqName); - if(reqItem instanceof LinkedHashMap) { - Object rel = ((LinkedHashMap)reqItem).get("relationship"); + ArrayList reqs = new ArrayList<>(); + ArrayList> requirements = (ArrayList>) + typeDefinition.getValue(REQUIREMENTS, entityTpl, false); + if (requirements == null) { + requirements = new ArrayList<>(); + } + for (Map req : requirements) { + for (String reqName : req.keySet()) { + Object reqItem = req.get(reqName); + if (reqItem instanceof LinkedHashMap) { + Object rel = ((LinkedHashMap) reqItem).get("relationship"); // LinkedHashMap relationship = rel instanceof LinkedHashMap ? (LinkedHashMap) rel : null; - String nodeName = ((LinkedHashMap)reqItem).get("node").toString(); - Object capability = ((LinkedHashMap)reqItem).get("capability"); - String capabilityString = capability != null ? capability.toString() : null; - - reqs.add(new RequirementAssignment(reqName, nodeName, capabilityString, rel)); - } else if (reqItem instanceof String) { //short notation - String nodeName = String.valueOf(reqItem); - reqs.add(new RequirementAssignment(reqName, nodeName)); - } - } - } - return reqs; - } + String nodeName = ((LinkedHashMap) reqItem).get("node").toString(); + Object capability = ((LinkedHashMap) reqItem).get("capability"); + String capabilityString = capability != null ? 
capability.toString() : null; + + reqs.add(new RequirementAssignment(reqName, nodeName, capabilityString, rel)); + } else if (reqItem instanceof String) { //short notation + String nodeName = String.valueOf(reqItem); + reqs.add(new RequirementAssignment(reqName, nodeName)); + } + } + } + return reqs; + } public ArrayList getPropertiesObjects() { // Return properties objects for this template - if(_properties ==null) { + if (_properties == null) { _properties = _createProperties(); } - return _properties; + return _properties; } - - public LinkedHashMap getProperties() { - LinkedHashMap props = new LinkedHashMap<>(); - for(Property po: getPropertiesObjects()) { - props.put(po.getName(),po); - } - return props; + + public LinkedHashMap getProperties() { + LinkedHashMap props = new LinkedHashMap<>(); + for (Property po : getPropertiesObjects()) { + props.put(po.getName(), po); + } + return props; } - + public Object getPropertyValue(String name) { - LinkedHashMap props = getProperties(); - Property p = props.get(name); - return p != null ? p.getValue() : null; - } + LinkedHashMap props = getProperties(); + Property p = props.get(name); + return p != null ? 
p.getValue() : null; + } - public String getPropertyType(String name) { - Property property = getProperties().get(name); + public String getPropertyType(String name) { + Property property = getProperties().get(name); if (property != null) { return property.getType(); } return null; - } + } public ArrayList getInterfaces() { - if(_interfaces == null) { - _interfaces = _createInterfaces(); - } - return _interfaces; + if (_interfaces == null) { + _interfaces = _createInterfaces(); + } + return _interfaces; } - + public ArrayList getCapabilitiesObjects() { // Return capabilities objects for this template - if(_capabilities == null) { - _capabilities = _createCapabilities(); - } - return _capabilities; - + if (_capabilities == null) { + _capabilities = _createCapabilities(); + } + return _capabilities; + } - + public CapabilityAssignments getCapabilities() { - LinkedHashMap caps = new LinkedHashMap(); - for(CapabilityAssignment cap: getCapabilitiesObjects()) { - caps.put(cap.getName(),cap); - } - return new CapabilityAssignments(caps); + LinkedHashMap caps = new LinkedHashMap(); + for (CapabilityAssignment cap : getCapabilitiesObjects()) { + caps.put(cap.getName(), cap); + } + return new CapabilityAssignments(caps); } public boolean isDerivedFrom(String typeStr) { - // Returns true if this object is derived from 'type_str'. + // Returns true if this object is derived from 'type_str'. 
// False otherwise - - if(getType() == null) { - return false; - } - else if(getType().equals(typeStr)) { - return true; - } - else if(getParentType() != null) { - return ((EntityType)getParentType()).isDerivedFrom(typeStr); - } - return false; + + if (getType() == null) { + return false; + } else if (getType().equals(typeStr)) { + return true; + } else if (getParentType() != null) { + return ((EntityType) getParentType()).isDerivedFrom(typeStr); + } + return false; } - + @SuppressWarnings("unchecked") - private ArrayList _createCapabilities() { - ArrayList capability = new ArrayList(); - LinkedHashMap caps = (LinkedHashMap) - ((EntityType)typeDefinition).getValue(CAPABILITIES,entityTpl,true); - if(caps != null) { - //?!? getCapabilities defined only for NodeType... - LinkedHashMap capabilities = null; - if(typeDefinition instanceof NodeType){ - capabilities = ((NodeType)typeDefinition).getCapabilities(); - } else if (typeDefinition instanceof GroupType){ - capabilities = ((GroupType)typeDefinition).getCapabilities(); - } - for(Map.Entry me: caps.entrySet()) { - String name = me. getKey(); - LinkedHashMap props = (LinkedHashMap)me.getValue(); - if(capabilities.get(name) != null) { - CapabilityTypeDef c = capabilities.get(name); // a CapabilityTypeDef - LinkedHashMap properties = new LinkedHashMap(); - // first use the definition default value - LinkedHashMap cprops = c.getProperties(); - if(cprops != null) { - for(Map.Entry cpe: cprops.entrySet()) { - String propertyName = cpe.getKey(); - LinkedHashMap propertyDef = (LinkedHashMap)cpe.getValue(); - Object dob = propertyDef.get("default"); - if(dob != null) { - properties.put(propertyName, dob); - - } - } - } + private ArrayList _createCapabilities() { + ArrayList capability = new ArrayList(); + LinkedHashMap caps = (LinkedHashMap) + ((EntityType) typeDefinition).getValue(CAPABILITIES, entityTpl, true); + if (caps != null) { + //?!? getCapabilities defined only for NodeType... 
+ LinkedHashMap capabilities = null; + if (typeDefinition instanceof NodeType) { + capabilities = ((NodeType) typeDefinition).getCapabilities(); + } else if (typeDefinition instanceof GroupType) { + capabilities = ((GroupType) typeDefinition).getCapabilities(); + } + for (Map.Entry me : caps.entrySet()) { + String name = me.getKey(); + LinkedHashMap props = (LinkedHashMap) me.getValue(); + if (capabilities.get(name) != null) { + CapabilityTypeDef c = capabilities.get(name); // a CapabilityTypeDef + LinkedHashMap properties = new LinkedHashMap(); + // first use the definition default value + LinkedHashMap cprops = c.getProperties(); + if (cprops != null) { + for (Map.Entry cpe : cprops.entrySet()) { + String propertyName = cpe.getKey(); + LinkedHashMap propertyDef = (LinkedHashMap) cpe.getValue(); + Object dob = propertyDef.get("default"); + if (dob != null) { + properties.put(propertyName, dob); + + } + } + } // then update (if available) with the node properties - LinkedHashMap pp = (LinkedHashMap)props.get("properties"); - if(pp != null) { - properties.putAll(pp); - } + LinkedHashMap pp = (LinkedHashMap) props.get("properties"); + if (pp != null) { + properties.putAll(pp); + } CapabilityAssignment cap = new CapabilityAssignment(name, properties, c, customDef); capability.add(cap); - } - } - } - return capability; - } - - protected void _validateProperties(LinkedHashMap template,StatefulEntityType entityType) { - @SuppressWarnings("unchecked") - LinkedHashMap properties = (LinkedHashMap)entityType.getValue(PROPERTIES,template,false); - _commonValidateProperties(entityType,properties); + } + } + } + return capability; + } + + protected void _validateProperties(LinkedHashMap template, StatefulEntityType entityType) { + @SuppressWarnings("unchecked") + LinkedHashMap properties = (LinkedHashMap) entityType.getValue(PROPERTIES, template, false); + _commonValidateProperties(entityType, properties); } protected void _validateCapabilities() { - //BUG??? 
getCapabilities only defined in NodeType... - LinkedHashMap typeCapabilities = ((NodeType)typeDefinition).getCapabilities(); - ArrayList allowedCaps = new ArrayList(); - if(typeCapabilities != null) { - allowedCaps.addAll(typeCapabilities.keySet()); - } - @SuppressWarnings("unchecked") - LinkedHashMap capabilities = (LinkedHashMap) - ((EntityType)typeDefinition).getValue(CAPABILITIES, entityTpl, false); - if(capabilities != null) { + //BUG??? getCapabilities only defined in NodeType... + LinkedHashMap typeCapabilities = ((NodeType) typeDefinition).getCapabilities(); + ArrayList allowedCaps = new ArrayList(); + if (typeCapabilities != null) { + allowedCaps.addAll(typeCapabilities.keySet()); + } + @SuppressWarnings("unchecked") + LinkedHashMap capabilities = (LinkedHashMap) + ((EntityType) typeDefinition).getValue(CAPABILITIES, entityTpl, false); + if (capabilities != null) { _commonValidateField(capabilities, allowedCaps, "capabilities"); _validateCapabilitiesProperties(capabilities); - } + } } - - @SuppressWarnings("unchecked") - private void _validateCapabilitiesProperties(LinkedHashMap capabilities) { - for(Map.Entry me: capabilities.entrySet()) { - String cap = me.getKey(); - LinkedHashMap props = (LinkedHashMap)me.getValue(); - CapabilityAssignment capability = getCapability(cap); - if(capability == null) { - continue; - } - CapabilityTypeDef capabilitydef = capability.getDefinition(); - _commonValidateProperties(capabilitydef,(LinkedHashMap)props.get(PROPERTIES)); - + + @SuppressWarnings("unchecked") + private void _validateCapabilitiesProperties(LinkedHashMap capabilities) { + for (Map.Entry me : capabilities.entrySet()) { + String cap = me.getKey(); + LinkedHashMap props = (LinkedHashMap) me.getValue(); + CapabilityAssignment capability = getCapability(cap); + if (capability == null) { + continue; + } + CapabilityTypeDef capabilitydef = capability.getDefinition(); + _commonValidateProperties(capabilitydef, (LinkedHashMap) props.get(PROPERTIES)); + // 
validating capability properties values - for(Property prop: getCapability(cap).getPropertiesObjects()) { + for (Property prop : getCapability(cap).getPropertiesObjects()) { prop.validate(); - - if(cap.equals("scalable") && prop.getName().equals("default_instances")) { - LinkedHashMap propDict = (LinkedHashMap)props.get(PROPERTIES); - int minInstances = (int)propDict.get("min_instances"); - int maxInstances = (int)propDict.get("max_instances"); - int defaultInstances = (int)propDict.get("default_instances"); - if(defaultInstances < minInstances || defaultInstances > maxInstances) { + + if (cap.equals("scalable") && prop.getName().equals("default_instances")) { + LinkedHashMap propDict = (LinkedHashMap) props.get(PROPERTIES); + int minInstances = (int) propDict.get("min_instances"); + int maxInstances = (int) propDict.get("max_instances"); + int defaultInstances = (int) propDict.get("default_instances"); + if (defaultInstances < minInstances || defaultInstances > maxInstances) { //err_msg = ('"properties" of template "%s": ' // '"default_instances" value is not between ' // '"min_instances" and "max_instances".' 
% // self.name) ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE141", String.format( - "ValidationError: \"properties\" of template \"%s\": \"default_instances\" value is not between \"min_instances\" and \"max_instances\"", - name))); + "ValidationError: \"properties\" of template \"%s\": \"default_instances\" value is not between \"min_instances\" and \"max_instances\"", + name))); } } - } - } + } + } } - private void _commonValidateProperties(StatefulEntityType entityType,LinkedHashMap properties) { - ArrayList allowedProps = new ArrayList(); - ArrayList requiredProps = new ArrayList(); - for(PropertyDef p: entityType.getPropertiesDefObjects()) { - allowedProps.add(p.getName()); - // If property is 'required' and has no 'default' value then record - if(p.isRequired() && p.getDefault() == null) { - requiredProps.add(p.getName()); - } - } + private void _commonValidateProperties(StatefulEntityType entityType, LinkedHashMap properties) { + ArrayList allowedProps = new ArrayList(); + ArrayList requiredProps = new ArrayList(); + for (PropertyDef p : entityType.getPropertiesDefObjects()) { + allowedProps.add(p.getName()); + // If property is 'required' and has no 'default' value then record + if (p.isRequired() && p.getDefault() == null) { + requiredProps.add(p.getName()); + } + } // validate all required properties have values - if(properties != null) { + if (properties != null) { ArrayList reqPropsNoValueOrDefault = new ArrayList(); _commonValidateField(properties, allowedProps, "properties"); // make sure it's not missing any property required by a tosca type - for(String r: requiredProps) { - if(properties.get(r) == null) { - reqPropsNoValueOrDefault.add(r); - } + for (String r : requiredProps) { + if (properties.get(r) == null) { + reqPropsNoValueOrDefault.add(r); + } } // Required properties found without value or a default value - if(!reqPropsNoValueOrDefault.isEmpty()) { + if (!reqPropsNoValueOrDefault.isEmpty()) { 
ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE003", String.format( - "MissingRequiredFieldError: properties of template \"%s\" are missing field(s): %s", - name,reqPropsNoValueOrDefault.toString()))); + "MissingRequiredFieldError: properties of template \"%s\" are missing field(s): %s", + name, reqPropsNoValueOrDefault.toString()))); } - } - else { + } else { // Required properties in schema, but not in template - if(!requiredProps.isEmpty()) { + if (!requiredProps.isEmpty()) { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE004", String.format( "MissingRequiredFieldError2: properties of template \"%s\" are missing field(s): %s", - name,requiredProps.toString()))); + name, requiredProps.toString()))); } } } - + @SuppressWarnings("unchecked") - private void _validateField(LinkedHashMap template) { - if(!(template instanceof LinkedHashMap)) { + private void _validateField(LinkedHashMap template) { + if (!(template instanceof LinkedHashMap)) { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE142", String.format( - "MissingRequiredFieldError: Template \"%s\" is missing required field \"%s\"",name,TYPE))); + "MissingRequiredFieldError: Template \"%s\" is missing required field \"%s\"", name, TYPE))); return;//??? 
} boolean bBad = false; - Object relationship = ((LinkedHashMap)template).get("relationship"); - if(relationship != null) { - if(!(relationship instanceof String)) { - bBad = (((LinkedHashMap)relationship).get(TYPE) == null); - } - else if(relationship instanceof String) { - bBad = (template.get("relationship") == null); - } - } - else { - bBad = (template.get(TYPE) == null); + Object relationship = ((LinkedHashMap) template).get("relationship"); + if (relationship != null) { + if (!(relationship instanceof String)) { + bBad = (((LinkedHashMap) relationship).get(TYPE) == null); + } else if (relationship instanceof String) { + bBad = (template.get("relationship") == null); + } + } else { + bBad = (template.get(TYPE) == null); } - if(bBad) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE143", String.format( - "MissingRequiredFieldError: Template \"%s\" is missing required field \"%s\"",name,TYPE))); + if (bBad) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE143", String.format( + "MissingRequiredFieldError: Template \"%s\" is missing required field \"%s\"", name, TYPE))); } } - - protected void _commonValidateField(LinkedHashMap schema, ArrayList allowedList,String section) { - for(String sname: schema.keySet()) { - boolean bFound = false; - for(String allowed: allowedList) { - if(sname.equals(allowed)) { - bFound = true; - break; - } - } - if(!bFound) { + + protected void _commonValidateField(LinkedHashMap schema, ArrayList allowedList, String section) { + for (String sname : schema.keySet()) { + boolean bFound = false; + for (String allowed : allowedList) { + if (sname.equals(allowed)) { + bFound = true; + break; + } + } + if (!bFound) { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE144", String.format( - "UnknownFieldError: Section \"%s\" of template \"%s\" contains unknown field \"%s\"",section,name,sname))); - } - } - + "UnknownFieldError: 
Section \"%s\" of template \"%s\" contains unknown field \"%s\"", section, name, sname))); + } + } + } - + @SuppressWarnings("unchecked") - private ArrayList _createProperties() { - ArrayList props = new ArrayList(); - LinkedHashMap properties = (LinkedHashMap) - ((EntityType)typeDefinition).getValue(PROPERTIES,entityTpl,false); - if(properties == null) { - properties = new LinkedHashMap(); - } - for(Map.Entry me: properties.entrySet()) { - String pname = me.getKey(); - Object pvalue = me.getValue(); - LinkedHashMap propsDef = ((StatefulEntityType)typeDefinition).getPropertiesDef(); - if(propsDef != null && propsDef.get(pname) != null) { - PropertyDef pd = (PropertyDef)propsDef.get(pname); - Property prop = new Property(pname,pvalue,pd.getSchema(),customDef); - props.add(prop); - } - } - ArrayList pds = ((StatefulEntityType)typeDefinition).getPropertiesDefObjects(); - for(Object pdo: pds) { - PropertyDef pd = (PropertyDef)pdo; - if(pd.getDefault() != null && properties.get(pd.getName()) == null) { - Property prop = new Property(pd.getName(),pd.getDefault(),pd.getSchema(),customDef); - props.add(prop); - } - } - return props; + private ArrayList _createProperties() { + ArrayList props = new ArrayList(); + LinkedHashMap properties = (LinkedHashMap) + ((EntityType) typeDefinition).getValue(PROPERTIES, entityTpl, false); + if (properties == null) { + properties = new LinkedHashMap(); + } + for (Map.Entry me : properties.entrySet()) { + String pname = me.getKey(); + Object pvalue = me.getValue(); + LinkedHashMap propsDef = ((StatefulEntityType) typeDefinition).getPropertiesDef(); + if (propsDef != null && propsDef.get(pname) != null) { + PropertyDef pd = (PropertyDef) propsDef.get(pname); + Property prop = new Property(pname, pvalue, pd.getSchema(), customDef); + props.add(prop); + } + } + ArrayList pds = ((StatefulEntityType) typeDefinition).getPropertiesDefObjects(); + for (Object pdo : pds) { + PropertyDef pd = (PropertyDef) pdo; + if (pd.getDefault() != null && 
properties.get(pd.getName()) == null) { + Property prop = new Property(pd.getName(), pd.getDefault(), pd.getSchema(), customDef); + props.add(prop); + } + } + return props; } @SuppressWarnings("unchecked") - private ArrayList _createInterfaces() { - ArrayList interfaces = new ArrayList<>(); - LinkedHashMap typeInterfaces = new LinkedHashMap(); - if(typeDefinition instanceof RelationshipType) { - if(entityTpl instanceof LinkedHashMap) { - typeInterfaces = (LinkedHashMap)entityTpl.get(INTERFACES); - if(typeInterfaces == null) { - for(String relName: entityTpl.keySet()) { - Object relValue = entityTpl.get(relName); - if(!relName.equals("type")) { - Object relDef = relValue; - LinkedHashMap rel = null; - if(relDef instanceof LinkedHashMap) { - Object relob = ((LinkedHashMap)relDef).get("relationship"); - if(relob instanceof LinkedHashMap) { - rel = (LinkedHashMap)relob; - } - } - if(rel != null) { - if(rel.get(INTERFACES) != null) { - typeInterfaces = (LinkedHashMap)rel.get(INTERFACES); - break; - } - } - } - } - } - } - } - else { - typeInterfaces = (LinkedHashMap) - ((EntityType)typeDefinition).getValue(INTERFACES,entityTpl,false); - } - if(typeInterfaces != null) { - for(Map.Entry me: typeInterfaces.entrySet()) { - String interfaceType = me.getKey(); - LinkedHashMap value = (LinkedHashMap)me.getValue(); - for(Map.Entry ve: value.entrySet()) { - String op = ve.getKey(); - Object opDef = ve.getValue(); - InterfacesDef iface = new InterfacesDef((EntityType)typeDefinition, - interfaceType, - this, - op, - opDef); - interfaces.add(iface); - } - - } - } - return interfaces; + private ArrayList _createInterfaces() { + ArrayList interfaces = new ArrayList<>(); + LinkedHashMap typeInterfaces = new LinkedHashMap(); + if (typeDefinition instanceof RelationshipType) { + if (entityTpl instanceof LinkedHashMap) { + typeInterfaces = (LinkedHashMap) entityTpl.get(INTERFACES); + if (typeInterfaces == null) { + for (String relName : entityTpl.keySet()) { + Object relValue = 
entityTpl.get(relName); + if (!relName.equals("type")) { + Object relDef = relValue; + LinkedHashMap rel = null; + if (relDef instanceof LinkedHashMap) { + Object relob = ((LinkedHashMap) relDef).get("relationship"); + if (relob instanceof LinkedHashMap) { + rel = (LinkedHashMap) relob; + } + } + if (rel != null) { + if (rel.get(INTERFACES) != null) { + typeInterfaces = (LinkedHashMap) rel.get(INTERFACES); + break; + } + } + } + } + } + } + } else { + typeInterfaces = (LinkedHashMap) + ((EntityType) typeDefinition).getValue(INTERFACES, entityTpl, false); + } + if (typeInterfaces != null) { + for (Map.Entry me : typeInterfaces.entrySet()) { + String interfaceType = me.getKey(); + LinkedHashMap value = (LinkedHashMap) me.getValue(); + for (Map.Entry ve : value.entrySet()) { + String op = ve.getKey(); + Object opDef = ve.getValue(); + InterfacesDef iface = new InterfacesDef((EntityType) typeDefinition, + interfaceType, + this, + op, + opDef); + interfaces.add(iface); + } + + } + } + return interfaces; } - - public CapabilityAssignment getCapability(String name) { + + public CapabilityAssignment getCapability(String name) { // Provide named capability - // :param name: name of capability + // :param name: name of capability // :return: capability object if found, None otherwise - return getCapabilities().getCapabilityByName(name); + return getCapabilities().getCapabilityByName(name); } - - // getter - public String getName() { - return name; + + // getter + public String getName() { + return name; } - + public StatefulEntityType getTypeDefinition() { - return typeDefinition; + return typeDefinition; + } + + public LinkedHashMap getCustomDef() { + return customDef; + } + + @Override + public String toString() { + return "EntityTemplate{" + + "name='" + name + '\'' + + ", entityTpl=" + entityTpl + + ", customDef=" + customDef + + ", typeDefinition=" + typeDefinition + + ", _properties=" + _properties + + ", _interfaces=" + _interfaces + + ", _requirements=" + 
_requirements + + ", _capabilities=" + _capabilities + + '}'; } - - public LinkedHashMap getCustomDef() { - return customDef; - } - - @Override - public String toString() { - return "EntityTemplate{" + - "name='" + name + '\'' + - ", entityTpl=" + entityTpl + - ", customDef=" + customDef + - ", typeDefinition=" + typeDefinition + - ", _properties=" + _properties + - ", _interfaces=" + _interfaces + - ", _requirements=" + _requirements + - ", _capabilities=" + _capabilities + - '}'; - } } /*python diff --git a/src/main/java/org/onap/sdc/toscaparser/api/Group.java b/src/main/java/org/onap/sdc/toscaparser/api/Group.java index 299ba01..0591d9a 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/Group.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/Group.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -30,96 +30,97 @@ import java.util.LinkedHashMap; import java.util.Map; public class Group extends EntityTemplate { - - private static final String TYPE = "type"; - private static final String METADATA = "metadata"; - private static final String DESCRIPTION = "description"; - private static final String PROPERTIES = "properties"; - private static final String MEMBERS = "members"; - private static final String INTERFACES = "interfaces"; - private static final String SECTIONS[] = { - TYPE, METADATA, DESCRIPTION, PROPERTIES, MEMBERS, INTERFACES}; - - private String name; - LinkedHashMap tpl; - ArrayList memberNodes; - LinkedHashMap customDef; - Metadata metaData; - - - public Group(String _name, LinkedHashMap _templates, - ArrayList _memberNodes, - LinkedHashMap _customDef){ - this(_name, _templates, _memberNodes, _customDef, null); - } - - public Group(String _name, LinkedHashMap _templates, - ArrayList _memberNodes, - LinkedHashMap _customDef, NodeTemplate parentNodeTemplate) { - super(_name, _templates, "group_type", _customDef, parentNodeTemplate); - - name = _name; - tpl = _templates; - if(tpl.get(METADATA) != null) { - Object metadataObject = tpl.get(METADATA); - ValidateUtils.validateMap(metadataObject); - metaData = new Metadata((Map)metadataObject); + + private static final String TYPE = "type"; + private static final String METADATA = "metadata"; + private static final String DESCRIPTION = "description"; + private static final String PROPERTIES = "properties"; + private static final String MEMBERS = "members"; + private static final String INTERFACES = "interfaces"; + private static final String[] SECTIONS = { + TYPE, METADATA, DESCRIPTION, PROPERTIES, MEMBERS, INTERFACES}; + + private String name; + private LinkedHashMap tpl; + private ArrayList memberNodes; + private LinkedHashMap customDef; + private Metadata metaData; + + + public Group(String name, LinkedHashMap templates, + ArrayList memberNodes, + LinkedHashMap customDef) { + this(name, templates, 
memberNodes, customDef, null); + } + + public Group(String name, LinkedHashMap templates, + ArrayList memberNodes, + LinkedHashMap customDef, NodeTemplate parentNodeTemplate) { + super(name, templates, "group_type", customDef, parentNodeTemplate); + + this.name = name; + tpl = templates; + if (tpl.get(METADATA) != null) { + Object metadataObject = tpl.get(METADATA); + ValidateUtils.validateMap(metadataObject); + metaData = new Metadata((Map) metadataObject); } - memberNodes = _memberNodes; - _validateKeys(); + this.memberNodes = memberNodes; + validateKeys(); getCapabilities(); - } - - public Metadata getMetadata() { - return metaData; - } - - public ArrayList getMembers() { - return (ArrayList)entityTpl.get("members"); - } - - public String getDescription() { - return (String)entityTpl.get("description"); - - } - - public ArrayList getMemberNodes() { - return memberNodes; - } - - private void _validateKeys() { - for(String key: entityTpl.keySet()) { - boolean bFound = false; - for(String sect: SECTIONS) { - if(key.equals(sect)) { - bFound = true; - break; - } - } - if(!bFound) { + } + + public Metadata getMetadata() { + return metaData; + } + + public ArrayList getMembers() { + return (ArrayList) entityTpl.get("members"); + } + + public String getDescription() { + return (String) entityTpl.get("description"); + + } + + public ArrayList getMemberNodes() { + return memberNodes; + } + + private void validateKeys() { + for (String key : entityTpl.keySet()) { + boolean bFound = false; + for (String sect : SECTIONS) { + if (key.equals(sect)) { + bFound = true; + break; + } + } + if (!bFound) { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE183", String.format( "UnknownFieldError: Groups \"%s\" contains unknown field \"%s\"", - name,key))); - } - } - } - - @Override - public String toString() { - return "Group{" + - "name='" + name + '\'' + - ", tpl=" + tpl + - ", memberNodes=" + memberNodes + - ", customDef=" + customDef + - ", 
metaData=" + metaData + - '}'; - } - - public int compareTo(Group other){ - if(this.equals(other)) - return 0; - return this.getName().compareTo(other.getName()) == 0 ? this.getType().compareTo(other.getType()) : this.getName().compareTo(other.getName()); - } + name, key))); + } + } + } + + @Override + public String toString() { + return "Group{" + + "name='" + name + '\'' + + ", tpl=" + tpl + + ", memberNodes=" + memberNodes + + ", customDef=" + customDef + + ", metaData=" + metaData + + '}'; + } + + public int compareTo(Group other) { + if (this.equals(other)) { + return 0; + } + return this.getName().compareTo(other.getName()) == 0 ? this.getType().compareTo(other.getType()) : this.getName().compareTo(other.getName()); + } } /*python diff --git a/src/main/java/org/onap/sdc/toscaparser/api/ImportsLoader.java b/src/main/java/org/onap/sdc/toscaparser/api/ImportsLoader.java index 5ef639b..019adb3 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/ImportsLoader.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/ImportsLoader.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -37,202 +37,197 @@ import java.util.*; public class ImportsLoader { - private static Logger log = LoggerFactory.getLogger(ImportsLoader.class.getName()); - private static final String FILE = "file"; - private static final String REPOSITORY = "repository"; - private static final String NAMESPACE_URI = "namespace_uri"; - private static final String NAMESPACE_PREFIX = "namespace_prefix"; + private static Logger log = LoggerFactory.getLogger(ImportsLoader.class.getName()); + private static final String FILE = "file"; + private static final String REPOSITORY = "repository"; + private static final String NAMESPACE_URI = "namespace_uri"; + private static final String NAMESPACE_PREFIX = "namespace_prefix"; private String IMPORTS_SECTION[] = {FILE, REPOSITORY, NAMESPACE_URI, NAMESPACE_PREFIX}; - - private ArrayList importslist; - private String path; - private ArrayList typeDefinitionList; - - private LinkedHashMap customDefs; - private LinkedHashMap allCustomDefs; - private ArrayList> nestedToscaTpls; - private LinkedHashMap repositories; - - @SuppressWarnings("unchecked") - public ImportsLoader(ArrayList_importslist, - String _path, - Object _typeDefinitionList, - LinkedHashMap tpl) { - - this.importslist = _importslist; - customDefs = new LinkedHashMap(); - allCustomDefs = new LinkedHashMap(); - nestedToscaTpls = new ArrayList>(); - if((_path == null || _path.isEmpty()) && tpl == null) { - //msg = _('Input tosca template is not provided.') - //log.warning(msg) - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE184", "ValidationError: Input tosca template is not provided")); - } - - this.path = _path; - this.repositories = new LinkedHashMap(); - - if(tpl != null && tpl.get("repositories") != null) { - this.repositories = (LinkedHashMap)tpl.get("repositories"); - } - this.typeDefinitionList = new ArrayList(); - if(_typeDefinitionList != null) { - if(_typeDefinitionList instanceof ArrayList) { - this.typeDefinitionList = 
(ArrayList)_typeDefinitionList; - } - else { - this.typeDefinitionList.add((String)_typeDefinitionList); - } - } - _validateAndLoadImports(); - } - - public LinkedHashMap getCustomDefs() { - return allCustomDefs; - } - - public ArrayList> getNestedToscaTpls() { - return nestedToscaTpls; + + private ArrayList importslist; + private String path; + private ArrayList typeDefinitionList; + + private LinkedHashMap customDefs; + private LinkedHashMap allCustomDefs; + private ArrayList> nestedToscaTpls; + private LinkedHashMap repositories; + + @SuppressWarnings("unchecked") + public ImportsLoader(ArrayList _importslist, + String _path, + Object _typeDefinitionList, + LinkedHashMap tpl) { + + this.importslist = _importslist; + customDefs = new LinkedHashMap(); + allCustomDefs = new LinkedHashMap(); + nestedToscaTpls = new ArrayList>(); + if ((_path == null || _path.isEmpty()) && tpl == null) { + //msg = _('Input tosca template is not provided.') + //log.warning(msg) + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE184", "ValidationError: Input tosca template is not provided")); + } + + this.path = _path; + this.repositories = new LinkedHashMap(); + + if (tpl != null && tpl.get("repositories") != null) { + this.repositories = (LinkedHashMap) tpl.get("repositories"); + } + this.typeDefinitionList = new ArrayList(); + if (_typeDefinitionList != null) { + if (_typeDefinitionList instanceof ArrayList) { + this.typeDefinitionList = (ArrayList) _typeDefinitionList; + } else { + this.typeDefinitionList.add((String) _typeDefinitionList); + } + } + _validateAndLoadImports(); + } + + public LinkedHashMap getCustomDefs() { + return allCustomDefs; } - - @SuppressWarnings({ "unchecked", "unused" }) - public void _validateAndLoadImports() { - Set importNames = new HashSet(); - - if(importslist == null) { + + public ArrayList> getNestedToscaTpls() { + return nestedToscaTpls; + } + + @SuppressWarnings({"unchecked", "unused"}) + public void 
_validateAndLoadImports() { + Set importNames = new HashSet(); + + if (importslist == null) { //msg = _('"imports" keyname is defined without including templates.') //log.error(msg) - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE185", - "ValidationError: \"imports\" keyname is defined without including templates")); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE185", + "ValidationError: \"imports\" keyname is defined without including templates")); return; - } - - for(Object importDef: importslist) { - String fullFileName = null; - LinkedHashMap customType = null; - if(importDef instanceof LinkedHashMap) { - for(Map.Entry me: ((LinkedHashMap)importDef).entrySet()) { - String importName = me.getKey(); - Object importUri = me.getValue(); - if(importNames.contains(importName)) { + } + + for (Object importDef : importslist) { + String fullFileName = null; + LinkedHashMap customType = null; + if (importDef instanceof LinkedHashMap) { + for (Map.Entry me : ((LinkedHashMap) importDef).entrySet()) { + String importName = me.getKey(); + Object importUri = me.getValue(); + if (importNames.contains(importName)) { //msg = (_('Duplicate import name "%s" was found.') % import_name) //log.error(msg) ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE186", String.format( - "ValidationError: Duplicate import name \"%s\" was found",importName))); - } - importNames.add(importName); //??? 
- - // _loadImportTemplate returns 2 objects - Object ffnct[] = _loadImportTemplate(importName, importUri); - fullFileName = (String)ffnct[0]; - customType = (LinkedHashMap)ffnct[1]; - String namespacePrefix = ""; - if(importUri instanceof LinkedHashMap) { - namespacePrefix = (String) - ((LinkedHashMap)importUri).get(NAMESPACE_PREFIX); - } - - if(customType != null) { - TypeValidation tv = new TypeValidation(customType, importDef); + "ValidationError: Duplicate import name \"%s\" was found", importName))); + } + importNames.add(importName); //??? + + // _loadImportTemplate returns 2 objects + Object ffnct[] = _loadImportTemplate(importName, importUri); + fullFileName = (String) ffnct[0]; + customType = (LinkedHashMap) ffnct[1]; + String namespacePrefix = ""; + if (importUri instanceof LinkedHashMap) { + namespacePrefix = (String) + ((LinkedHashMap) importUri).get(NAMESPACE_PREFIX); + } + + if (customType != null) { + TypeValidation tv = new TypeValidation(customType, importDef); _updateCustomDefs(customType, namespacePrefix); - } - } - } - else { // old style of imports - // _loadImportTemplate returns 2 objects - Object ffnct[] = _loadImportTemplate(null,importDef); - fullFileName = (String)ffnct[0]; - customType = (LinkedHashMap)ffnct[1]; - if(customType != null) { - TypeValidation tv = new TypeValidation(customType,importDef); - _updateCustomDefs(customType,null); + } } - } + } else { // old style of imports + // _loadImportTemplate returns 2 objects + Object ffnct[] = _loadImportTemplate(null, importDef); + fullFileName = (String) ffnct[0]; + customType = (LinkedHashMap) ffnct[1]; + if (customType != null) { + TypeValidation tv = new TypeValidation(customType, importDef); + _updateCustomDefs(customType, null); + } + } _updateNestedToscaTpls(fullFileName, customType); - - } + + } } - /** - * This method is used to get consolidated custom definitions by passing custom Types from - * each import. 
The resultant collection is then passed back which contains all import - * definitions - * - * @param customType the custom type - * @param namespacePrefix the namespace prefix - */ - @SuppressWarnings("unchecked") - private void _updateCustomDefs(LinkedHashMap customType, String namespacePrefix) { - LinkedHashMap outerCustomTypes; - for(String typeDef: typeDefinitionList) { - if(typeDef.equals("imports")) { - customDefs.put("imports", customType.get(typeDef)); - if (allCustomDefs.isEmpty() || allCustomDefs.get("imports") == null){ - allCustomDefs.put("imports",customType.get(typeDef)); - } - else if (customType.get(typeDef) != null){ - Set allCustomImports = new HashSet<>((ArrayList)allCustomDefs.get("imports")); - allCustomImports.addAll((ArrayList) customType.get(typeDef)); - allCustomDefs.put("imports", new ArrayList<>(allCustomImports)); - } - } - else { - outerCustomTypes = (LinkedHashMap)customType.get(typeDef); - if(outerCustomTypes != null) { - if(namespacePrefix != null && !namespacePrefix.isEmpty()) { - LinkedHashMap prefixCustomTypes = new LinkedHashMap(); - for(Map.Entry me: outerCustomTypes.entrySet()) { - String typeDefKey = me.getKey(); - String nameSpacePrefixToKey = namespacePrefix + "." + typeDefKey; - prefixCustomTypes.put(nameSpacePrefixToKey, outerCustomTypes.get(typeDefKey)); - } - customDefs.putAll(prefixCustomTypes); - allCustomDefs.putAll(prefixCustomTypes); - } - else { - customDefs.putAll(outerCustomTypes); - allCustomDefs.putAll(outerCustomTypes); - } - } - } - } - } - - private void _updateNestedToscaTpls(String fullFileName,LinkedHashMap customTpl) { - if(fullFileName != null && customTpl != null) { - LinkedHashMap tt = new LinkedHashMap(); - tt.put(fullFileName, customTpl); - nestedToscaTpls.add(tt); - } + /** + * This method is used to get consolidated custom definitions by passing custom Types from + * each import. 
The resultant collection is then passed back which contains all import + * definitions + * + * @param customType the custom type + * @param namespacePrefix the namespace prefix + */ + @SuppressWarnings("unchecked") + private void _updateCustomDefs(LinkedHashMap customType, String namespacePrefix) { + LinkedHashMap outerCustomTypes; + for (String typeDef : typeDefinitionList) { + if (typeDef.equals("imports")) { + customDefs.put("imports", customType.get(typeDef)); + if (allCustomDefs.isEmpty() || allCustomDefs.get("imports") == null) { + allCustomDefs.put("imports", customType.get(typeDef)); + } else if (customType.get(typeDef) != null) { + Set allCustomImports = new HashSet<>((ArrayList) allCustomDefs.get("imports")); + allCustomImports.addAll((ArrayList) customType.get(typeDef)); + allCustomDefs.put("imports", new ArrayList<>(allCustomImports)); + } + } else { + outerCustomTypes = (LinkedHashMap) customType.get(typeDef); + if (outerCustomTypes != null) { + if (namespacePrefix != null && !namespacePrefix.isEmpty()) { + LinkedHashMap prefixCustomTypes = new LinkedHashMap(); + for (Map.Entry me : outerCustomTypes.entrySet()) { + String typeDefKey = me.getKey(); + String nameSpacePrefixToKey = namespacePrefix + "." 
+ typeDefKey; + prefixCustomTypes.put(nameSpacePrefixToKey, outerCustomTypes.get(typeDefKey)); + } + customDefs.putAll(prefixCustomTypes); + allCustomDefs.putAll(prefixCustomTypes); + } else { + customDefs.putAll(outerCustomTypes); + allCustomDefs.putAll(outerCustomTypes); + } + } + } + } + } + + private void _updateNestedToscaTpls(String fullFileName, LinkedHashMap customTpl) { + if (fullFileName != null && customTpl != null) { + LinkedHashMap tt = new LinkedHashMap(); + tt.put(fullFileName, customTpl); + nestedToscaTpls.add(tt); + } } - private void _validateImportKeys(String importName, LinkedHashMap importUri) { - if(importUri.get(FILE) == null) { + private void _validateImportKeys(String importName, LinkedHashMap importUri) { + if (importUri.get(FILE) == null) { //log.warning(_('Missing keyname "file" in import "%(name)s".') % {'name': import_name}) - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE187", String.format( - "MissingRequiredFieldError: Import of template \"%s\" is missing field %s",importName,FILE))); - } - for(String key: importUri.keySet()) { - boolean bFound = false; - for(String is: IMPORTS_SECTION) { - if(is.equals(key)) { - bFound = true; - break; - } - } - if(!bFound) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE187", String.format( + "MissingRequiredFieldError: Import of template \"%s\" is missing field %s", importName, FILE))); + } + for (String key : importUri.keySet()) { + boolean bFound = false; + for (String is : IMPORTS_SECTION) { + if (is.equals(key)) { + bFound = true; + break; + } + } + if (!bFound) { //log.warning(_('Unknown keyname "%(key)s" error in ' // 'imported definition "%(def)s".') // % {'key': key, 'def': import_name}) - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE188", String.format( - "UnknownFieldError: Import of template \"%s\" has unknown fiels %s",importName,key))); - } - } + 
ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE188", String.format( + "UnknownFieldError: Import of template \"%s\" has unknown fiels %s", importName, key))); + } + } } @SuppressWarnings("unchecked") - private Object[] _loadImportTemplate(String importName, Object importUriDef) { + private Object[] _loadImportTemplate(String importName, Object importUriDef) { /* This method loads the custom type definitions referenced in "imports" section of the TOSCA YAML template by determining whether each import @@ -251,251 +246,237 @@ public class ImportsLoader { | URL | URL | OK | +----------+--------+------------------------------+ */ - Object al[] = new Object[2]; + Object al[] = new Object[2]; boolean shortImportNotation = false; String fileName; String repository; - if(importUriDef instanceof LinkedHashMap) { - _validateImportKeys(importName, (LinkedHashMap)importUriDef); - fileName = (String)((LinkedHashMap)importUriDef).get(FILE); - repository = (String)((LinkedHashMap)importUriDef).get(REPOSITORY); - if(repository != null) { - if(!repositories.keySet().contains(repository)) { + if (importUriDef instanceof LinkedHashMap) { + _validateImportKeys(importName, (LinkedHashMap) importUriDef); + fileName = (String) ((LinkedHashMap) importUriDef).get(FILE); + repository = (String) ((LinkedHashMap) importUriDef).get(REPOSITORY); + if (repository != null) { + if (!repositories.keySet().contains(repository)) { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE189", String.format( - "InvalidPropertyValueError: Repository \"%s\" not found in \"%s\"", - repository,repositories.keySet().toString()))); - } + "InvalidPropertyValueError: Repository \"%s\" not found in \"%s\"", + repository, repositories.keySet().toString()))); + } } - } - else { - fileName = (String)importUriDef; + } else { + fileName = (String) importUriDef; repository = null; shortImportNotation = true; } - if(fileName == null || 
fileName.isEmpty()) { - //msg = (_('A template file name is not provided with import ' - // 'definition "%(import_name)s".') - // % {'import_name': import_name}) - //log.error(msg) - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE190", String.format( - "ValidationError: A template file name is not provided with import definition \"%s\"",importName))); - al[0] = al[1] = null; - return al; + if (fileName == null || fileName.isEmpty()) { + //msg = (_('A template file name is not provided with import ' + // 'definition "%(import_name)s".') + // % {'import_name': import_name}) + //log.error(msg) + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE190", String.format( + "ValidationError: A template file name is not provided with import definition \"%s\"", importName))); + al[0] = al[1] = null; + return al; } - if(UrlUtils.validateUrl(fileName)) { - try (InputStream input = new URL(fileName).openStream();) { - al[0] = fileName; - Yaml yaml = new Yaml(); - al[1] = yaml.load(input); - return al; - } - catch(IOException e) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE191", String.format( - "ImportError: \"%s\" loading YAML import from \"%s\"",e.getClass().getSimpleName(),fileName))); - al[0] = al[1] = null; - return al; - } - } - else if(repository == null || repository.isEmpty()) { - boolean aFile = false; + if (UrlUtils.validateUrl(fileName)) { + try (InputStream input = new URL(fileName).openStream();) { + al[0] = fileName; + Yaml yaml = new Yaml(); + al[1] = yaml.load(input); + return al; + } catch (IOException e) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE191", String.format( + "ImportError: \"%s\" loading YAML import from \"%s\"", e.getClass().getSimpleName(), fileName))); + al[0] = al[1] = null; + return al; + } + } else if (repository == null || repository.isEmpty()) { + boolean aFile = false; String 
importTemplate = null; - if(path != null && !path.isEmpty()) { - if(UrlUtils.validateUrl(path)) { - File fp = new File(path); - if(fp.isAbsolute()) { - String msg = String.format( - "ImportError: Absolute file name \"%s\" cannot be used in the URL-based input template \"%s\"", - fileName,path); - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE192", msg)); + if (path != null && !path.isEmpty()) { + if (UrlUtils.validateUrl(path)) { + File fp = new File(path); + if (fp.isAbsolute()) { + String msg = String.format( + "ImportError: Absolute file name \"%s\" cannot be used in the URL-based input template \"%s\"", + fileName, path); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE192", msg)); al[0] = al[1] = null; return al; - } - importTemplate = UrlUtils.joinUrl(path,fileName); - aFile = false; - } - else { - + } + importTemplate = UrlUtils.joinUrl(path, fileName); + aFile = false; + } else { + aFile = true; - File fp = new File(path); - if(fp.isFile()) { + File fp = new File(path); + if (fp.isFile()) { File fn = new File(fileName); - if(fn.isFile()) { + if (fn.isFile()) { importTemplate = fileName; - } - else { - String fullPath = Paths.get(path).toAbsolutePath().getParent().toString() + File.separator + fileName; - File ffp = new File(fullPath); - if(ffp.isFile()) { + } else { + String fullPath = Paths.get(path).toAbsolutePath().getParent().toString() + File.separator + fileName; + File ffp = new File(fullPath); + if (ffp.isFile()) { importTemplate = fullPath; - } - else { - String dirPath = Paths.get(path).toAbsolutePath().getParent().toString(); - String filePath; - if(Paths.get(fileName).getParent() != null) { - filePath = Paths.get(fileName).getParent().toString(); - } - else { - filePath = ""; - } - if(!filePath.isEmpty() && dirPath.endsWith(filePath)) { - String sFileName = Paths.get(fileName).getFileName().toString(); - importTemplate = dirPath + File.separator + sFileName; - File fit 
= new File(importTemplate); - if(!fit.isFile()) { + } else { + String dirPath = Paths.get(path).toAbsolutePath().getParent().toString(); + String filePath; + if (Paths.get(fileName).getParent() != null) { + filePath = Paths.get(fileName).getParent().toString(); + } else { + filePath = ""; + } + if (!filePath.isEmpty() && dirPath.endsWith(filePath)) { + String sFileName = Paths.get(fileName).getFileName().toString(); + importTemplate = dirPath + File.separator + sFileName; + File fit = new File(importTemplate); + if (!fit.isFile()) { //msg = (_('"%(import_template)s" is' // 'not a valid file') // % {'import_template': // import_template}) //log.error(msg) - String msg = String.format( - "ValueError: \"%s\" is not a valid file",importTemplate); - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE193", msg)); + String msg = String.format( + "ValueError: \"%s\" is not a valid file", importTemplate); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE193", msg)); log.debug("ImportsLoader - _loadImportTemplate - {}", msg); - } - } + } + } } } - } - } + } + } + } else { // template is pre-parsed + File fn = new File(fileName); + if (fn.isAbsolute() && fn.isFile()) { + aFile = true; + importTemplate = fileName; + } else { + String msg = String.format( + "Relative file name \"%s\" cannot be used in a pre-parsed input template", fileName); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE194", "ImportError: " + msg)); + al[0] = al[1] = null; + return al; + } } - else { // template is pre-parsed - File fn = new File(fileName); - if(fn.isAbsolute() && fn.isFile()) { - aFile = true; - importTemplate = fileName; - } - else { - String msg = String.format( - "Relative file name \"%s\" cannot be used in a pre-parsed input template",fileName); - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE194", "ImportError: " + msg)); - al[0] = al[1] 
= null; - return al; - } - } - - if(importTemplate == null || importTemplate.isEmpty()) { + + if (importTemplate == null || importTemplate.isEmpty()) { //log.error(_('Import "%(name)s" is not valid.') % // {'name': import_uri_def}) ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE195", String.format( - "ImportError: Import \"%s\" is not valid",importUriDef))); - al[0] = al[1] = null; - return al; + "ImportError: Import \"%s\" is not valid", importUriDef))); + al[0] = al[1] = null; + return al; } - + // for now, this must be a file - if(!aFile) { - log.error("ImportsLoader - _loadImportTemplate - Error!! Expected a file. importUriDef = {}, importTemplate = {}", importUriDef, importTemplate); + if (!aFile) { + log.error("ImportsLoader - _loadImportTemplate - Error!! Expected a file. importUriDef = {}, importTemplate = {}", importUriDef, importTemplate); ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE196", String.format( - "ImportError: Import \"%s\" is not a file",importName))); - al[0] = al[1] = null; - return al; + "ImportError: Import \"%s\" is not a file", importName))); + al[0] = al[1] = null; + return al; } try (BufferedReader br = new BufferedReader(new FileReader(importTemplate));) { - al[0] = importTemplate; + al[0] = importTemplate; - Yaml yaml = new Yaml(); - al[1] = yaml.load(br); - return al; - } - catch(FileNotFoundException e) { + Yaml yaml = new Yaml(); + al[1] = yaml.load(br); + return al; + } catch (FileNotFoundException e) { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE197", String.format( - "ImportError: Failed to load YAML from \"%s\"" + e,importName))); - al[0] = al[1] = null; - return al; - } - catch(Exception e) { + "ImportError: Failed to load YAML from \"%s\"" + e, importName))); + al[0] = al[1] = null; + return al; + } catch (Exception e) { ThreadLocalsHolder.getCollector().appendValidationIssue(new 
JToscaValidationIssue("JE198", String.format( - "ImportError: Exception from SnakeYAML file = \"%s\"" + e,importName))); - al[0] = al[1] = null; - return al; + "ImportError: Exception from SnakeYAML file = \"%s\"" + e, importName))); + al[0] = al[1] = null; + return al; } } - - if(shortImportNotation) { + + if (shortImportNotation) { //log.error(_('Import "%(name)s" is not valid.') % import_uri_def) ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE199", String.format( - "ImportError: Import \"%s\" is not valid",importName))); - al[0] = al[1] = null; - return al; + "ImportError: Import \"%s\" is not valid", importName))); + al[0] = al[1] = null; + return al; } - + String fullUrl = ""; - String repoUrl = ""; - if(repository != null && !repository.isEmpty()) { - if(repositories != null) { - for(String repoName: repositories.keySet()) { - if(repoName.equals(repository)) { - Object repoDef = repositories.get(repoName); - if(repoDef instanceof String) { - repoUrl = (String)repoDef; - } - else if(repoDef instanceof LinkedHashMap) { - repoUrl = (String)((LinkedHashMap)repoDef).get("url"); - } - // Remove leading, ending spaces and strip - // the last character if "/" - repoUrl = repoUrl.trim(); - if(repoUrl.endsWith("/")) { - repoUrl = repoUrl.substring(0,repoUrl.length()-1); - } - fullUrl = repoUrl + "/" + fileName; - break; - } - } - } - if(fullUrl.isEmpty()) { + String repoUrl = ""; + if (repository != null && !repository.isEmpty()) { + if (repositories != null) { + for (String repoName : repositories.keySet()) { + if (repoName.equals(repository)) { + Object repoDef = repositories.get(repoName); + if (repoDef instanceof String) { + repoUrl = (String) repoDef; + } else if (repoDef instanceof LinkedHashMap) { + repoUrl = (String) ((LinkedHashMap) repoDef).get("url"); + } + // Remove leading, ending spaces and strip + // the last character if "/" + repoUrl = repoUrl.trim(); + if (repoUrl.endsWith("/")) { + repoUrl = repoUrl.substring(0, 
repoUrl.length() - 1); + } + fullUrl = repoUrl + "/" + fileName; + break; + } + } + } + if (fullUrl.isEmpty()) { String msg = String.format( - "referenced repository \"%s\" in import definition \"%s\" not found", - repository,importName); - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE200", "ImportError: " + msg)); - al[0] = al[1] = null; - return al; - } - } - if(UrlUtils.validateUrl(fullUrl)) { - try (InputStream input = new URL(fullUrl).openStream();) { - al[0] = fullUrl; - Yaml yaml = new Yaml(); - al[1] = yaml.load(input); - return al; - } - catch(IOException e) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE201", String.format( - "ImportError: Exception loading YAML import from \"%s\"",fullUrl))); - al[0] = al[1] = null; - return al; - } + "referenced repository \"%s\" in import definition \"%s\" not found", + repository, importName); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE200", "ImportError: " + msg)); + al[0] = al[1] = null; + return al; + } } - else { + if (UrlUtils.validateUrl(fullUrl)) { + try (InputStream input = new URL(fullUrl).openStream();) { + al[0] = fullUrl; + Yaml yaml = new Yaml(); + al[1] = yaml.load(input); + return al; + } catch (IOException e) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE201", String.format( + "ImportError: Exception loading YAML import from \"%s\"", fullUrl))); + al[0] = al[1] = null; + return al; + } + } else { String msg = String.format( - "repository URL \"%s\" in import definition \"%s\" is not valid", - repoUrl,importName); - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE202", "ImportError: " + msg)); + "repository URL \"%s\" in import definition \"%s\" is not valid", + repoUrl, importName); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE202", "ImportError: " + msg)); } 
- + // if we got here something is wrong with the flow... log.error("ImportsLoader - _loadImportTemplate - got to dead end (importName {})", importName); ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE203", String.format( - "ImportError: _loadImportTemplate got to dead end (importName %s)\n",importName))); + "ImportError: _loadImportTemplate got to dead end (importName %s)\n", importName))); al[0] = al[1] = null; return al; } - @Override - public String toString() { - return "ImportsLoader{" + - "IMPORTS_SECTION=" + Arrays.toString(IMPORTS_SECTION) + - ", importslist=" + importslist + - ", path='" + path + '\'' + - ", typeDefinitionList=" + typeDefinitionList + - ", customDefs=" + customDefs + - ", nestedToscaTpls=" + nestedToscaTpls + - ", repositories=" + repositories + - '}'; - } + @Override + public String toString() { + return "ImportsLoader{" + + "IMPORTS_SECTION=" + Arrays.toString(IMPORTS_SECTION) + + ", importslist=" + importslist + + ", path='" + path + '\'' + + ", typeDefinitionList=" + typeDefinitionList + + ", customDefs=" + customDefs + + ", nestedToscaTpls=" + nestedToscaTpls + + ", repositories=" + repositories + + '}'; + } } /*python diff --git a/src/main/java/org/onap/sdc/toscaparser/api/NodeTemplate.java b/src/main/java/org/onap/sdc/toscaparser/api/NodeTemplate.java index 6a2e9f6..4fabe38 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/NodeTemplate.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/NodeTemplate.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -20,532 +20,525 @@ package org.onap.sdc.toscaparser.api; -import static org.onap.sdc.toscaparser.api.elements.EntityType.TOSCA_DEF; - -import com.google.common.collect.Lists; import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.elements.EntityType; +import org.onap.sdc.toscaparser.api.elements.InterfacesDef; +import org.onap.sdc.toscaparser.api.elements.Metadata; +import org.onap.sdc.toscaparser.api.elements.NodeType; +import org.onap.sdc.toscaparser.api.elements.RelationshipType; +import org.onap.sdc.toscaparser.api.utils.CopyUtils; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; import java.util.ArrayList; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; -import org.onap.sdc.toscaparser.api.elements.*; -import org.onap.sdc.toscaparser.api.utils.CopyUtils; -import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; +import static org.onap.sdc.toscaparser.api.elements.EntityType.TOSCA_DEF; public class NodeTemplate extends EntityTemplate { - - private LinkedHashMap templates; - private LinkedHashMap customDef; - private ArrayList availableRelTpls; - private LinkedHashMap availableRelTypes; - private LinkedHashMap related; - private ArrayList relationshipTpl; - private LinkedHashMap _relationships; - private SubstitutionMappings subMappingToscaTemplate; - private TopologyTemplate originComponentTemplate; - private Metadata metadata; - - private static final String METADATA = "metadata"; - - public NodeTemplate(String name, - LinkedHashMap ntnodeTemplates, - LinkedHashMap ntcustomDef, - ArrayList ntavailableRelTpls, - LinkedHashMap ntavailableRelTypes) { - this( name, ntnodeTemplates, ntcustomDef, ntavailableRelTpls, - ntavailableRelTypes, null); - } - - @SuppressWarnings("unchecked") - public NodeTemplate(String name, - LinkedHashMap ntnodeTemplates, - LinkedHashMap ntcustomDef, - ArrayList ntavailableRelTpls, - LinkedHashMap ntavailableRelTypes, - NodeTemplate 
parentNodeTemplate) { - - super(name, (LinkedHashMap)ntnodeTemplates.get(name), - "node_type", ntcustomDef, parentNodeTemplate); - - templates = ntnodeTemplates; - _validateFields((LinkedHashMap)templates.get(name)); - customDef = ntcustomDef; - related = new LinkedHashMap(); - relationshipTpl = new ArrayList(); - availableRelTpls = ntavailableRelTpls; - availableRelTypes = ntavailableRelTypes; - _relationships = new LinkedHashMap(); - subMappingToscaTemplate = null; - metadata = _metaData(); - } - - @SuppressWarnings("unchecked") - public LinkedHashMap getRelationships() { - if(_relationships.isEmpty()) { - List requires = getRequirements().getAll(); - if(requires != null && requires instanceof List) { - for(RequirementAssignment r: requires) { - LinkedHashMap explicit = _getExplicitRelationship(r); - if(explicit != null) { - // _relationships.putAll(explicit)... - for(Map.Entry ee: explicit.entrySet()) { - _relationships.put(ee.getKey(), ee.getValue()); - } - } - } - } - } - return _relationships; - } - - @SuppressWarnings("unchecked") - private LinkedHashMap _getExplicitRelationship(RequirementAssignment req) { + + private LinkedHashMap templates; + private LinkedHashMap customDef; + private ArrayList availableRelTpls; + private LinkedHashMap availableRelTypes; + private LinkedHashMap related; + private ArrayList relationshipTpl; + private LinkedHashMap _relationships; + private SubstitutionMappings subMappingToscaTemplate; + private TopologyTemplate originComponentTemplate; + private Metadata metadata; + + private static final String METADATA = "metadata"; + + public NodeTemplate(String name, + LinkedHashMap ntnodeTemplates, + LinkedHashMap ntcustomDef, + ArrayList ntavailableRelTpls, + LinkedHashMap ntavailableRelTypes) { + this(name, ntnodeTemplates, ntcustomDef, ntavailableRelTpls, + ntavailableRelTypes, null); + } + + @SuppressWarnings("unchecked") + public NodeTemplate(String name, + LinkedHashMap ntnodeTemplates, + LinkedHashMap ntcustomDef, + ArrayList 
ntavailableRelTpls, + LinkedHashMap ntavailableRelTypes, + NodeTemplate parentNodeTemplate) { + + super(name, (LinkedHashMap) ntnodeTemplates.get(name), + "node_type", ntcustomDef, parentNodeTemplate); + + templates = ntnodeTemplates; + _validateFields((LinkedHashMap) templates.get(name)); + customDef = ntcustomDef; + related = new LinkedHashMap(); + relationshipTpl = new ArrayList(); + availableRelTpls = ntavailableRelTpls; + availableRelTypes = ntavailableRelTypes; + _relationships = new LinkedHashMap(); + subMappingToscaTemplate = null; + metadata = _metaData(); + } + + @SuppressWarnings("unchecked") + public LinkedHashMap getRelationships() { + if (_relationships.isEmpty()) { + List requires = getRequirements().getAll(); + if (requires != null && requires instanceof List) { + for (RequirementAssignment r : requires) { + LinkedHashMap explicit = _getExplicitRelationship(r); + if (explicit != null) { + // _relationships.putAll(explicit)... + for (Map.Entry ee : explicit.entrySet()) { + _relationships.put(ee.getKey(), ee.getValue()); + } + } + } + } + } + return _relationships; + } + + @SuppressWarnings("unchecked") + private LinkedHashMap _getExplicitRelationship(RequirementAssignment req) { // Handle explicit relationship // For example, // - req: // node: DBMS // relationship: tosca.relationships.HostedOn - - LinkedHashMap explicitRelation = new LinkedHashMap(); - String node = req.getNodeTemplateName(); - - if(node != null && !node.isEmpty()) { + + LinkedHashMap explicitRelation = new LinkedHashMap(); + String node = req.getNodeTemplateName(); + + if (node != null && !node.isEmpty()) { //msg = _('Lookup by TOSCA types is not supported. 
' // 'Requirement for "%s" can not be full-filled.') % self.name - boolean bFound = false; - for(String k: EntityType.TOSCA_DEF.keySet()) { - if(k.equals(node)) { - bFound = true; - break; - } - } - if(bFound || customDef.get(node) != null) { + boolean bFound = false; + for (String k : EntityType.TOSCA_DEF.keySet()) { + if (k.equals(node)) { + bFound = true; + break; + } + } + if (bFound || customDef.get(node) != null) { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE205", String.format( - "NotImplementedError: Lookup by TOSCA types is not supported. Requirement for \"%s\" can not be full-filled", - getName()))); + "NotImplementedError: Lookup by TOSCA types is not supported. Requirement for \"%s\" can not be full-filled", + getName()))); return null; - } - if(templates.get(node) == null) { + } + if (templates.get(node) == null) { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE206", String.format( - "KeyError: Node template \"%s\" was not found",node))); - return null; - } - NodeTemplate relatedTpl = new NodeTemplate(node,templates,customDef,null,null); - Object relationship = req.getRelationship(); - String relationshipString = null; + "KeyError: Node template \"%s\" was not found", node))); + return null; + } + NodeTemplate relatedTpl = new NodeTemplate(node, templates, customDef, null, null); + Object relationship = req.getRelationship(); + String relationshipString = null; // // here relationship can be a string or a LHM with 'type': - // check if its type has relationship defined - if(relationship == null) { - ArrayList parentReqs = ((NodeType)typeDefinition).getAllRequirements(); - if(parentReqs == null) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE207", "ValidationError: parent_req is null")); - } - else { + // check if its type has relationship defined + if (relationship == null) { + ArrayList parentReqs = ((NodeType) 
typeDefinition).getAllRequirements(); + if (parentReqs == null) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE207", "ValidationError: parent_req is null")); + } else { // for(String key: req.keySet()) { // boolean bFoundRel = false; - for(Object rdo: parentReqs) { - LinkedHashMap reqDict = (LinkedHashMap)rdo; - LinkedHashMap relDict = (LinkedHashMap)reqDict.get(req.getName()); - if(relDict != null) { - relationship = relDict.get("relationship"); - //BUG-python??? need to break twice? + for (Object rdo : parentReqs) { + LinkedHashMap reqDict = (LinkedHashMap) rdo; + LinkedHashMap relDict = (LinkedHashMap) reqDict.get(req.getName()); + if (relDict != null) { + relationship = relDict.get("relationship"); + //BUG-python??? need to break twice? // bFoundRel = true; - break; - } - } + break; + } + } // if(bFoundRel) { // break; // } // } - } - } - - if(relationship != null) { - // here relationship can be a string or a LHM with 'type': - if(relationship instanceof String) { - relationshipString = (String)relationship; - } - else if(relationship instanceof LinkedHashMap) { - relationshipString = (String)((LinkedHashMap)relationship).get("type"); - } - - boolean foundRelationshipTpl = false; - // apply available relationship templates if found - if(availableRelTpls != null) { - for(RelationshipTemplate tpl: availableRelTpls) { - if(tpl.getName().equals(relationshipString)) { - RelationshipType rtype = new RelationshipType(tpl.getType(),null,customDef); - explicitRelation.put(rtype, relatedTpl); - tpl.setTarget(relatedTpl); - tpl.setSource(this); - relationshipTpl.add(tpl); - foundRelationshipTpl = true; - } - } - } - // create relationship template object. 
- String relPrfx = EntityType.RELATIONSHIP_PREFIX; - if(!foundRelationshipTpl) { - if(relationship instanceof LinkedHashMap) { - relationshipString = (String)((LinkedHashMap)relationship).get("type"); - if(relationshipString != null) { - if(availableRelTypes != null && !availableRelTypes.isEmpty() && - availableRelTypes.get(relationshipString) != null) { - ; - } - else if(!(relationshipString).startsWith(relPrfx)) { - relationshipString = relPrfx + relationshipString; - } - } - else { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE208", String.format( - "MissingRequiredFieldError: \"relationship\" used in template \"%s\" is missing required field \"type\"", - relatedTpl.getName()))); - } - } - for(RelationshipType rtype: ((NodeType)typeDefinition).getRelationship().keySet()) { - if(rtype.getType().equals(relationshipString)) { - explicitRelation.put(rtype,relatedTpl); - relatedTpl._addRelationshipTemplate(req,rtype.getType(),this); - } - else if(availableRelTypes != null && !availableRelTypes.isEmpty()) { - LinkedHashMap relTypeDef = (LinkedHashMap)availableRelTypes.get(relationshipString); - if(relTypeDef != null) { - String superType = (String)relTypeDef.get("derived_from"); - if(superType != null) { - if(!superType.startsWith(relPrfx)) { - superType = relPrfx + superType; - } - if(rtype.getType().equals(superType)) { - explicitRelation.put(rtype,relatedTpl); - relatedTpl._addRelationshipTemplate(req,rtype.getType(),this); - } - } - } - } - } - } - } - } - return explicitRelation; - } - - @SuppressWarnings("unchecked") - private void _addRelationshipTemplate(RequirementAssignment requirement, String rtype, NodeTemplate source) { - LinkedHashMap req = new LinkedHashMap<>(); - req.put("relationship", CopyUtils.copyLhmOrAl(requirement.getRelationship())); - req.put("type",rtype); - RelationshipTemplate tpl = new RelationshipTemplate(req, rtype, customDef, this, source, getParentNodeTemplate()); - relationshipTpl.add(tpl); - } 
- - public ArrayList getRelationshipTemplate() { - return relationshipTpl; - } - - void _addNext(NodeTemplate nodetpl,RelationshipType relationship) { - related.put(nodetpl,relationship); - } - - public ArrayList getRelatedNodes() { - if(related.isEmpty()) { - for(Map.Entry me: ((NodeType)typeDefinition).getRelationship().entrySet()) { - RelationshipType relation = me.getKey(); - NodeType node = me.getValue(); - for(String tpl: templates.keySet()) { - if(tpl.equals(node.getType())) { - //BUG.. python has - // self.related[NodeTemplate(tpl)] = relation - // but NodeTemplate doesn't have a constructor with just name... - //???? - related.put(new NodeTemplate(tpl,null,null,null,null),relation); - } - } - } - } - return new ArrayList(related.keySet()); - } - - public void validate(/*tosca_tpl=none is not used...*/) { + } + } + + if (relationship != null) { + // here relationship can be a string or a LHM with 'type': + if (relationship instanceof String) { + relationshipString = (String) relationship; + } else if (relationship instanceof LinkedHashMap) { + relationshipString = (String) ((LinkedHashMap) relationship).get("type"); + } + + boolean foundRelationshipTpl = false; + // apply available relationship templates if found + if (availableRelTpls != null) { + for (RelationshipTemplate tpl : availableRelTpls) { + if (tpl.getName().equals(relationshipString)) { + RelationshipType rtype = new RelationshipType(tpl.getType(), null, customDef); + explicitRelation.put(rtype, relatedTpl); + tpl.setTarget(relatedTpl); + tpl.setSource(this); + relationshipTpl.add(tpl); + foundRelationshipTpl = true; + } + } + } + // create relationship template object. 
+ String relPrfx = EntityType.RELATIONSHIP_PREFIX; + if (!foundRelationshipTpl) { + if (relationship instanceof LinkedHashMap) { + relationshipString = (String) ((LinkedHashMap) relationship).get("type"); + if (relationshipString != null) { + if (availableRelTypes != null && !availableRelTypes.isEmpty() && + availableRelTypes.get(relationshipString) != null) { + ; + } else if (!(relationshipString).startsWith(relPrfx)) { + relationshipString = relPrfx + relationshipString; + } + } else { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE208", String.format( + "MissingRequiredFieldError: \"relationship\" used in template \"%s\" is missing required field \"type\"", + relatedTpl.getName()))); + } + } + for (RelationshipType rtype : ((NodeType) typeDefinition).getRelationship().keySet()) { + if (rtype.getType().equals(relationshipString)) { + explicitRelation.put(rtype, relatedTpl); + relatedTpl._addRelationshipTemplate(req, rtype.getType(), this); + } else if (availableRelTypes != null && !availableRelTypes.isEmpty()) { + LinkedHashMap relTypeDef = (LinkedHashMap) availableRelTypes.get(relationshipString); + if (relTypeDef != null) { + String superType = (String) relTypeDef.get("derived_from"); + if (superType != null) { + if (!superType.startsWith(relPrfx)) { + superType = relPrfx + superType; + } + if (rtype.getType().equals(superType)) { + explicitRelation.put(rtype, relatedTpl); + relatedTpl._addRelationshipTemplate(req, rtype.getType(), this); + } + } + } + } + } + } + } + } + return explicitRelation; + } + + @SuppressWarnings("unchecked") + private void _addRelationshipTemplate(RequirementAssignment requirement, String rtype, NodeTemplate source) { + LinkedHashMap req = new LinkedHashMap<>(); + req.put("relationship", CopyUtils.copyLhmOrAl(requirement.getRelationship())); + req.put("type", rtype); + RelationshipTemplate tpl = new RelationshipTemplate(req, rtype, customDef, this, source, getParentNodeTemplate()); + 
relationshipTpl.add(tpl); + } + + public ArrayList getRelationshipTemplate() { + return relationshipTpl; + } + + void _addNext(NodeTemplate nodetpl, RelationshipType relationship) { + related.put(nodetpl, relationship); + } + + public ArrayList getRelatedNodes() { + if (related.isEmpty()) { + for (Map.Entry me : ((NodeType) typeDefinition).getRelationship().entrySet()) { + RelationshipType relation = me.getKey(); + NodeType node = me.getValue(); + for (String tpl : templates.keySet()) { + if (tpl.equals(node.getType())) { + //BUG.. python has + // self.related[NodeTemplate(tpl)] = relation + // but NodeTemplate doesn't have a constructor with just name... + //???? + related.put(new NodeTemplate(tpl, null, null, null, null), relation); + } + } + } + } + return new ArrayList(related.keySet()); + } + + public void validate(/*tosca_tpl=none is not used...*/) { _validateCapabilities(); _validateRequirements(); - _validateProperties(entityTpl,(NodeType)typeDefinition); + _validateProperties(entityTpl, (NodeType) typeDefinition); _validateInterfaces(); - for(Property prop: getPropertiesObjects()) { - prop.validate(); + for (Property prop : getPropertiesObjects()) { + prop.validate(); } - } + } - public Object getPropertyValueFromTemplatesByName(String propertyName) { - LinkedHashMap nodeObject = (LinkedHashMap) templates.get(name); + public Object getPropertyValueFromTemplatesByName(String propertyName) { + LinkedHashMap nodeObject = (LinkedHashMap) templates.get(name); if (nodeObject != null) { - LinkedHashMap properties = (LinkedHashMap)nodeObject.get(PROPERTIES); + LinkedHashMap properties = (LinkedHashMap) nodeObject.get(PROPERTIES); if (properties != null) { return properties.get(propertyName); } } - return null; - } - - private Metadata _metaData() { - if(entityTpl.get(METADATA) != null) { - return new Metadata((Map)entityTpl.get(METADATA)); - } - else { - return null; - } - } - - @SuppressWarnings("unchecked") - private void _validateRequirements() { - ArrayList 
typeRequires = ((NodeType)typeDefinition).getAllRequirements(); - ArrayList allowedReqs = new ArrayList<>(); - allowedReqs.add("template"); - if(typeRequires != null) { - for(Object to: typeRequires) { - LinkedHashMap treq = (LinkedHashMap)to; - for(Map.Entry me: treq.entrySet()) { - String key = me.getKey(); - Object value = me.getValue(); - allowedReqs.add(key); - if(value instanceof LinkedHashMap) { - allowedReqs.addAll(((LinkedHashMap)value).keySet()); - } - } - - } - } - - ArrayList requires = (ArrayList)((NodeType)typeDefinition).getValue(REQUIREMENTS, entityTpl, false); - if(requires != null) { - if(!(requires instanceof ArrayList)) { + return null; + } + + private Metadata _metaData() { + if (entityTpl.get(METADATA) != null) { + return new Metadata((Map) entityTpl.get(METADATA)); + } else { + return null; + } + } + + @SuppressWarnings("unchecked") + private void _validateRequirements() { + ArrayList typeRequires = ((NodeType) typeDefinition).getAllRequirements(); + ArrayList allowedReqs = new ArrayList<>(); + allowedReqs.add("template"); + if (typeRequires != null) { + for (Object to : typeRequires) { + LinkedHashMap treq = (LinkedHashMap) to; + for (Map.Entry me : treq.entrySet()) { + String key = me.getKey(); + Object value = me.getValue(); + allowedReqs.add(key); + if (value instanceof LinkedHashMap) { + allowedReqs.addAll(((LinkedHashMap) value).keySet()); + } + } + + } + } + + ArrayList requires = (ArrayList) ((NodeType) typeDefinition).getValue(REQUIREMENTS, entityTpl, false); + if (requires != null) { + if (!(requires instanceof ArrayList)) { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE209", String.format( - "TypeMismatchError: \"requirements\" of template \"%s\" are not of type \"list\"",name))); - } - else { - for(Object ro: requires) { - LinkedHashMap req = (LinkedHashMap)ro; - for(Map.Entry me: req.entrySet()) { - String rl = me.getKey(); - Object vo = me.getValue(); - if(vo instanceof LinkedHashMap) { - 
LinkedHashMap value = (LinkedHashMap)vo; - _validateRequirementsKeys(value); - _validateRequirementsProperties(value); - allowedReqs.add(rl); - } - } - _commonValidateField(req,allowedReqs,"requirements"); + "TypeMismatchError: \"requirements\" of template \"%s\" are not of type \"list\"", name))); + } else { + for (Object ro : requires) { + LinkedHashMap req = (LinkedHashMap) ro; + for (Map.Entry me : req.entrySet()) { + String rl = me.getKey(); + Object vo = me.getValue(); + if (vo instanceof LinkedHashMap) { + LinkedHashMap value = (LinkedHashMap) vo; + _validateRequirementsKeys(value); + _validateRequirementsProperties(value); + allowedReqs.add(rl); + } + } + _commonValidateField(req, allowedReqs, "requirements"); } - } - } - } + } + } + } - @SuppressWarnings("unchecked") - private void _validateRequirementsProperties(LinkedHashMap reqs) { + @SuppressWarnings("unchecked") + private void _validateRequirementsProperties(LinkedHashMap reqs) { // TO-DO(anyone): Only occurrences property of the requirements is // validated here. Validation of other requirement properties are being // validated in different files. Better to keep all the requirements // properties validation here. 
- for(Map.Entry me: reqs.entrySet()) { - if(me.getKey().equals("occurrences")) { - ArrayList val = (ArrayList)me.getValue(); - _validateOccurrences(val); - } - - } - } - - private void _validateOccurrences(ArrayList occurrences) { - DataEntity.validateDatatype("list",occurrences,null,null,null); - for(Object val: occurrences) { - DataEntity.validateDatatype("Integer",val,null,null,null); + for (Map.Entry me : reqs.entrySet()) { + if (me.getKey().equals("occurrences")) { + ArrayList val = (ArrayList) me.getValue(); + _validateOccurrences(val); + } + + } + } + + private void _validateOccurrences(ArrayList occurrences) { + DataEntity.validateDatatype("list", occurrences, null, null, null); + for (Object val : occurrences) { + DataEntity.validateDatatype("Integer", val, null, null, null); } - if(occurrences.size() != 2 || - !(0 <= (int)occurrences.get(0) && (int)occurrences.get(0) <= (int)occurrences.get(1)) || - (int)occurrences.get(1) == 0) { + if (occurrences.size() != 2 || + !(0 <= (int) occurrences.get(0) && (int) occurrences.get(0) <= (int) occurrences.get(1)) || + (int) occurrences.get(1) == 0) { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE210", String.format( - "InvalidPropertyValueError: property has invalid value %s",occurrences.toString()))); + "InvalidPropertyValueError: property has invalid value %s", occurrences.toString()))); } - } - - private void _validateRequirementsKeys(LinkedHashMap reqs) { - for(String key: reqs.keySet()) { - boolean bFound = false; - for(int i=0; i< REQUIREMENTS_SECTION.length; i++) { - if(key.equals(REQUIREMENTS_SECTION[i])) { - bFound = true; - break; - } - } - if(!bFound) { + } + + private void _validateRequirementsKeys(LinkedHashMap reqs) { + for (String key : reqs.keySet()) { + boolean bFound = false; + for (int i = 0; i < REQUIREMENTS_SECTION.length; i++) { + if (key.equals(REQUIREMENTS_SECTION[i])) { + bFound = true; + break; + } + } + if (!bFound) { 
ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE211", String.format( - "UnknownFieldError: \"requirements\" of template \"%s\" contains unknown field \"%s\"",name,key))); - } - } - } - - @SuppressWarnings("unchecked") - private void _validateInterfaces() { - LinkedHashMap ifaces = (LinkedHashMap) - ((NodeType)typeDefinition).getValue(INTERFACES, entityTpl, false); - if(ifaces != null) { - for(Map.Entry me: ifaces.entrySet()) { - String iname = me.getKey(); - LinkedHashMap value = (LinkedHashMap)me.getValue(); - if(iname.equals(InterfacesDef.LIFECYCLE) || iname.equals(InterfacesDef.LIFECYCLE_SHORTNAME)) { - // maybe we should convert [] to arraylist??? - ArrayList inlo = new ArrayList<>(); - for(int i=0; i ifaces = (LinkedHashMap) + ((NodeType) typeDefinition).getValue(INTERFACES, entityTpl, false); + if (ifaces != null) { + for (Map.Entry me : ifaces.entrySet()) { + String iname = me.getKey(); + LinkedHashMap value = (LinkedHashMap) me.getValue(); + if (iname.equals(InterfacesDef.LIFECYCLE) || iname.equals(InterfacesDef.LIFECYCLE_SHORTNAME)) { + // maybe we should convert [] to arraylist??? + ArrayList inlo = new ArrayList<>(); + for (int i = 0; i < InterfacesDef.INTERFACE_NODE_LIFECYCLE_OPERATIONS.length; i++) { + inlo.add(InterfacesDef.INTERFACE_NODE_LIFECYCLE_OPERATIONS[i]); + } + _commonValidateField(value, inlo, "interfaces"); + } else if (iname.equals(InterfacesDef.CONFIGURE) || iname.equals(InterfacesDef.CONFIGURE_SHORTNAME)) { + // maybe we should convert [] to arraylist??? 
+ ArrayList irco = new ArrayList<>(); + for (int i = 0; i < InterfacesDef.INTERFACE_RELATIONSHIP_CONFIGURE_OPERATIONS.length; i++) { + irco.add(InterfacesDef.INTERFACE_RELATIONSHIP_CONFIGURE_OPERATIONS[i]); + } + _commonValidateField(value, irco, "interfaces"); + } else if (((NodeType) typeDefinition).getInterfaces().keySet().contains(iname)) { + _commonValidateField(value, _collectCustomIfaceOperations(iname), "interfaces"); + } else { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE212", String.format( + "UnknownFieldError: \"interfaces\" of template \"%s\" contains unknown field %s", name, iname))); + } + } + } + } + + @SuppressWarnings("unchecked") + private ArrayList _collectCustomIfaceOperations(String iname) { + ArrayList allowedOperations = new ArrayList<>(); + LinkedHashMap nodetypeIfaceDef = (LinkedHashMap) ((NodeType) + typeDefinition).getInterfaces().get(iname); + allowedOperations.addAll(nodetypeIfaceDef.keySet()); + String ifaceType = (String) nodetypeIfaceDef.get("type"); + if (ifaceType != null) { + LinkedHashMap ifaceTypeDef = null; + if (((NodeType) typeDefinition).customDef != null) { + ifaceTypeDef = (LinkedHashMap) ((NodeType) typeDefinition).customDef.get(ifaceType); + } + if (ifaceTypeDef == null) { + ifaceTypeDef = (LinkedHashMap) EntityType.TOSCA_DEF.get(ifaceType); + } + allowedOperations.addAll(ifaceTypeDef.keySet()); + } + // maybe we should convert [] to arraylist??? + ArrayList idrw = new ArrayList<>(); + for (int i = 0; i < InterfacesDef.INTERFACE_DEF_RESERVED_WORDS.length; i++) { + idrw.add(InterfacesDef.INTERFACE_DEF_RESERVED_WORDS[i]); + } + allowedOperations.removeAll(idrw); + return allowedOperations; + } + + /** + * Get all interface details for given node template.
+ * + * @return Map that contains the list of all interfaces and their definitions. + * If none found, an empty map will be returned. + */ + public Map> getAllInterfaceDetailsForNodeType() { + Map> interfaceMap = new LinkedHashMap<>(); + + // Get custom interface details + Map customInterfacesDetails = ((NodeType) typeDefinition).getInterfaces(); + // Get native interface details from tosca definitions + Object nativeInterfaceDetails = TOSCA_DEF.get(InterfacesDef.LIFECYCLE); + Map allInterfaceDetails = new LinkedHashMap<>(); + allInterfaceDetails.putAll(customInterfacesDetails); + if (nativeInterfaceDetails != null) { + allInterfaceDetails.put(InterfacesDef.LIFECYCLE, nativeInterfaceDetails); + } + + // Process all interface details from combined collection and return an interface Map with + // interface names and their definitions + for (Map.Entry me : allInterfaceDetails.entrySet()) { + ArrayList interfaces = new ArrayList<>(); + String interfaceType = me.getKey(); + Map interfaceValue = (Map) me.getValue(); + if (interfaceValue.containsKey("type")) { + interfaceType = (String) interfaceValue.get("type"); + } + + for (Map.Entry ve : interfaceValue.entrySet()) { + // Filter type as this is a reserved key and not an operation + if (!ve.getKey().equals("type")) { + InterfacesDef iface = new InterfacesDef(typeDefinition, interfaceType, this, ve.getKey(), ve.getValue()); + interfaces.add(iface); } - else if(iname.equals(InterfacesDef.CONFIGURE) || iname.equals(InterfacesDef.CONFIGURE_SHORTNAME)) { - // maybe we should convert [] to arraylist??? 
- ArrayList irco = new ArrayList<>(); - for(int i=0; i nodetemplate) { + for (String ntname : nodetemplate.keySet()) { + boolean bFound = false; + for (int i = 0; i < SECTIONS.length; i++) { + if (ntname.equals(SECTIONS[i])) { + bFound = true; + break; } - else if(((NodeType)typeDefinition).getInterfaces().keySet().contains(iname)) { - _commonValidateField(value,_collectCustomIfaceOperations(iname),"interfaces"); - } - else { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE212", String.format( - "UnknownFieldError: \"interfaces\" of template \"%s\" contains unknown field %s",name,iname))); - } - } - } - } - - @SuppressWarnings("unchecked") - private ArrayList _collectCustomIfaceOperations(String iname) { - ArrayList allowedOperations = new ArrayList<>(); - LinkedHashMap nodetypeIfaceDef = (LinkedHashMap)((NodeType) - typeDefinition).getInterfaces().get(iname); - allowedOperations.addAll(nodetypeIfaceDef.keySet()); - String ifaceType = (String)nodetypeIfaceDef.get("type"); - if(ifaceType != null) { - LinkedHashMap ifaceTypeDef = null; - if(((NodeType)typeDefinition).customDef != null) { - ifaceTypeDef = (LinkedHashMap)((NodeType)typeDefinition).customDef.get(ifaceType); - } - if(ifaceTypeDef == null) { - ifaceTypeDef = (LinkedHashMap)EntityType.TOSCA_DEF.get(ifaceType); - } - allowedOperations.addAll(ifaceTypeDef.keySet()); - } - // maybe we should convert [] to arraylist??? - ArrayList idrw = new ArrayList<>(); - for(int i=0; i - * @return Map that contains the list of all interfaces and their definitions. - * If none found, an empty map will be returned. 
- */ - public Map> getAllInterfaceDetailsForNodeType(){ - Map> interfaceMap = new LinkedHashMap<>(); - - // Get custom interface details - Map customInterfacesDetails = ((NodeType)typeDefinition).getInterfaces(); - // Get native interface details from tosca definitions - Object nativeInterfaceDetails = TOSCA_DEF.get(InterfacesDef.LIFECYCLE); - Map allInterfaceDetails = new LinkedHashMap<>(); - allInterfaceDetails.putAll(customInterfacesDetails); - if (nativeInterfaceDetails != null){ - allInterfaceDetails.put(InterfacesDef.LIFECYCLE, nativeInterfaceDetails); - } - - // Process all interface details from combined collection and return an interface Map with - // interface names and their definitions - for(Map.Entry me: allInterfaceDetails.entrySet()) { - ArrayList interfaces = new ArrayList<>(); - String interfaceType = me.getKey(); - Map interfaceValue = (Map)me.getValue(); - if(interfaceValue.containsKey("type")){ - interfaceType = (String) interfaceValue.get("type"); - } - - for(Map.Entry ve: interfaceValue.entrySet()) { - // Filter type as this is a reserved key and not an operation - if(!ve.getKey().equals("type")){ - InterfacesDef iface = new InterfacesDef(typeDefinition, interfaceType,this, ve.getKey(), ve.getValue()); - interfaces.add(iface); - } - } - interfaceMap.put(interfaceType, interfaces); - } - return interfaceMap; - } - - private void _validateFields(LinkedHashMap nodetemplate) { - for(String ntname: nodetemplate.keySet()) { - boolean bFound = false; - for(int i=0; i< SECTIONS.length; i++) { - if(ntname.equals(SECTIONS[i])) { - bFound = true; - break; - } - } - if(!bFound) { - for(int i=0; i< SPECIAL_SECTIONS.length; i++) { - if(ntname.equals(SPECIAL_SECTIONS[i])) { - bFound = true; - break; - } - } - - } - if(!bFound) { + } + if (!bFound) { + for (int i = 0; i < SPECIAL_SECTIONS.length; i++) { + if (ntname.equals(SPECIAL_SECTIONS[i])) { + bFound = true; + break; + } + } + + } + if (!bFound) { 
ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE213", String.format( - "UnknownFieldError: Node template \"%s\" has unknown field \"%s\"",name,ntname))); - } - } - } - - // getter/setter - - // multilevel nesting - public SubstitutionMappings getSubMappingToscaTemplate() { - return subMappingToscaTemplate; - } - - public void setSubMappingToscaTemplate(SubstitutionMappings sm) { - subMappingToscaTemplate = sm; - } - - public Metadata getMetaData() { - return metadata; - } - - public void setMetaData(Metadata metadata) { - this.metadata = metadata; - } - - @Override - public String toString() { - return getName(); - } - - public TopologyTemplate getOriginComponentTemplate() { - return originComponentTemplate; - } - - public void setOriginComponentTemplate(TopologyTemplate originComponentTemplate) { - this.originComponentTemplate = originComponentTemplate; - } + "UnknownFieldError: Node template \"%s\" has unknown field \"%s\"", name, ntname))); + } + } + } + + // getter/setter + + // multilevel nesting + public SubstitutionMappings getSubMappingToscaTemplate() { + return subMappingToscaTemplate; + } + + public void setSubMappingToscaTemplate(SubstitutionMappings sm) { + subMappingToscaTemplate = sm; + } + + public Metadata getMetaData() { + return metadata; + } + + public void setMetaData(Metadata metadata) { + this.metadata = metadata; + } + + @Override + public String toString() { + return getName(); + } + + public TopologyTemplate getOriginComponentTemplate() { + return originComponentTemplate; + } + + public void setOriginComponentTemplate(TopologyTemplate originComponentTemplate) { + this.originComponentTemplate = originComponentTemplate; + } } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/Policy.java b/src/main/java/org/onap/sdc/toscaparser/api/Policy.java index 392a528..ca8ac55 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/Policy.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/Policy.java @@ -7,9 
+7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -31,138 +31,138 @@ import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; import org.onap.sdc.toscaparser.api.utils.ValidateUtils; public class Policy extends EntityTemplate { - - - static final String TYPE = "type"; - static final String METADATA = "metadata"; - static final String DESCRIPTION = "description"; - static final String PROPERTIES = "properties"; - static final String TARGETS = "targets"; - private static final String TRIGGERS = "triggers"; - private static final String SECTIONS[] = { - TYPE, METADATA, DESCRIPTION, PROPERTIES, TARGETS, TRIGGERS}; - - Metadata metaDataObject; - LinkedHashMap metaData = null; - ArrayList targetsList; // *** a list of NodeTemplate OR a list of Group *** - String targetsType; - ArrayList triggers; - LinkedHashMap properties; - - public Policy(String _name, - LinkedHashMap _policy, - ArrayList targetObjects, - String _targetsType, - LinkedHashMap _customDef) { - this(_name, _policy, targetObjects, _targetsType, _customDef, null); - } - - public Policy(String _name, - LinkedHashMap _policy, + + + static final String TYPE = "type"; + static final String METADATA = "metadata"; + static final String DESCRIPTION = "description"; + static final String PROPERTIES = "properties"; + static final String TARGETS = "targets"; + private static final String TRIGGERS = "triggers"; + private static final String SECTIONS[] = { + TYPE, METADATA, DESCRIPTION, PROPERTIES, TARGETS, TRIGGERS}; + + Metadata metaDataObject; + LinkedHashMap metaData = null; + ArrayList targetsList; // *** a 
list of NodeTemplate OR a list of Group *** + String targetsType; + ArrayList triggers; + LinkedHashMap properties; + + public Policy(String _name, + LinkedHashMap _policy, + ArrayList targetObjects, + String _targetsType, + LinkedHashMap _customDef) { + this(_name, _policy, targetObjects, _targetsType, _customDef, null); + } + + public Policy(String _name, + LinkedHashMap _policy, // ArrayList targetObjects, - ArrayList targetObjects, - String _targetsType, - LinkedHashMap _customDef, NodeTemplate parentNodeTemplate) { - super(_name,_policy,"policy_type",_customDef, parentNodeTemplate); - - if(_policy.get(METADATA) != null) { - metaData = (LinkedHashMap)_policy.get(METADATA); - ValidateUtils.validateMap(metaData); - metaDataObject = new Metadata(metaData); + ArrayList targetObjects, + String _targetsType, + LinkedHashMap _customDef, NodeTemplate parentNodeTemplate) { + super(_name, _policy, "policy_type", _customDef, parentNodeTemplate); + + if (_policy.get(METADATA) != null) { + metaData = (LinkedHashMap) _policy.get(METADATA); + ValidateUtils.validateMap(metaData); + metaDataObject = new Metadata(metaData); } targetsList = targetObjects; targetsType = _targetsType; - triggers = _triggers((LinkedHashMap)_policy.get(TRIGGERS)); + triggers = _triggers((LinkedHashMap) _policy.get(TRIGGERS)); properties = null; - if(_policy.get("properties") != null) { - properties = (LinkedHashMap)_policy.get("properties"); + if (_policy.get("properties") != null) { + properties = (LinkedHashMap) _policy.get("properties"); } _validateKeys(); - } - - public ArrayList getTargets() { - return (ArrayList)entityTpl.get("targets"); - } - - public ArrayList getDescription() { - return (ArrayList)entityTpl.get("description"); - } - - public ArrayList getmetadata() { - return (ArrayList)entityTpl.get("metadata"); - } - - public String getTargetsType() { - return targetsType; - } - - public Metadata getMetaDataObj() { - return metaDataObject; - } - - public LinkedHashMap getMetaData() { - 
return metaData; - } - - // public ArrayList getTargetsList() { - public ArrayList getTargetsList() { - return targetsList; - } - - // entityTemplate already has a different getProperties... - // this is to access the local properties variable - public LinkedHashMap getPolicyProperties() { - return properties; - } - - private ArrayList _triggers(LinkedHashMap triggers) { - ArrayList triggerObjs = new ArrayList<>(); - if(triggers != null) { - for(Map.Entry me: triggers.entrySet()) { - String tname = me.getKey(); - LinkedHashMap ttriggerTpl = - (LinkedHashMap)me.getValue(); - Triggers triggersObj = new Triggers(tname,ttriggerTpl); + } + + public ArrayList getTargets() { + return (ArrayList) entityTpl.get("targets"); + } + + public ArrayList getDescription() { + return (ArrayList) entityTpl.get("description"); + } + + public ArrayList getmetadata() { + return (ArrayList) entityTpl.get("metadata"); + } + + public String getTargetsType() { + return targetsType; + } + + public Metadata getMetaDataObj() { + return metaDataObject; + } + + public LinkedHashMap getMetaData() { + return metaData; + } + + // public ArrayList getTargetsList() { + public ArrayList getTargetsList() { + return targetsList; + } + + // entityTemplate already has a different getProperties... 
+ // this is to access the local properties variable + public LinkedHashMap getPolicyProperties() { + return properties; + } + + private ArrayList _triggers(LinkedHashMap triggers) { + ArrayList triggerObjs = new ArrayList<>(); + if (triggers != null) { + for (Map.Entry me : triggers.entrySet()) { + String tname = me.getKey(); + LinkedHashMap ttriggerTpl = + (LinkedHashMap) me.getValue(); + Triggers triggersObj = new Triggers(tname, ttriggerTpl); triggerObjs.add(triggersObj); - } - } - return triggerObjs; - } - - private void _validateKeys() { - for(String key: entityTpl.keySet()) { - boolean bFound = false; - for(int i=0; i customDef; - - public Property(Map.Entry propertyEntry){ + private static final Logger LOGGER = LoggerFactory.getLogger(Property.class.getName()); + + private static final String TYPE = "type"; + private static final String REQUIRED = "required"; + private static final String DESCRIPTION = "description"; + private static final String DEFAULT = "default"; + private static final String CONSTRAINTS = "constraints"; + private static String entrySchema = "entry_schema"; + private static String dataType = "datatypes"; + + private static final String[] PROPERTY_KEYS = { + TYPE, REQUIRED, DESCRIPTION, DEFAULT, CONSTRAINTS}; + + private static final String ENTRYTYPE = "type"; + private static final String ENTRYPROPERTIES = "properties"; + private static final String PATH_DELIMITER = "#"; + private static final String[] ENTRY_SCHEMA_KEYS = { + ENTRYTYPE, ENTRYPROPERTIES}; + + private String name; + private Object value; + private Schema schema; + private LinkedHashMap customDef; + + public Property(Map.Entry propertyEntry) { name = propertyEntry.getKey(); value = propertyEntry.getValue(); - } - public Property(String propname, - Object propvalue, - LinkedHashMap propschemaDict, - LinkedHashMap propcustomDef) { - + } + + public Property(String propname, + Object propvalue, + LinkedHashMap propschemaDict, + LinkedHashMap propcustomDef) { + name = propname; 
value = propvalue; customDef = propcustomDef; schema = new Schema(propname, propschemaDict); - } - - public String getType() { - return schema.getType(); - } - - public boolean isRequired() { - return schema.isRequired(); - } - - public String getDescription() { - return schema.getDescription(); - } - - public Object getDefault() { - return schema.getDefault(); - } - - public ArrayList getConstraints() { - return schema.getConstraints(); - } - - public LinkedHashMap getEntrySchema() { - return schema.getEntrySchema(); - } - - - public String getName() { - return name; - } - - public Object getValue() { - return value; - } - - // setter - public Object setValue(Object vob) { - value = vob; - return value; - } - - public void validate() { - // Validate if not a reference property - if(!Function.isFunction(value)) { - if(getType().equals(Schema.STRING)) { - value = value.toString(); - } - value = DataEntity.validateDatatype(getType(),value, - getEntrySchema(), - customDef, - name); - _validateConstraints(); - } - } - - private void _validateConstraints() { - if(getConstraints() != null) { - for(Constraint constraint: getConstraints()) { - constraint.validate(value); - } - } - } - - @Override - public String toString() { - return "Property{" + - "name='" + name + '\'' + - ", value=" + value + - ", schema=" + schema + - ", customDef=" + customDef + - '}'; - } + } + + public String getType() { + return schema.getType(); + } + + public boolean isRequired() { + return schema.isRequired(); + } + + public String getDescription() { + return schema.getDescription(); + } + + public Object getDefault() { + return schema.getDefault(); + } + + public ArrayList getConstraints() { + return schema.getConstraints(); + } + + public LinkedHashMap getEntrySchema() { + return schema.getEntrySchema(); + } + + + public String getName() { + return name; + } + + public Object getValue() { + return value; + } + + // setter + public Object setValue(Object vob) { + value = vob; + return value; + 
} + + public void validate() { + // Validate if not a reference property + if (!Function.isFunction(value)) { + if (getType().equals(Schema.STRING)) { + value = value.toString(); + } + value = DataEntity.validateDatatype(getType(), value, + getEntrySchema(), + customDef, + name); + validateConstraints(); + } + } + + private void validateConstraints() { + if (getConstraints() != null) { + for (Constraint constraint : getConstraints()) { + constraint.validate(value); + } + } + } + + @Override + public String toString() { + return "Property{" + + "name='" + name + '\'' + + ", value=" + value + + ", schema=" + schema + + ", customDef=" + customDef + + '}'; + } /** * Retrieves property value as list of strings if
* - the value is simple
* - the value is list of simple values
* - the provided path refers to a simple property inside a data type
- * @param propertyPath valid name of property for search.
- * If a name refers to a simple field inside a datatype, the property name should be defined with # delimiter.
* + * @param propertyPath valid name of property for search.
+ * If a name refers to a simple field inside a datatype, the property name should be defined with # delimiter.
* @return List of property values. If not found, empty list will be returned.
* If property value is a list either of simple fields or of simple fields inside a datatype, all values from the list should be returned */ public List getLeafPropertyValue(String propertyPath) { List propertyValueList = Collections.emptyList(); - if (logger.isDebugEnabled()) { - logger.debug("getLeafPropertyValue=> A new request: propertyPath: {}, value: {}", propertyPath, getValue()); - } - if (propertyPath == null || getValue() == null || - //if entry_schema disappears, it is datatype, - // otherwise it is map of simple types - should be ignored - isValueMapOfSimpleTypes()) { - logger.error("It is a wrong request - ignoring! propertyPath: {}, value: {}", propertyPath, getValue()); + if (LOGGER.isDebugEnabled()) { + LOGGER.debug("getLeafPropertyValue=> A new request: propertyPath: {}, value: {}", propertyPath, getValue()); + } + if (propertyPath == null || getValue() == null + //if entry_schema disappears, it is datatype, + // otherwise it is map of simple types - should be ignored + || isValueMapOfSimpleTypes()) { + LOGGER.error("It is a wrong request - ignoring! 
propertyPath: {}, value: {}", propertyPath, getValue()); return propertyValueList; } String[] path = propertyPath.split(PATH_DELIMITER); @@ -176,16 +177,15 @@ public class Property { if (Schema.isRequestedTypeSimple(getPropertyTypeByPath(path))) { //the internal property type in the path is either simple or list of simple types if (isValueInsideDataType()) { - if (logger.isDebugEnabled()) { - logger.debug("The requested is an internal simple property inside of a data type"); - } + if (LOGGER.isDebugEnabled()) { + LOGGER.debug("The requested is an internal simple property inside of a data type"); + } //requested value is an internal simple property inside of a data type propertyValueList = getSimplePropertyValueForComplexType(path); - } - else { - if (logger.isDebugEnabled()) { - logger.debug("The requested property has simple type or list of simple types"); - } + } else { + if (LOGGER.isDebugEnabled()) { + LOGGER.debug("The requested property has simple type or list of simple types"); + } //the requested property is simple type or list of simple types propertyValueList = getSimplePropertyValueForSimpleType(); } @@ -194,44 +194,43 @@ public class Property { } private boolean isValueMapOfSimpleTypes() { - if (getValue() instanceof Map && getEntrySchema() != null) { - logger.warn("This property value is a map of simple types"); - return true; - } - return false; - } + if (getValue() instanceof Map && getEntrySchema() != null) { + LOGGER.warn("This property value is a map of simple types"); + return true; + } + return false; + } private boolean isValueInsideDataType() { //value is either a list of values for data type //or data type return (Schema.LIST.equals(getType()) && isDataTypeInEntrySchema()) - || (getEntrySchema() == null && getType().contains(DATA_TYPE)); + || (getEntrySchema() == null && getType().contains(dataType)); } private Object getSimpleValueFromComplexObject(Object current, String[] path) { - if (current == null) { - return null; - } - int index = 0; 
- - if (path.length > index) { - for (int i = index; i < path.length; i++) { - if (current instanceof Map) { - current = ((Map) current).get(path[i]); - } else if (current instanceof List) { - current = ((List) current).get(0); - i--; - } - else { - return null; - } - } - } - if (current != null) { - return current; - } - return null; - } + if (current == null) { + return null; + } + int index = 0; + + if (path.length > index) { + for (int i = index; i < path.length; i++) { + if (current instanceof Map) { + current = ((Map) current).get(path[i]); + } else if (current instanceof List) { + current = ((List) current).get(0); + i--; + } else { + return null; + } + } + } + if (current != null) { + return current; + } + return null; + } private List getSimplePropertyValueForSimpleType() { if (getValue() instanceof List || getValue() instanceof Map) { @@ -240,32 +239,32 @@ public class Property { return Lists.newArrayList(String.valueOf(value)); } - private List getSimplePropertyValueForComplexType(String[] path) { - if (getValue() instanceof List ) { - return ((List) getValue()).stream() - .map(v -> { - if (path != null) { - return getSimpleValueFromComplexObject(v, path); - } else { - return v; - } - }) - //it might be null when get_input can't be resolved - // e.g.: - // - get_input has two parameters: 1. list and 2. 
index in this list - //and list has no value - // - neither value no default is defined for get_input - .filter(Objects::nonNull) - .map(String::valueOf) - .collect(Collectors.toList()); - } - //it is data type - List valueList = Lists.newArrayList(); - String valueString = String.valueOf(getSimpleValueFromComplexObject(getValue(), path)); - if (Objects.nonNull(valueString)) { - valueList.add(valueString); - } - return valueList; + private List getSimplePropertyValueForComplexType(String[] path) { + if (getValue() instanceof List) { + return ((List) getValue()).stream() + .map(v -> { + if (path != null) { + return getSimpleValueFromComplexObject(v, path); + } else { + return v; + } + }) + //it might be null when get_input can't be resolved + // e.g.: + // - get_input has two parameters: 1. list and 2. index in this list + //and list has no value + // - neither value no default is defined for get_input + .filter(Objects::nonNull) + .map(String::valueOf) + .collect(Collectors.toList()); + } + //it is data type + List valueList = Lists.newArrayList(); + String valueString = String.valueOf(getSimpleValueFromComplexObject(getValue(), path)); + if (Objects.nonNull(valueString)) { + valueList.add(valueString); + } + return valueList; } private String getPropertyTypeByPath(String[] path) { @@ -281,7 +280,7 @@ public class Property { String propertyType = getType(); if (Schema.LIST.equals(propertyType)) { //if it is list, return entry schema type - return (String)getEntrySchema().get(ENTRYTYPE); + return (String) getEntrySchema().get(ENTRYTYPE); } return propertyType; } @@ -297,7 +296,7 @@ public class Property { private String getInternalPropertyType(String dataTypeName, String[] path, int index) { if (path.length > index) { - LinkedHashMap complexProperty = (LinkedHashMap)customDef.get(dataTypeName); + LinkedHashMap complexProperty = (LinkedHashMap) customDef.get(dataTypeName); if (complexProperty != null) { LinkedHashMap dataTypeProperties = (LinkedHashMap) 
complexProperty.get(ENTRYPROPERTIES); return getPropertyTypeFromCustomDefDeeply(path, index, dataTypeProperties); @@ -308,7 +307,7 @@ public class Property { } private String getEntrySchemaType(LinkedHashMap property) { - LinkedHashMap entrySchema = (LinkedHashMap)property.get(ENTRY_SCHEMA); + LinkedHashMap entrySchema = (LinkedHashMap) property.get(Property.entrySchema); if (entrySchema != null) { return (String) entrySchema.get(TYPE); } @@ -320,7 +319,7 @@ public class Property { LinkedHashMap foundProperty = (LinkedHashMap) (properties).get(path[index]); if (foundProperty != null) { String propertyType = calculatePropertyType(foundProperty); - if (propertyType == null || index == path.length - 1){ + if (propertyType == null || index == path.length - 1) { return propertyType; } return getInternalPropertyType(propertyType, path, index + 1); @@ -330,8 +329,8 @@ public class Property { } private boolean isDataTypeInEntrySchema() { - String entrySchemaType = (String)getEntrySchema().get(ENTRYTYPE); - return entrySchemaType != null && entrySchemaType.contains(DATA_TYPE); + String entrySchemaType = (String) getEntrySchema().get(ENTRYTYPE); + return entrySchemaType != null && entrySchemaType.contains(dataType); } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/RelationshipTemplate.java b/src/main/java/org/onap/sdc/toscaparser/api/RelationshipTemplate.java index 1b5d58a..d1a1383 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/RelationshipTemplate.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/RelationshipTemplate.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -29,134 +29,134 @@ import org.onap.sdc.toscaparser.api.elements.StatefulEntityType; import org.onap.sdc.toscaparser.api.elements.EntityType; public class RelationshipTemplate extends EntityTemplate { - - private static final String DERIVED_FROM = "derived_from"; - private static final String PROPERTIES = "properties"; - private static final String REQUIREMENTS = "requirements"; - private static final String INTERFACES = "interfaces"; - private static final String CAPABILITIES = "capabilities"; - private static final String TYPE = "type"; - @SuppressWarnings("unused") - private static final String SECTIONS[] = { - DERIVED_FROM, PROPERTIES, REQUIREMENTS, INTERFACES, CAPABILITIES, TYPE}; - - private String name; - private NodeTemplate target; - private NodeTemplate source; - private ArrayList _properties; - - public RelationshipTemplate(LinkedHashMap rtrelationshipTemplate, - String rtname, - LinkedHashMap rtcustomDef, - NodeTemplate rttarget, - NodeTemplate rtsource) { - this(rtrelationshipTemplate, rtname, rtcustomDef, rttarget, rtsource, null); - } - - public RelationshipTemplate(LinkedHashMap rtrelationshipTemplate, - String rtname, - LinkedHashMap rtcustomDef, - NodeTemplate rttarget, - NodeTemplate rtsource, NodeTemplate parentNodeTemplate) { - super(rtname,rtrelationshipTemplate,"relationship_type",rtcustomDef, parentNodeTemplate); - - name = rtname; - target = rttarget; - source = rtsource; - _properties = null; - } - - public ArrayList getPropertiesObjects() { - // Return properties objects for this template - if(_properties == null) { + + private static final String DERIVED_FROM = "derived_from"; + private static final String PROPERTIES = 
"properties"; + private static final String REQUIREMENTS = "requirements"; + private static final String INTERFACES = "interfaces"; + private static final String CAPABILITIES = "capabilities"; + private static final String TYPE = "type"; + @SuppressWarnings("unused") + private static final String SECTIONS[] = { + DERIVED_FROM, PROPERTIES, REQUIREMENTS, INTERFACES, CAPABILITIES, TYPE}; + + private String name; + private NodeTemplate target; + private NodeTemplate source; + private ArrayList _properties; + + public RelationshipTemplate(LinkedHashMap rtrelationshipTemplate, + String rtname, + LinkedHashMap rtcustomDef, + NodeTemplate rttarget, + NodeTemplate rtsource) { + this(rtrelationshipTemplate, rtname, rtcustomDef, rttarget, rtsource, null); + } + + public RelationshipTemplate(LinkedHashMap rtrelationshipTemplate, + String rtname, + LinkedHashMap rtcustomDef, + NodeTemplate rttarget, + NodeTemplate rtsource, NodeTemplate parentNodeTemplate) { + super(rtname, rtrelationshipTemplate, "relationship_type", rtcustomDef, parentNodeTemplate); + + name = rtname; + target = rttarget; + source = rtsource; + _properties = null; + } + + public ArrayList getPropertiesObjects() { + // Return properties objects for this template + if (_properties == null) { _properties = _createRelationshipProperties(); - } + } return _properties; - } - - @SuppressWarnings({ "unchecked", "unused" }) - public ArrayList _createRelationshipProperties() { - ArrayList props = new ArrayList (); - LinkedHashMap properties = new LinkedHashMap(); - LinkedHashMap relationship = (LinkedHashMap)entityTpl.get("relationship"); - - if(relationship == null) { - for(Object val: entityTpl.values()) { - if(val instanceof LinkedHashMap) { - relationship = (LinkedHashMap)((LinkedHashMap)val).get("relationship"); - break; - } - } - } - - if(relationship != null) { - properties = (LinkedHashMap)((EntityType)typeDefinition).getValue(PROPERTIES,relationship,false); - } - if(properties == null) { - properties = new 
LinkedHashMap(); - } - if(properties == null) { - properties = (LinkedHashMap)entityTpl.get(PROPERTIES); - } - if(properties == null) { - properties = new LinkedHashMap(); - } - - if(properties != null) { - for(Map.Entry me: properties.entrySet()) { - String pname = me.getKey(); - Object pvalue = me.getValue(); - LinkedHashMap propsDef = ((StatefulEntityType)typeDefinition).getPropertiesDef(); - if(propsDef != null && propsDef.get(pname) != null) { - if(properties.get(pname) != null) { - pvalue = properties.get(name); - } - PropertyDef pd = (PropertyDef)propsDef.get(pname); - Property prop = new Property(pname,pvalue,pd.getSchema(),customDef); - props.add(prop); - } - } - } - ArrayList pds = ((StatefulEntityType)typeDefinition).getPropertiesDefObjects(); - for(PropertyDef p: pds) { - if(p.getDefault() != null && properties.get(p.getName()) == null) { - Property prop = new Property(p.getName(), (LinkedHashMap)p.getDefault(), p.getSchema(), customDef); + } + + @SuppressWarnings({"unchecked", "unused"}) + public ArrayList _createRelationshipProperties() { + ArrayList props = new ArrayList(); + LinkedHashMap properties = new LinkedHashMap(); + LinkedHashMap relationship = (LinkedHashMap) entityTpl.get("relationship"); + + if (relationship == null) { + for (Object val : entityTpl.values()) { + if (val instanceof LinkedHashMap) { + relationship = (LinkedHashMap) ((LinkedHashMap) val).get("relationship"); + break; + } + } + } + + if (relationship != null) { + properties = (LinkedHashMap) ((EntityType) typeDefinition).getValue(PROPERTIES, relationship, false); + } + if (properties == null) { + properties = new LinkedHashMap(); + } + if (properties == null) { + properties = (LinkedHashMap) entityTpl.get(PROPERTIES); + } + if (properties == null) { + properties = new LinkedHashMap(); + } + + if (properties != null) { + for (Map.Entry me : properties.entrySet()) { + String pname = me.getKey(); + Object pvalue = me.getValue(); + LinkedHashMap propsDef = ((StatefulEntityType) 
typeDefinition).getPropertiesDef(); + if (propsDef != null && propsDef.get(pname) != null) { + if (properties.get(pname) != null) { + pvalue = properties.get(name); + } + PropertyDef pd = (PropertyDef) propsDef.get(pname); + Property prop = new Property(pname, pvalue, pd.getSchema(), customDef); + props.add(prop); + } + } + } + ArrayList pds = ((StatefulEntityType) typeDefinition).getPropertiesDefObjects(); + for (PropertyDef p : pds) { + if (p.getDefault() != null && properties.get(p.getName()) == null) { + Property prop = new Property(p.getName(), (LinkedHashMap) p.getDefault(), p.getSchema(), customDef); props.add(prop); - } - } + } + } return props; - } - + } + public void validate() { - _validateProperties(entityTpl,(StatefulEntityType)typeDefinition); + _validateProperties(entityTpl, (StatefulEntityType) typeDefinition); } - + // getters/setters public NodeTemplate getTarget() { - return target; + return target; } - + public NodeTemplate getSource() { - return source; + return source; } - + public void setSource(NodeTemplate nt) { - source = nt; + source = nt; } - + public void setTarget(NodeTemplate nt) { - target = nt; + target = nt; } - @Override - public String toString() { - return "RelationshipTemplate{" + - "name='" + name + '\'' + - ", target=" + target.getName() + - ", source=" + source.getName() + - ", _properties=" + _properties + - '}'; - } + @Override + public String toString() { + return "RelationshipTemplate{" + + "name='" + name + '\'' + + ", target=" + target.getName() + + ", source=" + source.getName() + + ", _properties=" + _properties + + '}'; + } } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/Repository.java b/src/main/java/org/onap/sdc/toscaparser/api/Repository.java index 2fff7f6..ee5e5bc 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/Repository.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/Repository.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this 
file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -27,69 +27,69 @@ import org.onap.sdc.toscaparser.api.utils.UrlUtils; import java.util.LinkedHashMap; public class Repository { - - private static final String DESCRIPTION = "description"; - private static final String URL = "url"; - private static final String CREDENTIAL = "credential"; - private static final String SECTIONS[] ={DESCRIPTION, URL, CREDENTIAL}; - - private String name; - private Object reposit; - private String url; - - @SuppressWarnings("unchecked") - public Repository(String repName,Object repValue) { - name = repName; - reposit = repValue; - if(reposit instanceof LinkedHashMap) { - url = (String)((LinkedHashMap)reposit).get("url"); - if(url == null) { + + private static final String DESCRIPTION = "description"; + private static final String URL = "url"; + private static final String CREDENTIAL = "credential"; + private static final String SECTIONS[] = {DESCRIPTION, URL, CREDENTIAL}; + + private String name; + private Object reposit; + private String url; + + @SuppressWarnings("unchecked") + public Repository(String repName, Object repValue) { + name = repName; + reposit = repValue; + if (reposit instanceof LinkedHashMap) { + url = (String) ((LinkedHashMap) reposit).get("url"); + if (url == null) { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE229", String.format( - "MissingRequiredFieldError: Repository \"%s\" is missing required field \"url\"", - name))); + "MissingRequiredFieldError: Repository \"%s\" is missing required field \"url\"", + name))); } - } - loadAndValidate(name,reposit); - } - - @SuppressWarnings("unchecked") - private void 
loadAndValidate(String val,Object repositDef) { - String keyname = val; - if(repositDef instanceof LinkedHashMap) { - for(String key: ((LinkedHashMap)reposit).keySet()) { - boolean bFound = false; - for(String sect: SECTIONS) { - if(key.equals(sect)) { - bFound = true; - break; - } - } - if(!bFound) { + } + loadAndValidate(name, reposit); + } + + @SuppressWarnings("unchecked") + private void loadAndValidate(String val, Object repositDef) { + String keyname = val; + if (repositDef instanceof LinkedHashMap) { + for (String key : ((LinkedHashMap) reposit).keySet()) { + boolean bFound = false; + for (String sect : SECTIONS) { + if (key.equals(sect)) { + bFound = true; + break; + } + } + if (!bFound) { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE230", String.format( - "UnknownFieldError: repositories \"%s\" contains unknown field \"%s\"", - keyname,key))); - } - } - - String repositUrl = (String)((LinkedHashMap)repositDef).get("url"); - if(repositUrl != null) { - boolean urlVal = UrlUtils.validateUrl(repositUrl); - if(!urlVal) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE231", String.format( - "URLException: repsositories \"%s\" Invalid Url",keyname))); - } - } - } - } - - @Override - public String toString() { - return "Repository{" + - "name='" + name + '\'' + - ", reposit=" + reposit + - ", url='" + url + '\'' + - '}'; - } + "UnknownFieldError: repositories \"%s\" contains unknown field \"%s\"", + keyname, key))); + } + } + + String repositUrl = (String) ((LinkedHashMap) repositDef).get("url"); + if (repositUrl != null) { + boolean urlVal = UrlUtils.validateUrl(repositUrl); + if (!urlVal) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE231", String.format( + "URLException: repsositories \"%s\" Invalid Url", keyname))); + } + } + } + } + + @Override + public String toString() { + return "Repository{" + + "name='" + name + '\'' + + ", reposit=" 
+ reposit + + ", url='" + url + '\'' + + '}'; + } } /*python diff --git a/src/main/java/org/onap/sdc/toscaparser/api/RequirementAssignment.java b/src/main/java/org/onap/sdc/toscaparser/api/RequirementAssignment.java index f980e0c..227b2a9 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/RequirementAssignment.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/RequirementAssignment.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -20,7 +20,6 @@ package org.onap.sdc.toscaparser.api; -import java.util.Map; public class RequirementAssignment { @@ -49,6 +48,7 @@ public class RequirementAssignment { /** * Get the name for requirement assignment. + * * @return the name for requirement assignment. */ public String getName() { @@ -57,6 +57,7 @@ public class RequirementAssignment { /** * Set the name for requirement + * * @param name - the name for requirement to set */ public void setName(String name) { @@ -65,6 +66,7 @@ public class RequirementAssignment { /** * Get the node name for requirement assignment. + * * @return the node name for requirement */ public String getNodeTemplateName() { @@ -73,6 +75,7 @@ public class RequirementAssignment { /** * Set the node name for requirement + * * @param nodeName - the node name for requirement to set */ public void setNodeTemplateName(String nodeName) { @@ -81,6 +84,7 @@ public class RequirementAssignment { /** * Get the capability name for requirement assignment. 
+ * * @return the capability name for requirement */ public String getCapabilityName() { @@ -89,6 +93,7 @@ public class RequirementAssignment { /** * Set the capability name for requirement assignment. + * * @param capabilityName - the capability name for requirement to set */ public void setCapabilityName(String capabilityName) { @@ -97,6 +102,7 @@ public class RequirementAssignment { /** * Get the relationship object for requirement + * * @return the relationship object for requirement */ public Object getRelationship() { diff --git a/src/main/java/org/onap/sdc/toscaparser/api/RequirementAssignments.java b/src/main/java/org/onap/sdc/toscaparser/api/RequirementAssignments.java index 1425f6c..2ba6230 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/RequirementAssignments.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/RequirementAssignments.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -35,6 +35,7 @@ public class RequirementAssignments { /** * Get all requirement assignments for Node Template.
* This object can be either the original one, holding all requirement assignments for this node template,or a filtered one, holding a filtered subset.
+ * * @return list of requirement assignments for the node template.
* If there are no requirement assignments, empty list is returned. */ @@ -44,6 +45,7 @@ public class RequirementAssignments { /** * Filter requirement assignments by requirement name. + * * @param reqName - The name of requirement * @return RequirementAssignments object, containing requirement assignments of this type.
* If no such found, filtering will result in an empty collection. diff --git a/src/main/java/org/onap/sdc/toscaparser/api/SubstitutionMappings.java b/src/main/java/org/onap/sdc/toscaparser/api/SubstitutionMappings.java index 1dec80a..a622a9a 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/SubstitutionMappings.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/SubstitutionMappings.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -39,55 +39,55 @@ public class SubstitutionMappings { // SubstitutionMappings exports the topology template as an // implementation of a Node type. 
- private static final String NODE_TYPE = "node_type"; - private static final String REQUIREMENTS = "requirements"; - private static final String CAPABILITIES = "capabilities"; - - private static final String SECTIONS[] = {NODE_TYPE, REQUIREMENTS, CAPABILITIES}; - - private static final String OPTIONAL_OUTPUTS[] = {"tosca_id", "tosca_name", "state"}; - - private LinkedHashMap subMappingDef; - private ArrayList nodetemplates; - private ArrayList inputs; - private ArrayList outputs; - private ArrayList groups; - private NodeTemplate subMappedNodeTemplate; - private LinkedHashMap customDefs; - private LinkedHashMap _capabilities; - private LinkedHashMap _requirements; - - public SubstitutionMappings(LinkedHashMap smsubMappingDef, - ArrayList smnodetemplates, - ArrayList sminputs, - ArrayList smoutputs, - ArrayList smgroups, - NodeTemplate smsubMappedNodeTemplate, - LinkedHashMap smcustomDefs) { - + private static final String NODE_TYPE = "node_type"; + private static final String REQUIREMENTS = "requirements"; + private static final String CAPABILITIES = "capabilities"; + + private static final String SECTIONS[] = {NODE_TYPE, REQUIREMENTS, CAPABILITIES}; + + private static final String OPTIONAL_OUTPUTS[] = {"tosca_id", "tosca_name", "state"}; + + private LinkedHashMap subMappingDef; + private ArrayList nodetemplates; + private ArrayList inputs; + private ArrayList outputs; + private ArrayList groups; + private NodeTemplate subMappedNodeTemplate; + private LinkedHashMap customDefs; + private LinkedHashMap _capabilities; + private LinkedHashMap _requirements; + + public SubstitutionMappings(LinkedHashMap smsubMappingDef, + ArrayList smnodetemplates, + ArrayList sminputs, + ArrayList smoutputs, + ArrayList smgroups, + NodeTemplate smsubMappedNodeTemplate, + LinkedHashMap smcustomDefs) { + subMappingDef = smsubMappingDef; nodetemplates = smnodetemplates; inputs = sminputs != null ? sminputs : new ArrayList(); outputs = smoutputs != null ? 
smoutputs : new ArrayList(); groups = smgroups != null ? smgroups : new ArrayList(); subMappedNodeTemplate = smsubMappedNodeTemplate; - customDefs = smcustomDefs != null ? smcustomDefs : new LinkedHashMap(); + customDefs = smcustomDefs != null ? smcustomDefs : new LinkedHashMap(); _validate(); _capabilities = null; _requirements = null; - } - - public String getType() { - if(subMappingDef != null) { - return (String)subMappingDef.get(NODE_TYPE); - } - return null; - } - - public ArrayList getNodeTemplates() { - return nodetemplates; - } + } + + public String getType() { + if (subMappingDef != null) { + return (String) subMappingDef.get(NODE_TYPE); + } + return null; + } + + public ArrayList getNodeTemplates() { + return nodetemplates; + } /* @classmethod @@ -95,39 +95,39 @@ public class SubstitutionMappings { if isinstance(sub_mapping_def, dict): return sub_mapping_def.get(cls.NODE_TYPE) */ - - public static String stGetNodeType(LinkedHashMap _subMappingDef) { - if(_subMappingDef instanceof LinkedHashMap) { - return (String)_subMappingDef.get(NODE_TYPE); - } - return null; - } - - public String getNodeType() { - return (String)subMappingDef.get(NODE_TYPE); - } - - public ArrayList getInputs() { - return inputs; - } - - public ArrayList getGroups() { - return groups; - } - - public LinkedHashMap getCapabilities() { - return (LinkedHashMap)subMappingDef.get(CAPABILITIES); - } - - public LinkedHashMap getRequirements() { - return (LinkedHashMap)subMappingDef.get(REQUIREMENTS); - } - - public NodeType getNodeDefinition() { - return new NodeType(getNodeType(), customDefs); - } - - private void _validate() { + + public static String stGetNodeType(LinkedHashMap _subMappingDef) { + if (_subMappingDef instanceof LinkedHashMap) { + return (String) _subMappingDef.get(NODE_TYPE); + } + return null; + } + + public String getNodeType() { + return (String) subMappingDef.get(NODE_TYPE); + } + + public ArrayList getInputs() { + return inputs; + } + + public ArrayList getGroups() { 
+ return groups; + } + + public LinkedHashMap getCapabilities() { + return (LinkedHashMap) subMappingDef.get(CAPABILITIES); + } + + public LinkedHashMap getRequirements() { + return (LinkedHashMap) subMappingDef.get(REQUIREMENTS); + } + + public NodeType getNodeDefinition() { + return new NodeType(getNodeType(), customDefs); + } + + private void _validate() { // Basic validation _validateKeys(); _validateType(); @@ -137,149 +137,149 @@ public class SubstitutionMappings { _validateCapabilities(); _validateRequirements(); _validateOutputs(); - } - - private void _validateKeys() { - // validate the keys of substitution mappings - for(String key: subMappingDef.keySet()) { - boolean bFound = false; - for(String s: SECTIONS) { - if(s.equals(key)) { - bFound = true; - break; - } - } - if(!bFound) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE232", String.format( - "UnknownFieldError: SubstitutionMappings contain unknown field \"%s\"", - key))); - } - } - } - - private void _validateType() { + } + + private void _validateKeys() { + // validate the keys of substitution mappings + for (String key : subMappingDef.keySet()) { + boolean bFound = false; + for (String s : SECTIONS) { + if (s.equals(key)) { + bFound = true; + break; + } + } + if (!bFound) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE232", String.format( + "UnknownFieldError: SubstitutionMappings contain unknown field \"%s\"", + key))); + } + } + } + + private void _validateType() { // validate the node_type of substitution mappings - String nodeType = (String)subMappingDef.get(NODE_TYPE); - if(nodeType == null) { + String nodeType = (String) subMappingDef.get(NODE_TYPE); + if (nodeType == null) { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE233", String.format( - "MissingRequiredFieldError: SubstitutionMappings used in topology_template is missing required field \"%s\"", - NODE_TYPE))); + 
"MissingRequiredFieldError: SubstitutionMappings used in topology_template is missing required field \"%s\"", + NODE_TYPE))); } Object nodeTypeDef = customDefs.get(nodeType); - if(nodeTypeDef == null) { + if (nodeTypeDef == null) { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE234", String.format( - "InvalidNodeTypeError: \"%s\" is invalid",nodeType))); + "InvalidNodeTypeError: \"%s\" is invalid", nodeType))); } - } + } - private void _validateInputs() { + private void _validateInputs() { // validate the inputs of substitution mappings. // The inputs defined by the topology template have to match the // properties of the node type or the substituted node. If there are // more inputs than the substituted node has properties, default values //must be defined for those inputs. - - HashSet allInputs = new HashSet<>(); - for(Input inp: inputs) { - allInputs.add(inp.getName()); - } - HashSet requiredProperties = new HashSet<>(); - for(PropertyDef pd: getNodeDefinition().getPropertiesDefObjects()) { - if(pd.isRequired() && pd.getDefault() == null) { - requiredProperties.add(pd.getName()); - } - } + + HashSet allInputs = new HashSet<>(); + for (Input inp : inputs) { + allInputs.add(inp.getName()); + } + HashSet requiredProperties = new HashSet<>(); + for (PropertyDef pd : getNodeDefinition().getPropertiesDefObjects()) { + if (pd.isRequired() && pd.getDefault() == null) { + requiredProperties.add(pd.getName()); + } + } // Must provide inputs for required properties of node type. 
- for(String property: requiredProperties) { + for (String property : requiredProperties) { // Check property which is 'required' and has no 'default' value - if(!allInputs.contains(property)) { + if (!allInputs.contains(property)) { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE235", String.format( - "MissingRequiredInputError: SubstitutionMappings with node_type \"%s\" is missing required input \"%s\"", - getNodeType(),property))); + "MissingRequiredInputError: SubstitutionMappings with node_type \"%s\" is missing required input \"%s\"", + getNodeType(), property))); } } // If the optional properties of node type need to be customized by // substituted node, it also is necessary to define inputs for them, // otherwise they are not mandatory to be defined. - HashSet customizedParameters = new HashSet<>(); - if(subMappedNodeTemplate != null) { - customizedParameters.addAll(subMappedNodeTemplate.getProperties().keySet()); - } - HashSet allProperties = new HashSet( - getNodeDefinition().getPropertiesDef().keySet()); - HashSet diffset = customizedParameters; - diffset.removeAll(allInputs); - for(String parameter: diffset) { - if(allProperties.contains(parameter)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE236", String.format( - "MissingRequiredInputError: SubstitutionMappings with node_type \"%s\" is missing required input \"%s\"", - getNodeType(),parameter))); - } - } - // Additional inputs are not in the properties of node type must - // provide default values. Currently the scenario may not happen - // because of parameters validation in nodetemplate, here is a - // guarantee. 
- for(Input inp: inputs) { - diffset = allInputs; - diffset.removeAll(allProperties); - if(diffset.contains(inp.getName()) && inp.getDefault() == null) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE237", String.format( - "MissingRequiredInputError: SubstitutionMappings with node_type \"%s\" is missing rquired input \"%s\"", - getNodeType(),inp.getName()))); - } - } - } - - private void _validateCapabilities() { + HashSet customizedParameters = new HashSet<>(); + if (subMappedNodeTemplate != null) { + customizedParameters.addAll(subMappedNodeTemplate.getProperties().keySet()); + } + HashSet allProperties = new HashSet( + getNodeDefinition().getPropertiesDef().keySet()); + HashSet diffset = customizedParameters; + diffset.removeAll(allInputs); + for (String parameter : diffset) { + if (allProperties.contains(parameter)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE236", String.format( + "MissingRequiredInputError: SubstitutionMappings with node_type \"%s\" is missing required input \"%s\"", + getNodeType(), parameter))); + } + } + // Additional inputs are not in the properties of node type must + // provide default values. Currently the scenario may not happen + // because of parameters validation in nodetemplate, here is a + // guarantee. + for (Input inp : inputs) { + diffset = allInputs; + diffset.removeAll(allProperties); + if (diffset.contains(inp.getName()) && inp.getDefault() == null) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE237", String.format( + "MissingRequiredInputError: SubstitutionMappings with node_type \"%s\" is missing rquired input \"%s\"", + getNodeType(), inp.getName()))); + } + } + } + + private void _validateCapabilities() { // validate the capabilities of substitution mappings // The capabilities must be in node template which be mapped. 
- LinkedHashMap tplsCapabilities = - (LinkedHashMap)subMappingDef.get(CAPABILITIES); - List nodeCapabilities = null; - if(subMappedNodeTemplate != null) { - nodeCapabilities = subMappedNodeTemplate.getCapabilities().getAll(); - } - if(nodeCapabilities != null) { - for(CapabilityAssignment cap: nodeCapabilities) { - if(tplsCapabilities != null && tplsCapabilities.get(cap.getName()) == null) { - ; //pass - // ValidationIssueCollector.appendException( - // UnknownFieldError(what='SubstitutionMappings', - // field=cap)) - } - } - } - } - - private void _validateRequirements() { + LinkedHashMap tplsCapabilities = + (LinkedHashMap) subMappingDef.get(CAPABILITIES); + List nodeCapabilities = null; + if (subMappedNodeTemplate != null) { + nodeCapabilities = subMappedNodeTemplate.getCapabilities().getAll(); + } + if (nodeCapabilities != null) { + for (CapabilityAssignment cap : nodeCapabilities) { + if (tplsCapabilities != null && tplsCapabilities.get(cap.getName()) == null) { + ; //pass + // ValidationIssueCollector.appendException( + // UnknownFieldError(what='SubstitutionMappings', + // field=cap)) + } + } + } + } + + private void _validateRequirements() { // validate the requirements of substitution mappings - //***************************************************** - //TO-DO - Different from Python code!! one is a bug... - //***************************************************** + //***************************************************** + //TO-DO - Different from Python code!! one is a bug... + //***************************************************** // The requirements must be in node template which be mapped. 
- LinkedHashMap tplsRequirements = - (LinkedHashMap)subMappingDef.get(REQUIREMENTS); - List nodeRequirements = null; - if(subMappedNodeTemplate != null) { - nodeRequirements = subMappedNodeTemplate.getRequirements().getAll(); - } - if(nodeRequirements != null) { - for(RequirementAssignment ro: nodeRequirements) { - String cap = ro.getName(); - if(tplsRequirements != null && tplsRequirements.get(cap) == null) { - ; //pass - // ValidationIssueCollector.appendException( - // UnknownFieldError(what='SubstitutionMappings', - // field=cap)) - } - } - } - } - - private void _validateOutputs() { + LinkedHashMap tplsRequirements = + (LinkedHashMap) subMappingDef.get(REQUIREMENTS); + List nodeRequirements = null; + if (subMappedNodeTemplate != null) { + nodeRequirements = subMappedNodeTemplate.getRequirements().getAll(); + } + if (nodeRequirements != null) { + for (RequirementAssignment ro : nodeRequirements) { + String cap = ro.getName(); + if (tplsRequirements != null && tplsRequirements.get(cap) == null) { + ; //pass + // ValidationIssueCollector.appendException( + // UnknownFieldError(what='SubstitutionMappings', + // field=cap)) + } + } + } + } + + private void _validateOutputs() { // validate the outputs of substitution mappings. // The outputs defined by the topology template have to match the @@ -292,46 +292,46 @@ public class SubstitutionMappings { // attributes of the node type according to the specification, but // it's reasonable that there are more inputs than the node type // has properties, the specification will be amended? 
- - for(Output output: outputs) { - Object ado = getNodeDefinition().getAttributesDef(); - if(ado != null && ((LinkedHashMap)ado).get(output.getName()) == null) { + + for (Output output : outputs) { + Object ado = getNodeDefinition().getAttributesDef(); + if (ado != null && ((LinkedHashMap) ado).get(output.getName()) == null) { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE238", String.format( - "UnknownOutputError: Unknown output \"%s\" in SubstitutionMappings with node_type \"%s\"", - output.getName(),getNodeType()))); - } + "UnknownOutputError: Unknown output \"%s\" in SubstitutionMappings with node_type \"%s\"", + output.getName(), getNodeType()))); + } } - } + } - @Override - public String toString() { - return "SubstitutionMappings{" + + @Override + public String toString() { + return "SubstitutionMappings{" + // "subMappingDef=" + subMappingDef + // ", nodetemplates=" + nodetemplates + // ", inputs=" + inputs + // ", outputs=" + outputs + // ", groups=" + groups + - ", subMappedNodeTemplate=" + (subMappedNodeTemplate==null?"":subMappedNodeTemplate.getName()) + + ", subMappedNodeTemplate=" + (subMappedNodeTemplate == null ? 
"" : subMappedNodeTemplate.getName()) + // ", customDefs=" + customDefs + // ", _capabilities=" + _capabilities + // ", _requirements=" + _requirements + - '}'; - } - - @Deprecated - public String toLimitedString() { - return "SubstitutionMappings{" + - "subMappingDef=" + subMappingDef + - ", nodetemplates=" + nodetemplates + - ", inputs=" + inputs + - ", outputs=" + outputs + - ", groups=" + groups + - ", subMappedNodeTemplate=" + (subMappedNodeTemplate==null?"":subMappedNodeTemplate.getName()) + - ", customDefs=" + customDefs + - ", _capabilities=" + _capabilities + - ", _requirements=" + _requirements + - '}'; - } + '}'; + } + + @Deprecated + public String toLimitedString() { + return "SubstitutionMappings{" + + "subMappingDef=" + subMappingDef + + ", nodetemplates=" + nodetemplates + + ", inputs=" + inputs + + ", outputs=" + outputs + + ", groups=" + groups + + ", subMappedNodeTemplate=" + (subMappedNodeTemplate == null ? "" : subMappedNodeTemplate.getName()) + + ", customDefs=" + customDefs + + ", _capabilities=" + _capabilities + + ", _requirements=" + _requirements + + '}'; + } } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/TopologyTemplate.java b/src/main/java/org/onap/sdc/toscaparser/api/TopologyTemplate.java index 4c4afd3..2160527 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/TopologyTemplate.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/TopologyTemplate.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -32,528 +32,522 @@ import org.onap.sdc.toscaparser.api.parameters.Output; import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; import java.util.ArrayList; -import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashMap; import java.util.Map; public class TopologyTemplate { - private static final String DESCRIPTION = "description"; - private static final String INPUTS = "inputs"; - private static final String NODE_TEMPLATES = "node_templates"; - private static final String RELATIONSHIP_TEMPLATES = "relationship_templates"; - private static final String OUTPUTS = "outputs"; - private static final String GROUPS = "groups"; - private static final String SUBSTITUTION_MAPPINGS = "substitution_mappings"; - private static final String POLICIES = "policies"; - private static final String METADATA = "metadata"; - - private static String SECTIONS[] = { - DESCRIPTION, INPUTS, NODE_TEMPLATES, RELATIONSHIP_TEMPLATES, - OUTPUTS, GROUPS, SUBSTITUTION_MAPPINGS, POLICIES, METADATA - }; - - private LinkedHashMap tpl; - LinkedHashMap metaData; + private static final String DESCRIPTION = "description"; + private static final String INPUTS = "inputs"; + private static final String NODE_TEMPLATES = "node_templates"; + private static final String RELATIONSHIP_TEMPLATES = "relationship_templates"; + private static final String OUTPUTS = "outputs"; + private static final String GROUPS = "groups"; + private static final String SUBSTITUTION_MAPPINGS = "substitution_mappings"; + private static final String POLICIES = "policies"; + private static final String METADATA = "metadata"; + + private static String[] SECTIONS = { + DESCRIPTION, INPUTS, NODE_TEMPLATES, RELATIONSHIP_TEMPLATES, + OUTPUTS, GROUPS, SUBSTITUTION_MAPPINGS, POLICIES, METADATA + }; + + private LinkedHashMap tpl; + LinkedHashMap metaData; private ArrayList inputs; private ArrayList outputs; private ArrayList relationshipTemplates; private ArrayList nodeTemplates; - private LinkedHashMap 
customDefs; - private LinkedHashMap relTypes;//TYPE + private LinkedHashMap customDefs; + private LinkedHashMap relTypes;//TYPE private NodeTemplate subMappedNodeTemplate; private ArrayList groups; private ArrayList policies; - private LinkedHashMap parsedParams = null;//TYPE + private LinkedHashMap parsedParams = null;//TYPE private String description; private ToscaGraph graph; private SubstitutionMappings substitutionMappings; - private boolean resolveGetInput; - - public TopologyTemplate( - LinkedHashMap _template, - LinkedHashMap _customDefs, - LinkedHashMap _relTypes,//TYPE + private boolean resolveGetInput; + + public TopologyTemplate( + LinkedHashMap _template, + LinkedHashMap _customDefs, + LinkedHashMap _relTypes,//TYPE LinkedHashMap _parsedParams, - NodeTemplate _subMappedNodeTemplate, - boolean _resolveGetInput) { - - tpl = _template; - if(tpl != null) { - subMappedNodeTemplate = _subMappedNodeTemplate; - metaData = _metaData(); - customDefs = _customDefs; - relTypes = _relTypes; - parsedParams = _parsedParams; - resolveGetInput = _resolveGetInput; - _validateField(); - description = _tplDescription(); - inputs = _inputs(); - relationshipTemplates =_relationshipTemplates(); - //todo: pass subMappedNodeTemplate to ET constractor - nodeTemplates = _nodeTemplates(); - outputs = _outputs(); - if(nodeTemplates != null) { - graph = new ToscaGraph(nodeTemplates); - } - groups = _groups(); - policies = _policies(); - _processIntrinsicFunctions(); - substitutionMappings = _substitutionMappings(); - } - } - - @SuppressWarnings("unchecked") - private ArrayList _inputs() { - //DumpUtils.dumpYaml(customDefs,0); - ArrayList alInputs = new ArrayList<>(); - for(String name: _tplInputs().keySet()) { - Object attrs = _tplInputs().get(name); - Input input = new Input(name,(LinkedHashMap)attrs,customDefs); - if(parsedParams != null && parsedParams.get(name) != null) { - input.validate(parsedParams.get(name)); + NodeTemplate _subMappedNodeTemplate, + boolean 
_resolveGetInput) { + + tpl = _template; + if (tpl != null) { + subMappedNodeTemplate = _subMappedNodeTemplate; + metaData = _metaData(); + customDefs = _customDefs; + relTypes = _relTypes; + parsedParams = _parsedParams; + resolveGetInput = _resolveGetInput; + _validateField(); + description = _tplDescription(); + inputs = _inputs(); + relationshipTemplates = _relationshipTemplates(); + //todo: pass subMappedNodeTemplate to ET constractor + nodeTemplates = _nodeTemplates(); + outputs = _outputs(); + if (nodeTemplates != null) { + graph = new ToscaGraph(nodeTemplates); } - else { + groups = _groups(); + policies = _policies(); + _processIntrinsicFunctions(); + substitutionMappings = _substitutionMappings(); + } + } + + @SuppressWarnings("unchecked") + private ArrayList _inputs() { + ArrayList alInputs = new ArrayList<>(); + for (String name : _tplInputs().keySet()) { + Object attrs = _tplInputs().get(name); + Input input = new Input(name, (LinkedHashMap) attrs, customDefs); + if (parsedParams != null && parsedParams.get(name) != null) { + input.validate(parsedParams.get(name)); + } else { Object _default = input.getDefault(); - if(_default != null) { + if (_default != null) { input.validate(_default); } } - if((parsedParams != null && parsedParams.get(input.getName()) == null || parsedParams == null) - && input.isRequired() && input.getDefault() == null) { - System.out.format("Log warning: The required parameter \"%s\" is not provided\n",input.getName()); + if ((parsedParams != null && parsedParams.get(input.getName()) == null || parsedParams == null) + && input.isRequired() && input.getDefault() == null) { + System.out.format("Log warning: The required parameter \"%s\" is not provided\n", input.getName()); } alInputs.add(input); - } + } return alInputs; - - } - private LinkedHashMap _metaData() { - if(tpl.get(METADATA) != null) { - return (LinkedHashMap)tpl.get(METADATA); + } + + private LinkedHashMap _metaData() { + if (tpl.get(METADATA) != null) { + return 
(LinkedHashMap) tpl.get(METADATA); + } else { + return new LinkedHashMap(); + } + + } + + private ArrayList _nodeTemplates() { + ArrayList alNodeTemplates = new ArrayList<>(); + LinkedHashMap tpls = _tplNodeTemplates(); + if (tpls != null) { + for (String name : tpls.keySet()) { + NodeTemplate tpl = new NodeTemplate(name, + tpls, + customDefs, + relationshipTemplates, + relTypes, + subMappedNodeTemplate); + if (tpl.getTypeDefinition() != null) { + boolean b = NodeType.TOSCA_DEF.get(tpl.getType()) != null; + if (b || (tpl.getCustomDef() != null && !tpl.getCustomDef().isEmpty())) { + tpl.validate(); + alNodeTemplates.add(tpl); + } + } + } + } + return alNodeTemplates; + } + + @SuppressWarnings("unchecked") + private ArrayList _relationshipTemplates() { + ArrayList alRelationshipTemplates = new ArrayList<>(); + LinkedHashMap tpls = _tplRelationshipTemplates(); + if (tpls != null) { + for (String name : tpls.keySet()) { + RelationshipTemplate tpl = new RelationshipTemplate( + (LinkedHashMap) tpls.get(name), name, customDefs, null, null, subMappedNodeTemplate); + + alRelationshipTemplates.add(tpl); + } } - else { - return new LinkedHashMap(); + return alRelationshipTemplates; + } + + private ArrayList _outputs() { + ArrayList alOutputs = new ArrayList<>(); + for (Map.Entry me : _tplOutputs().entrySet()) { + String oname = me.getKey(); + LinkedHashMap oattrs = (LinkedHashMap) me.getValue(); + Output o = new Output(oname, oattrs); + o.validate(); + alOutputs.add(o); + } + return alOutputs; + } + + private SubstitutionMappings _substitutionMappings() { + LinkedHashMap tplSubstitutionMapping = (LinkedHashMap) _tplSubstitutionMappings(); + + //*** the commenting-out below and the weaker condition are in the Python source + // #if tpl_substitution_mapping and self.sub_mapped_node_template: + if (tplSubstitutionMapping != null && tplSubstitutionMapping.size() > 0) { + return new SubstitutionMappings(tplSubstitutionMapping, + nodeTemplates, + inputs, + outputs, + groups, + 
subMappedNodeTemplate, + customDefs); + } + return null; + + } + + @SuppressWarnings("unchecked") + private ArrayList _policies() { + ArrayList alPolicies = new ArrayList<>(); + for (Map.Entry me : _tplPolicies().entrySet()) { + String policyName = me.getKey(); + LinkedHashMap policyTpl = (LinkedHashMap) me.getValue(); + ArrayList targetList = (ArrayList) policyTpl.get("targets"); + ArrayList targetNodes = new ArrayList<>(); + ArrayList targetObjects = new ArrayList<>(); + ArrayList targetGroups = new ArrayList<>(); + String targetsType = "groups"; + if (targetList != null && targetList.size() >= 1) { + targetGroups = _getPolicyGroups(targetList); + if (targetGroups == null || targetGroups.isEmpty()) { + targetsType = "node_templates"; + targetNodes = _getGroupMembers(targetList); + for (NodeTemplate nt : targetNodes) { + targetObjects.add(nt); + } + } else { + for (Group gr : targetGroups) { + targetObjects.add(gr); + } + } + } + Policy policyObj = new Policy(policyName, + policyTpl, + targetObjects, + targetsType, + customDefs, + subMappedNodeTemplate); + alPolicies.add(policyObj); } - - } - - private ArrayList _nodeTemplates() { - ArrayList alNodeTemplates = new ArrayList<>(); - LinkedHashMap tpls = _tplNodeTemplates(); - if(tpls != null) { - for(String name: tpls.keySet()) { - NodeTemplate tpl = new NodeTemplate(name, - tpls, - customDefs, - relationshipTemplates, - relTypes, - subMappedNodeTemplate); - if(tpl.getTypeDefinition() != null) { - boolean b = NodeType.TOSCA_DEF.get(tpl.getType()) != null; - if(b || (tpl.getCustomDef() != null && !tpl.getCustomDef().isEmpty())) { - tpl.validate(); - alNodeTemplates.add(tpl); - } - } - } - } - return alNodeTemplates; - } - - @SuppressWarnings("unchecked") - private ArrayList _relationshipTemplates() { - ArrayList alRelationshipTemplates = new ArrayList<>(); - LinkedHashMap tpls = _tplRelationshipTemplates(); - if(tpls != null) { - for(String name: tpls.keySet()) { - RelationshipTemplate tpl = new RelationshipTemplate( 
- (LinkedHashMap)tpls.get(name),name,customDefs,null,null, subMappedNodeTemplate); - - alRelationshipTemplates.add(tpl); - } - } - return alRelationshipTemplates; - } - - private ArrayList _outputs() { - ArrayList alOutputs = new ArrayList<>(); - for(Map.Entry me: _tplOutputs().entrySet()) { - String oname = me.getKey(); - LinkedHashMap oattrs = (LinkedHashMap)me.getValue(); - Output o = new Output(oname,oattrs); - o.validate(); - alOutputs.add(o); - } - return alOutputs; - } - - private SubstitutionMappings _substitutionMappings() { - LinkedHashMap tplSubstitutionMapping = (LinkedHashMap) _tplSubstitutionMappings(); - - //*** the commenting-out below and the weaker condition are in the Python source - // #if tpl_substitution_mapping and self.sub_mapped_node_template: - if(tplSubstitutionMapping != null && tplSubstitutionMapping.size() > 0) { - return new SubstitutionMappings(tplSubstitutionMapping, - nodeTemplates, - inputs, - outputs, - groups, - subMappedNodeTemplate, - customDefs); - } - return null; - - } - - @SuppressWarnings("unchecked") - private ArrayList _policies() { - ArrayList alPolicies = new ArrayList<>(); - for(Map.Entry me: _tplPolicies().entrySet()) { - String policyName = me.getKey(); - LinkedHashMap policyTpl = (LinkedHashMap)me.getValue(); - ArrayList targetList = (ArrayList)policyTpl.get("targets"); - ArrayList targetNodes = new ArrayList<>(); - ArrayList targetObjects = new ArrayList<>(); - ArrayList targetGroups = new ArrayList<>(); - String targetsType = "groups"; - if(targetList != null && targetList.size() >= 1) { - targetGroups = _getPolicyGroups(targetList); - if(targetGroups == null || targetGroups.isEmpty()) { - targetsType = "node_templates"; - targetNodes = _getGroupMembers(targetList); - for(NodeTemplate nt: targetNodes) { - targetObjects.add(nt); - } - } - else { - for(Group gr: targetGroups) { - targetObjects.add(gr); - } - } - } - Policy policyObj = new Policy(policyName, - policyTpl, - targetObjects, - targetsType, - 
customDefs, - subMappedNodeTemplate); - alPolicies.add(policyObj); - } return alPolicies; - } - - private ArrayList _groups() { - ArrayList groups = new ArrayList<>(); - ArrayList memberNodes = null; - for(Map.Entry me: _tplGroups().entrySet()) { - String groupName = me.getKey(); - LinkedHashMap groupTpl = (LinkedHashMap)me.getValue(); - ArrayList memberNames = (ArrayList)groupTpl.get("members"); - if(memberNames != null) { - DataEntity.validateDatatype("list", memberNames,null,null,null); - if(memberNames.size() < 1 || - (new HashSet(memberNames)).size() != memberNames.size()) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE005",String.format( + } + + private ArrayList _groups() { + ArrayList groups = new ArrayList<>(); + ArrayList memberNodes = null; + for (Map.Entry me : _tplGroups().entrySet()) { + String groupName = me.getKey(); + LinkedHashMap groupTpl = (LinkedHashMap) me.getValue(); + ArrayList memberNames = (ArrayList) groupTpl.get("members"); + if (memberNames != null) { + DataEntity.validateDatatype("list", memberNames, null, null, null); + if (memberNames.size() < 1 || + (new HashSet(memberNames)).size() != memberNames.size()) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE005", String.format( "InvalidGroupTargetException: Member nodes \"%s\" should be >= 1 and not repeated", memberNames.toString()))); - } - else { - memberNodes = _getGroupMembers(memberNames); - } - } + } else { + memberNodes = _getGroupMembers(memberNames); + } + } Group group = new Group(groupName, - groupTpl, - memberNodes, - customDefs, subMappedNodeTemplate); + groupTpl, + memberNodes, + customDefs, subMappedNodeTemplate); groups.add(group); - } - return groups; - } - - private ArrayList _getGroupMembers(ArrayList memberNames) { - ArrayList memberNodes = new ArrayList<>(); - _validateGroupMembers(memberNames); - for(String member: memberNames) { - for(NodeTemplate node: nodeTemplates) { - 
if(member.equals(node.getName())) { - memberNodes.add(node); - } - } - } - return memberNodes; - } - - private ArrayList _getPolicyGroups(ArrayList memberNames) { - ArrayList memberGroups = new ArrayList<>(); - for(String member: memberNames) { - for(Group group: groups) { - if(member.equals(group.getName())) { - memberGroups.add(group); - } - } - } - return memberGroups; - } - - private void _validateGroupMembers(ArrayList members) { - ArrayList nodeNames = new ArrayList<>(); - for(NodeTemplate node: nodeTemplates) { - nodeNames.add(node.getName()); - } - for(String member: members) { - if(!nodeNames.contains(member)) { + } + return groups; + } + + private ArrayList _getGroupMembers(ArrayList memberNames) { + ArrayList memberNodes = new ArrayList<>(); + _validateGroupMembers(memberNames); + for (String member : memberNames) { + for (NodeTemplate node : nodeTemplates) { + if (member.equals(node.getName())) { + memberNodes.add(node); + } + } + } + return memberNodes; + } + + private ArrayList _getPolicyGroups(ArrayList memberNames) { + ArrayList memberGroups = new ArrayList<>(); + for (String member : memberNames) { + for (Group group : groups) { + if (member.equals(group.getName())) { + memberGroups.add(group); + } + } + } + return memberGroups; + } + + private void _validateGroupMembers(ArrayList members) { + ArrayList nodeNames = new ArrayList<>(); + for (NodeTemplate node : nodeTemplates) { + nodeNames.add(node.getName()); + } + for (String member : members) { + if (!nodeNames.contains(member)) { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE239", String.format( - "InvalidGroupTargetException: Target member \"%s\" is not found in \"nodeTemplates\"",member))); - } - } - } - - // topology template can act like node template - // it is exposed by substitution_mappings. 
- - public String nodetype() { - return substitutionMappings.getNodeType(); - } - - public LinkedHashMap capabilities() { - return substitutionMappings.getCapabilities(); - } - - public LinkedHashMap requirements() { - return substitutionMappings.getRequirements(); - } - - private String _tplDescription() { - return (String)tpl.get(DESCRIPTION); + "InvalidGroupTargetException: Target member \"%s\" is not found in \"nodeTemplates\"", member))); + } + } + } + + // topology template can act like node template + // it is exposed by substitution_mappings. + + public String nodetype() { + return substitutionMappings.getNodeType(); + } + + public LinkedHashMap capabilities() { + return substitutionMappings.getCapabilities(); + } + + public LinkedHashMap requirements() { + return substitutionMappings.getRequirements(); + } + + private String _tplDescription() { + return (String) tpl.get(DESCRIPTION); //if description: // return description.rstrip() - } + } - @SuppressWarnings("unchecked") - private LinkedHashMap _tplInputs() { - if(tpl.get(INPUTS) != null) { - return (LinkedHashMap)tpl.get(INPUTS); + @SuppressWarnings("unchecked") + private LinkedHashMap _tplInputs() { + if (tpl.get(INPUTS) != null) { + return (LinkedHashMap) tpl.get(INPUTS); } - return new LinkedHashMap(); + return new LinkedHashMap(); } @SuppressWarnings("unchecked") - private LinkedHashMap _tplNodeTemplates() { - return (LinkedHashMap)tpl.get(NODE_TEMPLATES); + private LinkedHashMap _tplNodeTemplates() { + return (LinkedHashMap) tpl.get(NODE_TEMPLATES); } @SuppressWarnings("unchecked") - private LinkedHashMap _tplRelationshipTemplates() { - if(tpl.get(RELATIONSHIP_TEMPLATES) != null) { - return (LinkedHashMap)tpl.get(RELATIONSHIP_TEMPLATES); + private LinkedHashMap _tplRelationshipTemplates() { + if (tpl.get(RELATIONSHIP_TEMPLATES) != null) { + return (LinkedHashMap) tpl.get(RELATIONSHIP_TEMPLATES); } - return new LinkedHashMap(); + return new LinkedHashMap(); } @SuppressWarnings("unchecked") - private 
LinkedHashMap _tplOutputs() { - if(tpl.get(OUTPUTS) != null) { - return (LinkedHashMap)tpl.get(OUTPUTS); - } - return new LinkedHashMap(); - } + private LinkedHashMap _tplOutputs() { + if (tpl.get(OUTPUTS) != null) { + return (LinkedHashMap) tpl.get(OUTPUTS); + } + return new LinkedHashMap(); + } @SuppressWarnings("unchecked") - private LinkedHashMap _tplSubstitutionMappings() { - if(tpl.get(SUBSTITUTION_MAPPINGS) != null) { - return (LinkedHashMap)tpl.get(SUBSTITUTION_MAPPINGS); + private LinkedHashMap _tplSubstitutionMappings() { + if (tpl.get(SUBSTITUTION_MAPPINGS) != null) { + return (LinkedHashMap) tpl.get(SUBSTITUTION_MAPPINGS); } - return new LinkedHashMap(); + return new LinkedHashMap(); } @SuppressWarnings("unchecked") - private LinkedHashMap _tplGroups() { - if(tpl.get(GROUPS) != null) { - return (LinkedHashMap)tpl.get(GROUPS); + private LinkedHashMap _tplGroups() { + if (tpl.get(GROUPS) != null) { + return (LinkedHashMap) tpl.get(GROUPS); } - return new LinkedHashMap(); + return new LinkedHashMap(); } @SuppressWarnings("unchecked") - private LinkedHashMap _tplPolicies() { - if(tpl.get(POLICIES) != null) { - return (LinkedHashMap)tpl.get(POLICIES); + private LinkedHashMap _tplPolicies() { + if (tpl.get(POLICIES) != null) { + return (LinkedHashMap) tpl.get(POLICIES); } - return new LinkedHashMap<>(); + return new LinkedHashMap<>(); } private void _validateField() { - for(String name: tpl.keySet()) { - boolean bFound = false; - for(String section: SECTIONS) { - if(name.equals(section)) { - bFound = true; - break; - } - } - if(!bFound) { + for (String name : tpl.keySet()) { + boolean bFound = false; + for (String section : SECTIONS) { + if (name.equals(section)) { + bFound = true; + break; + } + } + if (!bFound) { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE240", String.format( - "UnknownFieldError: TopologyTemplate contains unknown field \"%s\"",name))); - } - } + "UnknownFieldError: TopologyTemplate contains unknown 
field \"%s\"", name))); + } + } } @SuppressWarnings("unchecked") - private void _processIntrinsicFunctions() { + private void _processIntrinsicFunctions() { // Process intrinsic functions // Current implementation processes functions within node template // properties, requirements, interfaces inputs and template outputs. - - if(nodeTemplates != null) { - for(NodeTemplate nt: nodeTemplates) { - for(Property prop: nt.getPropertiesObjects()) { - prop.setValue(Function.getFunction(this,nt,prop.getValue(), resolveGetInput)); - } - for(InterfacesDef ifd: nt.getInterfaces()) { - LinkedHashMap ifin = ifd.getInputs(); - if(ifin != null) { - for(Map.Entry me: ifin.entrySet()) { - String name = me.getKey(); - Object value = Function.getFunction(this,nt,me.getValue(), resolveGetInput); - ifd.setInput(name,value); - } - } - } - if(nt.getRequirements() != null) { - for(RequirementAssignment req: nt.getRequirements().getAll()) { - LinkedHashMap rel; - Object t = req.getRelationship(); - // it can be a string or a LHM... 
- if(t instanceof LinkedHashMap) { - rel = (LinkedHashMap)t; - } - else { - // we set it to null to fail the next test - // and avoid the get("proprties") - rel = null; - } - - if(rel != null && rel.get("properties") != null) { - LinkedHashMap relprops = - (LinkedHashMap)rel.get("properties"); - for(String key: relprops.keySet()) { - Object value = relprops.get(key); - Object func = Function.getFunction(this,req,value, resolveGetInput); - relprops.put(key,func); - } - } - } - } - if(nt.getCapabilitiesObjects() != null) { - for(CapabilityAssignment cap: nt.getCapabilitiesObjects()) { - if(cap.getPropertiesObjects() != null) { - for(Property prop: cap.getPropertiesObjects()) { - Object propvalue = Function.getFunction(this,nt,prop.getValue(), resolveGetInput); - if(propvalue instanceof GetInput) { - propvalue = ((GetInput)propvalue).result(); - for(String p: cap.getProperties().keySet()) { - //Object v = cap.getProperties().get(p); - if(p.equals(prop.getName())) { - cap.setProperty(p,propvalue); - } - } - } - } - } - } - } - for(RelationshipType rel: nt.getRelationships().keySet()) { - NodeTemplate node = nt.getRelationships().get(rel); - ArrayList relTpls = node.getRelationshipTemplate(); - if(relTpls != null) { - for(RelationshipTemplate relTpl: relTpls) { - // TT 5 - for(InterfacesDef iface: relTpl.getInterfaces()) { - if(iface.getInputs() != null) { - for(String name: iface.getInputs().keySet()) { - Object value = iface.getInputs().get(name); - Object func = Function.getFunction( - this, - relTpl, - value, - resolveGetInput); - iface.setInput(name,func); - } - } - } - } - } - } - } - } - for(Output output: outputs) { - Object func = Function.getFunction(this,outputs,output.getValue(), resolveGetInput); - if(func instanceof GetAttribute) { - output.setAttr(Output.VALUE,func); - } - } - } - - public static String getSubMappingNodeType(LinkedHashMap topologyTpl) { - if(topologyTpl != null && topologyTpl instanceof LinkedHashMap) { - Object submapTpl = 
topologyTpl.get(SUBSTITUTION_MAPPINGS); - return SubstitutionMappings.stGetNodeType((LinkedHashMap)submapTpl); - } - return null; - } - + + if (nodeTemplates != null) { + for (NodeTemplate nt : nodeTemplates) { + for (Property prop : nt.getPropertiesObjects()) { + prop.setValue(Function.getFunction(this, nt, prop.getValue(), resolveGetInput)); + } + for (InterfacesDef ifd : nt.getInterfaces()) { + LinkedHashMap ifin = ifd.getInputs(); + if (ifin != null) { + for (Map.Entry me : ifin.entrySet()) { + String name = me.getKey(); + Object value = Function.getFunction(this, nt, me.getValue(), resolveGetInput); + ifd.setInput(name, value); + } + } + } + if (nt.getRequirements() != null) { + for (RequirementAssignment req : nt.getRequirements().getAll()) { + LinkedHashMap rel; + Object t = req.getRelationship(); + // it can be a string or a LHM... + if (t instanceof LinkedHashMap) { + rel = (LinkedHashMap) t; + } else { + // we set it to null to fail the next test + // and avoid the get("proprties") + rel = null; + } + + if (rel != null && rel.get("properties") != null) { + LinkedHashMap relprops = + (LinkedHashMap) rel.get("properties"); + for (String key : relprops.keySet()) { + Object value = relprops.get(key); + Object func = Function.getFunction(this, req, value, resolveGetInput); + relprops.put(key, func); + } + } + } + } + if (nt.getCapabilitiesObjects() != null) { + for (CapabilityAssignment cap : nt.getCapabilitiesObjects()) { + if (cap.getPropertiesObjects() != null) { + for (Property prop : cap.getPropertiesObjects()) { + Object propvalue = Function.getFunction(this, nt, prop.getValue(), resolveGetInput); + if (propvalue instanceof GetInput) { + propvalue = ((GetInput) propvalue).result(); + for (String p : cap.getProperties().keySet()) { + //Object v = cap.getProperties().get(p); + if (p.equals(prop.getName())) { + cap.setProperty(p, propvalue); + } + } + } + } + } + } + } + for (RelationshipType rel : nt.getRelationships().keySet()) { + NodeTemplate node = 
nt.getRelationships().get(rel); + ArrayList relTpls = node.getRelationshipTemplate(); + if (relTpls != null) { + for (RelationshipTemplate relTpl : relTpls) { + // TT 5 + for (InterfacesDef iface : relTpl.getInterfaces()) { + if (iface.getInputs() != null) { + for (String name : iface.getInputs().keySet()) { + Object value = iface.getInputs().get(name); + Object func = Function.getFunction( + this, + relTpl, + value, + resolveGetInput); + iface.setInput(name, func); + } + } + } + } + } + } + } + } + for (Output output : outputs) { + Object func = Function.getFunction(this, outputs, output.getValue(), resolveGetInput); + if (func instanceof GetAttribute) { + output.setAttr(Output.VALUE, func); + } + } + } + + public static String getSubMappingNodeType(LinkedHashMap topologyTpl) { + if (topologyTpl != null && topologyTpl instanceof LinkedHashMap) { + Object submapTpl = topologyTpl.get(SUBSTITUTION_MAPPINGS); + return SubstitutionMappings.stGetNodeType((LinkedHashMap) submapTpl); + } + return null; + } + // getters - - public LinkedHashMap getTpl() { - return tpl; - } - - public LinkedHashMap getMetadata() { - return metaData; - } - - public ArrayList getInputs() { - return inputs; - } - - public ArrayList getOutputs() { - return outputs; - } - - public ArrayList getPolicies() { - return policies; - } - - public ArrayList getRelationshipTemplates() { - return relationshipTemplates; - } - - public ArrayList getNodeTemplates() { - return nodeTemplates; - } - - public ArrayList getGroups() { - return groups; - } - - public SubstitutionMappings getSubstitutionMappings() { - return substitutionMappings; - } - - public LinkedHashMap getParsedParams() { - return parsedParams; - } - - public boolean getResolveGetInput() { - return resolveGetInput; - } - public LinkedHashMap getCustomDefs() { - return customDefs; - } + + public LinkedHashMap getTpl() { + return tpl; + } + + public LinkedHashMap getMetadata() { + return metaData; + } + + public ArrayList getInputs() { + return 
inputs; + } + + public ArrayList getOutputs() { + return outputs; + } + + public ArrayList getPolicies() { + return policies; + } + + public ArrayList getRelationshipTemplates() { + return relationshipTemplates; + } + + public ArrayList getNodeTemplates() { + return nodeTemplates; + } + + public ArrayList getGroups() { + return groups; + } + + public SubstitutionMappings getSubstitutionMappings() { + return substitutionMappings; + } + + public LinkedHashMap getParsedParams() { + return parsedParams; + } + + public boolean getResolveGetInput() { + return resolveGetInput; + } + + public LinkedHashMap getCustomDefs() { + return customDefs; + } } /*python diff --git a/src/main/java/org/onap/sdc/toscaparser/api/ToscaGraph.java b/src/main/java/org/onap/sdc/toscaparser/api/ToscaGraph.java index 1799f2e..1706cdc 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/ToscaGraph.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/ToscaGraph.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -20,75 +20,75 @@ package org.onap.sdc.toscaparser.api; +import org.onap.sdc.toscaparser.api.elements.RelationshipType; + import java.util.ArrayList; import java.util.LinkedHashMap; -import org.onap.sdc.toscaparser.api.elements.RelationshipType; - //import java.util.Iterator; public class ToscaGraph { // Graph of Tosca Node Templates - private ArrayList nodeTemplates; - private LinkedHashMap vertices; - - public ToscaGraph(ArrayList inodeTemplates) { - nodeTemplates = inodeTemplates; - vertices = new LinkedHashMap(); - _create(); - } - - private void _createVertex(NodeTemplate node) { - if(vertices.get(node.getName()) == null) { - vertices.put(node.getName(),node); + private ArrayList nodeTemplates; + private LinkedHashMap vertices; + + public ToscaGraph(ArrayList inodeTemplates) { + nodeTemplates = inodeTemplates; + vertices = new LinkedHashMap(); + create(); + } + + private void createVertex(NodeTemplate node) { + if (vertices.get(node.getName()) == null) { + vertices.put(node.getName(), node); } - } - - private void _createEdge(NodeTemplate node1, - NodeTemplate node2, - RelationshipType relation) { - if(vertices.get(node1.getName()) == null) { - _createVertex(node1); - vertices.get(node1.name)._addNext(node2,relation); - } - } - - public NodeTemplate vertex(String name) { - if(vertices.get(name) != null) { + } + + private void createEdge(NodeTemplate node1, + NodeTemplate node2, + RelationshipType relation) { + if (vertices.get(node1.getName()) == null) { + createVertex(node1); + vertices.get(node1.name)._addNext(node2, relation); + } + } + + public NodeTemplate vertex(String name) { + if (vertices.get(name) != null) { return vertices.get(name); } return null; - } - -// public Iterator getIter() { + } + +// public Iterator getIter() { // return vertices.values().iterator(); // } - - private void _create() { - for(NodeTemplate node: nodeTemplates) { - LinkedHashMap relation = node.getRelationships(); - if(relation != null) { - for(RelationshipType rel: 
relation.keySet()) { - NodeTemplate nodeTpls = relation.get(rel); - for(NodeTemplate tpl: nodeTemplates) { - if(tpl.getName().equals(nodeTpls.getName())) { - _createEdge(node,tpl,rel); - } - } - } - } - _createVertex(node); - } - } - - @Override - public String toString() { - return "ToscaGraph{" + - "nodeTemplates=" + nodeTemplates + - ", vertices=" + vertices + - '}'; - } + + private void create() { + for (NodeTemplate node : nodeTemplates) { + LinkedHashMap relation = node.getRelationships(); + if (relation != null) { + for (RelationshipType rel : relation.keySet()) { + NodeTemplate nodeTpls = relation.get(rel); + for (NodeTemplate tpl : nodeTemplates) { + if (tpl.getName().equals(nodeTpls.getName())) { + createEdge(node, tpl, rel); + } + } + } + } + createVertex(node); + } + } + + @Override + public String toString() { + return "ToscaGraph{" + + "nodeTemplates=" + nodeTemplates + + ", vertices=" + vertices + + '}'; + } } /*python diff --git a/src/main/java/org/onap/sdc/toscaparser/api/ToscaTemplate.java b/src/main/java/org/onap/sdc/toscaparser/api/ToscaTemplate.java index 6edc291..ddb8ddb 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/ToscaTemplate.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/ToscaTemplate.java @@ -5,9 +5,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -55,58 +55,58 @@ import org.yaml.snakeyaml.Yaml; public class ToscaTemplate extends Object { - public static final int MAX_LEVELS = 20; - private static Logger log = LoggerFactory.getLogger(ToscaTemplate.class.getName()); - - // TOSCA template key names - private static final String DEFINITION_VERSION = "tosca_definitions_version"; - private static final String DEFAULT_NAMESPACE = "tosca_default_namespace"; - private static final String TEMPLATE_NAME = "template_name"; - private static final String TOPOLOGY_TEMPLATE = "topology_template"; - private static final String TEMPLATE_AUTHOR = "template_author"; - private static final String TEMPLATE_VERSION = "template_version"; - private static final String DESCRIPTION = "description"; - private static final String IMPORTS = "imports"; - private static final String DSL_DEFINITIONS = "dsl_definitions"; - private static final String NODE_TYPES = "node_types"; - private static final String RELATIONSHIP_TYPES = "relationship_types"; - private static final String RELATIONSHIP_TEMPLATES = "relationship_templates"; - private static final String CAPABILITY_TYPES = "capability_types"; - private static final String ARTIFACT_TYPES = "artifact_types"; - private static final String DATA_TYPES = "data_types"; - private static final String INTERFACE_TYPES = "interface_types"; - private static final String POLICY_TYPES = "policy_types"; - private static final String GROUP_TYPES = "group_types"; - private static final String REPOSITORIES = "repositories"; - - private static String SECTIONS[] = { - DEFINITION_VERSION, DEFAULT_NAMESPACE, TEMPLATE_NAME, + public static final int MAX_LEVELS = 20; + private static Logger log = LoggerFactory.getLogger(ToscaTemplate.class.getName()); + + // TOSCA template key names + private static final String DEFINITION_VERSION = "tosca_definitions_version"; + private static final String DEFAULT_NAMESPACE = "tosca_default_namespace"; + private static final String TEMPLATE_NAME = "template_name"; + private 
static final String TOPOLOGY_TEMPLATE = "topology_template"; + private static final String TEMPLATE_AUTHOR = "template_author"; + private static final String TEMPLATE_VERSION = "template_version"; + private static final String DESCRIPTION = "description"; + private static final String IMPORTS = "imports"; + private static final String DSL_DEFINITIONS = "dsl_definitions"; + private static final String NODE_TYPES = "node_types"; + private static final String RELATIONSHIP_TYPES = "relationship_types"; + private static final String RELATIONSHIP_TEMPLATES = "relationship_templates"; + private static final String CAPABILITY_TYPES = "capability_types"; + private static final String ARTIFACT_TYPES = "artifact_types"; + private static final String DATA_TYPES = "data_types"; + private static final String INTERFACE_TYPES = "interface_types"; + private static final String POLICY_TYPES = "policy_types"; + private static final String GROUP_TYPES = "group_types"; + private static final String REPOSITORIES = "repositories"; + + private static String SECTIONS[] = { + DEFINITION_VERSION, DEFAULT_NAMESPACE, TEMPLATE_NAME, TOPOLOGY_TEMPLATE, TEMPLATE_AUTHOR, TEMPLATE_VERSION, DESCRIPTION, IMPORTS, DSL_DEFINITIONS, NODE_TYPES, RELATIONSHIP_TYPES, RELATIONSHIP_TEMPLATES, CAPABILITY_TYPES, ARTIFACT_TYPES, DATA_TYPES, INTERFACE_TYPES, POLICY_TYPES, GROUP_TYPES, REPOSITORIES - }; + }; + + // Sections that are specific to individual template definitions + private static final String METADATA = "metadata"; + private static ArrayList SPECIAL_SECTIONS; - // Sections that are specific to individual template definitions - private static final String METADATA = "metadata"; - private static ArrayList SPECIAL_SECTIONS; - private ExtTools exttools = new ExtTools(); private ArrayList VALID_TEMPLATE_VERSIONS; - private LinkedHashMap> ADDITIONAL_SECTIONS; - - private boolean isFile; - private String path; - private String inputPath; - private String rootPath; - private LinkedHashMap parsedParams; - 
private boolean resolveGetInput; - private LinkedHashMap tpl; + private LinkedHashMap> ADDITIONAL_SECTIONS; + + private boolean isFile; + private String path; + private String inputPath; + private String rootPath; + private LinkedHashMap parsedParams; + private boolean resolveGetInput; + private LinkedHashMap tpl; private String version; private ArrayList imports; - private LinkedHashMap relationshipTypes; + private LinkedHashMap relationshipTypes; private Metadata metaData; private String description; private TopologyTemplate topologyTemplate; @@ -115,117 +115,113 @@ public class ToscaTemplate extends Object { private ArrayList relationshipTemplates; private ArrayList nodeTemplates; private ArrayList outputs; - private ArrayList policies; - private ArrayList groups; - private ConcurrentHashMap nestedToscaTplsWithTopology; + private ArrayList policies; + private ArrayList groups; + private ConcurrentHashMap nestedToscaTplsWithTopology; private ArrayList nestedToscaTemplatesWithTopology; private ToscaGraph graph; private String csarTempDir; private int nestingLoopCounter; - private LinkedHashMap> metaProperties; - private Set processedImports; - private LinkedHashMap customDefsFinal = new LinkedHashMap<>(); - private HashSet dataTypes; - - public ToscaTemplate(String _path, - LinkedHashMap _parsedParams, - boolean aFile, - LinkedHashMap yamlDictTpl) throws JToscaException { - init(_path, _parsedParams, aFile, yamlDictTpl, true); - } - - public ToscaTemplate(String _path, - LinkedHashMap _parsedParams, - boolean aFile, - LinkedHashMap yamlDictTpl, boolean resolveGetInput) throws JToscaException { - init(_path, _parsedParams, aFile, yamlDictTpl, resolveGetInput); - } - - @SuppressWarnings("unchecked") - private void init(String _path, - LinkedHashMap _parsedParams, - boolean aFile, - LinkedHashMap yamlDictTpl, boolean _resolveGetInput) throws JToscaException { - - ThreadLocalsHolder.setCollector(new ValidationIssueCollector()); - - VALID_TEMPLATE_VERSIONS = new 
ArrayList<>(); - VALID_TEMPLATE_VERSIONS.add("tosca_simple_yaml_1_0"); - VALID_TEMPLATE_VERSIONS.add("tosca_simple_yaml_1_1"); - VALID_TEMPLATE_VERSIONS.addAll(exttools.getVersions()); - ADDITIONAL_SECTIONS = new LinkedHashMap<>(); - SPECIAL_SECTIONS = new ArrayList<>(); - SPECIAL_SECTIONS.add(METADATA); - ADDITIONAL_SECTIONS.put("tosca_simple_yaml_1_0",SPECIAL_SECTIONS); - ADDITIONAL_SECTIONS.put("tosca_simple_yaml_1_1",SPECIAL_SECTIONS); - ADDITIONAL_SECTIONS.putAll(exttools.getSections()); - - //long startTime = System.nanoTime(); - - - isFile = aFile; - inputPath = null; - path = null; - tpl = null; - csarTempDir = null; - nestedToscaTplsWithTopology = new ConcurrentHashMap<>(); - nestedToscaTemplatesWithTopology = new ArrayList(); - resolveGetInput = _resolveGetInput; - metaProperties = new LinkedHashMap<>(); - - if(_path != null && !_path.isEmpty()) { - // save the original input path - inputPath = _path; - // get the actual path (will change with CSAR) - path = _getPath(_path); - // load the YAML template - if (path != null && !path.isEmpty()) { - try (InputStream input = new FileInputStream(new File(path));){ - //System.out.println("Loading YAML file " + path); - log.debug("ToscaTemplate Loading YAMEL file {}", path); - Yaml yaml = new Yaml(); - Object data = yaml.load(input); - this.tpl = (LinkedHashMap) data; - } - catch (FileNotFoundException e) { - log.error("ToscaTemplate - Exception loading yaml: {}", e.getMessage()); - log.error("Exception", e); - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE275", - "ToscaTemplate - Exception loading yaml: -> " + e.getMessage())); - return; - } - catch(Exception e) { - log.error("ToscaTemplate - Error loading yaml, aborting -> ", e.getMessage()); - log.error("Exception", e); - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE275", - "ToscaTemplate - Error loading yaml, aborting -> " + e.getMessage())); - return; - } - - if(yamlDictTpl != 
null) { - //msg = (_('Both path and yaml_dict_tpl arguments were ' - // 'provided. Using path and ignoring yaml_dict_tpl.')) - //log.info(msg) - log.debug("ToscaTemplate - Both path and yaml_dict_tpl arguments were provided. Using path and ignoring yaml_dict_tpl"); - } - } else { - // no input to process... - _abort(); - } - } - else { - if(yamlDictTpl != null) { + private LinkedHashMap> metaProperties; + private Set processedImports; + private LinkedHashMap customDefsFinal = new LinkedHashMap<>(); + private HashSet dataTypes; + + public ToscaTemplate(String _path, + LinkedHashMap _parsedParams, + boolean aFile, + LinkedHashMap yamlDictTpl) throws JToscaException { + init(_path, _parsedParams, aFile, yamlDictTpl, true); + } + + public ToscaTemplate(String _path, + LinkedHashMap _parsedParams, + boolean aFile, + LinkedHashMap yamlDictTpl, boolean resolveGetInput) throws JToscaException { + init(_path, _parsedParams, aFile, yamlDictTpl, resolveGetInput); + } + + @SuppressWarnings("unchecked") + private void init(String _path, + LinkedHashMap _parsedParams, + boolean aFile, + LinkedHashMap yamlDictTpl, boolean _resolveGetInput) throws JToscaException { + + ThreadLocalsHolder.setCollector(new ValidationIssueCollector()); + + VALID_TEMPLATE_VERSIONS = new ArrayList<>(); + VALID_TEMPLATE_VERSIONS.add("tosca_simple_yaml_1_0"); + VALID_TEMPLATE_VERSIONS.add("tosca_simple_yaml_1_1"); + VALID_TEMPLATE_VERSIONS.addAll(exttools.getVersions()); + ADDITIONAL_SECTIONS = new LinkedHashMap<>(); + SPECIAL_SECTIONS = new ArrayList<>(); + SPECIAL_SECTIONS.add(METADATA); + ADDITIONAL_SECTIONS.put("tosca_simple_yaml_1_0", SPECIAL_SECTIONS); + ADDITIONAL_SECTIONS.put("tosca_simple_yaml_1_1", SPECIAL_SECTIONS); + ADDITIONAL_SECTIONS.putAll(exttools.getSections()); + + //long startTime = System.nanoTime(); + + + isFile = aFile; + inputPath = null; + path = null; + tpl = null; + csarTempDir = null; + nestedToscaTplsWithTopology = new ConcurrentHashMap<>(); + nestedToscaTemplatesWithTopology 
= new ArrayList(); + resolveGetInput = _resolveGetInput; + metaProperties = new LinkedHashMap<>(); + + if (_path != null && !_path.isEmpty()) { + // save the original input path + inputPath = _path; + // get the actual path (will change with CSAR) + path = _getPath(_path); + // load the YAML template + if (path != null && !path.isEmpty()) { + try (InputStream input = new FileInputStream(new File(path));) { + //System.out.println("Loading YAML file " + path); + log.debug("ToscaTemplate Loading YAMEL file {}", path); + Yaml yaml = new Yaml(); + Object data = yaml.load(input); + this.tpl = (LinkedHashMap) data; + } catch (FileNotFoundException e) { + log.error("ToscaTemplate - Exception loading yaml: {}", e.getMessage()); + log.error("Exception", e); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE275", + "ToscaTemplate - Exception loading yaml: -> " + e.getMessage())); + return; + } catch (Exception e) { + log.error("ToscaTemplate - Error loading yaml, aborting -> ", e.getMessage()); + log.error("Exception", e); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE275", + "ToscaTemplate - Error loading yaml, aborting -> " + e.getMessage())); + return; + } + + if (yamlDictTpl != null) { + //msg = (_('Both path and yaml_dict_tpl arguments were ' + // 'provided. Using path and ignoring yaml_dict_tpl.')) + //log.info(msg) + log.debug("ToscaTemplate - Both path and yaml_dict_tpl arguments were provided. Using path and ignoring yaml_dict_tpl"); + } + } else { + // no input to process... + _abort(); + } + } else { + if (yamlDictTpl != null) { tpl = yamlDictTpl; - } - else { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE244", - "ValueError: No path or yaml_dict_tpl was provided. There is nothing to parse")); - log.debug("ToscaTemplate ValueError: No path or yaml_dict_tpl was provided. 
There is nothing to parse"); + } else { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE244", + "ValueError: No path or yaml_dict_tpl was provided. There is nothing to parse")); + log.debug("ToscaTemplate ValueError: No path or yaml_dict_tpl was provided. There is nothing to parse"); - } - } + } + } - if(tpl != null) { + if (tpl != null) { parsedParams = _parsedParams; _validateField(); this.rootPath = path; @@ -235,10 +231,10 @@ public class ToscaTemplate extends Object { this.metaData = _tplMetaData(); this.relationshipTypes = _tplRelationshipTypes(); this.description = _tplDescription(); - this.dataTypes = getTopologyDataTypes(); - this.topologyTemplate = _topologyTemplate(); + this.dataTypes = getTopologyDataTypes(); + this.topologyTemplate = _topologyTemplate(); this.repositories = _tplRepositories(); - if(topologyTemplate.getTpl() != null) { + if (topologyTemplate.getTpl() != null) { this.inputs = _inputs(); this.relationshipTemplates = _relationshipTemplates(); this.nodeTemplates = _nodeTemplates(); @@ -246,422 +242,419 @@ public class ToscaTemplate extends Object { this.policies = _policies(); this.groups = _groups(); // _handleNestedToscaTemplatesWithTopology(); - _handleNestedToscaTemplatesWithTopology(topologyTemplate); + _handleNestedToscaTemplatesWithTopology(topologyTemplate); graph = new ToscaGraph(nodeTemplates); } } - if(csarTempDir != null) { - CSAR.deleteDir(new File(csarTempDir)); - csarTempDir = null; + if (csarTempDir != null) { + CSAR.deleteDir(new File(csarTempDir)); + csarTempDir = null; + } + + verifyTemplate(); + + } + + private void _abort() throws JToscaException { + // print out all exceptions caught + verifyTemplate(); + throw new JToscaException("jtosca aborting", JToscaErrorCodes.PATH_NOT_VALID.getValue()); + } + + private TopologyTemplate _topologyTemplate() { + return new TopologyTemplate( + _tplTopologyTemplate(), + _getAllCustomDefs(imports), + relationshipTypes, + parsedParams, + null, + 
resolveGetInput); + } + + private ArrayList _inputs() { + return topologyTemplate.getInputs(); + } + + private ArrayList _nodeTemplates() { + return topologyTemplate.getNodeTemplates(); + } + + private ArrayList _relationshipTemplates() { + return topologyTemplate.getRelationshipTemplates(); + } + + private ArrayList _outputs() { + return topologyTemplate.getOutputs(); + } + + private String _tplVersion() { + return (String) tpl.get(DEFINITION_VERSION); + } + + @SuppressWarnings("unchecked") + private Metadata _tplMetaData() { + Object mdo = tpl.get(METADATA); + if (mdo instanceof LinkedHashMap) { + return new Metadata((Map) mdo); + } else { + return null; + } + } + + private String _tplDescription() { + return (String) tpl.get(DESCRIPTION); + } + + @SuppressWarnings("unchecked") + private ArrayList _tplImports() { + return (ArrayList) tpl.get(IMPORTS); + } + + @SuppressWarnings("unchecked") + private ArrayList _tplRepositories() { + LinkedHashMap repositories = + (LinkedHashMap) tpl.get(REPOSITORIES); + ArrayList reposit = new ArrayList<>(); + if (repositories != null) { + for (Map.Entry me : repositories.entrySet()) { + Repository reposits = new Repository(me.getKey(), me.getValue()); + reposit.add(reposits); + } + } + return reposit; + } + + private LinkedHashMap _tplRelationshipTypes() { + return (LinkedHashMap) _getCustomTypes(RELATIONSHIP_TYPES, null); + } + + @SuppressWarnings("unchecked") + private LinkedHashMap _tplTopologyTemplate() { + return (LinkedHashMap) tpl.get(TOPOLOGY_TEMPLATE); + } + + private ArrayList _policies() { + return topologyTemplate.getPolicies(); + } + + private ArrayList _groups() { + return topologyTemplate.getGroups(); + } + + /** + * Read datatypes field + * + * @return return list of datatypes. 
+ */ + @SuppressWarnings("unchecked") + private HashSet getTopologyDataTypes() { + LinkedHashMap value = + (LinkedHashMap) tpl.get(DATA_TYPES); + HashSet datatypes = new HashSet<>(); + if (value != null) { + customDefsFinal.putAll(value); + for (Map.Entry me : value.entrySet()) { + DataType datatype = new DataType(me.getKey(), value); + datatypes.add(datatype); + } } - - verifyTemplate(); - - } - - private void _abort() throws JToscaException { - // print out all exceptions caught - verifyTemplate(); - throw new JToscaException("jtosca aborting", JToscaErrorCodes.PATH_NOT_VALID.getValue()); - } - - private TopologyTemplate _topologyTemplate() { - return new TopologyTemplate( - _tplTopologyTemplate(), - _getAllCustomDefs(imports), - relationshipTypes, - parsedParams, - null, - resolveGetInput); - } - - private ArrayList _inputs() { - return topologyTemplate.getInputs(); - } - - private ArrayList _nodeTemplates() { - return topologyTemplate.getNodeTemplates(); - } - - private ArrayList _relationshipTemplates() { - return topologyTemplate.getRelationshipTemplates(); - } - - private ArrayList _outputs() { - return topologyTemplate.getOutputs(); - } - - private String _tplVersion() { - return (String)tpl.get(DEFINITION_VERSION); - } - - @SuppressWarnings("unchecked") - private Metadata _tplMetaData() { - Object mdo = tpl.get(METADATA); - if(mdo instanceof LinkedHashMap) { - return new Metadata((Map)mdo); - } - else { - return null; - } - } - - private String _tplDescription() { - return (String)tpl.get(DESCRIPTION); - } - - @SuppressWarnings("unchecked") - private ArrayList _tplImports() { - return (ArrayList)tpl.get(IMPORTS); - } - - @SuppressWarnings("unchecked") - private ArrayList _tplRepositories() { - LinkedHashMap repositories = - (LinkedHashMap)tpl.get(REPOSITORIES); - ArrayList reposit = new ArrayList<>(); - if(repositories != null) { - for(Map.Entry me: repositories.entrySet()) { - Repository reposits = new Repository(me.getKey(),me.getValue()); - 
reposit.add(reposits); - } - } - return reposit; - } - - private LinkedHashMap _tplRelationshipTypes() { - return (LinkedHashMap)_getCustomTypes(RELATIONSHIP_TYPES,null); - } - - @SuppressWarnings("unchecked") - private LinkedHashMap _tplTopologyTemplate() { - return (LinkedHashMap)tpl.get(TOPOLOGY_TEMPLATE); - } - - private ArrayList _policies() { - return topologyTemplate.getPolicies(); - } - - private ArrayList _groups() { - return topologyTemplate.getGroups(); - } - - /** - * Read datatypes field - * @return return list of datatypes. - */ - @SuppressWarnings("unchecked") - private HashSet getTopologyDataTypes(){ - LinkedHashMap value = - (LinkedHashMap)tpl.get(DATA_TYPES); - HashSet datatypes = new HashSet<>(); - if(value != null) { - customDefsFinal.putAll(value); - for(Map.Entry me: value.entrySet()) { - DataType datatype = new DataType(me.getKey(), value); - datatypes.add(datatype); - } - } - - - return datatypes; - } - - /** - * This method is used to get consolidated custom definitions from all imports - * It is logically divided in two parts to handle imports; map and list formats. - * Before processing the imports; it sorts them to make sure the current directory imports are - * being processed first and then others. Once sorted; it processes each import one by one in - * recursive manner. - * To avoid cyclic dependency among imports; this method uses a set to keep track of all - * imports which are already processed and filters the imports which occurs more than once. 
- * - * @param alImports all imports which needs to be processed - * @return the linked hash map containing all import definitions - */ - - @SuppressWarnings("unchecked") - private LinkedHashMap _getAllCustomDefs(Object alImports) { - - - String types[] = { - IMPORTS, NODE_TYPES, CAPABILITY_TYPES, RELATIONSHIP_TYPES, - DATA_TYPES, INTERFACE_TYPES, POLICY_TYPES, GROUP_TYPES - }; - - List> imports = (List>) alImports; - if (imports != null && !imports.isEmpty()) { - if (imports.get(0) instanceof LinkedHashMap) { - imports = sortImports(imports); - - for (Map map : imports) { - List> singleImportList = new ArrayList<>(); - singleImportList.add(map); - - Map importNameDetails = getValidFileNameForImportReference(singleImportList); - singleImportList = filterImportsForRecursion(singleImportList, importNameDetails); - - if(!singleImportList.get(0).isEmpty()){ - LinkedHashMap customDefs = _getCustomTypes(types, new ArrayList<>(singleImportList)); - processedImports.add(importNameDetails.get("importFileName")); - - if (customDefs != null) { - customDefsFinal.putAll(customDefs); - - if (customDefs.get(IMPORTS) != null) { - resetPathForRecursiveImports(importNameDetails.get("importRelativeName")); - LinkedHashMap importDefs = _getAllCustomDefs(customDefs.get(IMPORTS)); - customDefsFinal.putAll(importDefs); - } - } - } - } - } else { - LinkedHashMap customDefs = _getCustomTypes(types, new ArrayList<>(imports)); - if (customDefs != null) { - customDefsFinal.putAll(customDefs); - - if (customDefs.get(IMPORTS) != null) { - LinkedHashMap importDefs = _getAllCustomDefs(customDefs.get(IMPORTS)); - customDefsFinal.putAll(importDefs); - } - } - } - } - - // As imports are not custom_types, remove from the dict - customDefsFinal.remove(IMPORTS); - - return customDefsFinal; - } - - /** - * This method is used to sort the imports in order so that same directory - * imports will be processed first - * - * @param customImports the custom imports - * @return the sorted list of imports - */ 
- private List> sortImports(List> customImports){ - List> finalList1 = new ArrayList<>(); - List> finalList2 = new ArrayList<>(); - Iterator> itr = customImports.iterator(); - while(itr.hasNext()) { - Map innerMap = itr.next(); - if (innerMap.toString().contains("../")) { - finalList2.add(innerMap); - itr.remove(); - } - else if (innerMap.toString().contains("/")) { - finalList1.add(innerMap); - itr.remove(); - } - } - - customImports.addAll(finalList1); - customImports.addAll(finalList2); - return customImports; - } - - /** - * This method is used to reset PATH variable after processing of current import file is done - * This is required because of relative path nature of imports present in files. - * - * @param currImportRelativeName the current import relative name - */ - private void resetPathForRecursiveImports(String currImportRelativeName){ - path = getPath(path, currImportRelativeName); - } - - /** - * This is a recursive method which starts from current import and then recursively finds a - * valid path relative to current import file name. - * By doing this it handles all nested hierarchy of imports defined in CSARs - * - * @param path the path - * @param importFileName the import file name - * @return the string containing updated path value - */ - private String getPath(String path, String importFileName){ - String tempFullPath = (Paths.get(path).toAbsolutePath().getParent() - .toString() + File.separator + importFileName.replace("../", "")).replace('\\', '/'); - String tempPartialPath = (Paths.get(path).toAbsolutePath().getParent().toString()).replace('\\', '/'); - if(Files.exists(Paths.get(tempFullPath))) - return tempFullPath; - else - return getPath(tempPartialPath, importFileName); - } - - /** - * This method is used to get full path name for the file which needs to be processed. It helps - * in situation where files are present in different directory and are references as relative - * paths. 
- * - * @param customImports the custom imports - * @return the map containing import file full and relative paths - */ - private Map getValidFileNameForImportReference(List> customImports){ - String importFileName; - Map retMap = new HashMap<>(); - for (Map map1 : customImports) { - for (Map.Entry entry : map1.entrySet()) { - Map innerMostMap = (Map) entry.getValue(); - Iterator> it = innerMostMap.entrySet().iterator(); - while (it.hasNext()) { - Map.Entry val = it.next(); - if(val.getValue().contains("/")){ - importFileName = (Paths.get(rootPath).toAbsolutePath().getParent().toString() + File - .separator + val.getValue().replace("../", "")).replace('\\', '/'); - } - else { - importFileName = (Paths.get(path).toAbsolutePath().getParent().toString() + File - .separator + val.getValue().replace("../", "")).replace('\\', '/'); - } - retMap.put("importFileName", importFileName); - retMap.put("importRelativeName", val.getValue()); - } - } - } - return retMap; - } - - /** - * This method is used to filter the imports which already gets processed in previous step. 
- * It handles the use case of cyclic dependency in imports which may cause Stack Overflow - * exception - * - * @param customImports the custom imports - * @param importNameDetails the import name details - * @return the list containing filtered imports - */ - private List> filterImportsForRecursion(List> - customImports, Map importNameDetails){ - for (Map map1 : customImports) { - for (Map.Entry entry : map1.entrySet()) { - Map innerMostMap = (Map) entry.getValue(); - Iterator> it = innerMostMap.entrySet().iterator(); - while (it.hasNext()) { - it.next(); - if (processedImports.contains(importNameDetails.get("importFileName"))) { - it.remove(); - } - } - } - } - - // Remove Empty elements - Iterator> itr = customImports.iterator(); - while(itr.hasNext()) { - Map innerMap = itr.next(); - Predicate predicate = p-> p.values().isEmpty(); - innerMap.values().removeIf(predicate); - } - - return customImports; - } - - @SuppressWarnings("unchecked") - private LinkedHashMap _getCustomTypes(Object typeDefinitions,ArrayList alImports) { - + + + return datatypes; + } + + /** + * This method is used to get consolidated custom definitions from all imports + * It is logically divided in two parts to handle imports; map and list formats. + * Before processing the imports; it sorts them to make sure the current directory imports are + * being processed first and then others. Once sorted; it processes each import one by one in + * recursive manner. + * To avoid cyclic dependency among imports; this method uses a set to keep track of all + * imports which are already processed and filters the imports which occurs more than once. 
+ * + * @param alImports all imports which needs to be processed + * @return the linked hash map containing all import definitions + */ + + @SuppressWarnings("unchecked") + private LinkedHashMap _getAllCustomDefs(Object alImports) { + + + String types[] = { + IMPORTS, NODE_TYPES, CAPABILITY_TYPES, RELATIONSHIP_TYPES, + DATA_TYPES, INTERFACE_TYPES, POLICY_TYPES, GROUP_TYPES + }; + + List> imports = (List>) alImports; + if (imports != null && !imports.isEmpty()) { + if (imports.get(0) instanceof LinkedHashMap) { + imports = sortImports(imports); + + for (Map map : imports) { + List> singleImportList = new ArrayList<>(); + singleImportList.add(map); + + Map importNameDetails = getValidFileNameForImportReference(singleImportList); + singleImportList = filterImportsForRecursion(singleImportList, importNameDetails); + + if (!singleImportList.get(0).isEmpty()) { + LinkedHashMap customDefs = _getCustomTypes(types, new ArrayList<>(singleImportList)); + processedImports.add(importNameDetails.get("importFileName")); + + if (customDefs != null) { + customDefsFinal.putAll(customDefs); + + if (customDefs.get(IMPORTS) != null) { + resetPathForRecursiveImports(importNameDetails.get("importRelativeName")); + LinkedHashMap importDefs = _getAllCustomDefs(customDefs.get(IMPORTS)); + customDefsFinal.putAll(importDefs); + } + } + } + } + } else { + LinkedHashMap customDefs = _getCustomTypes(types, new ArrayList<>(imports)); + if (customDefs != null) { + customDefsFinal.putAll(customDefs); + + if (customDefs.get(IMPORTS) != null) { + LinkedHashMap importDefs = _getAllCustomDefs(customDefs.get(IMPORTS)); + customDefsFinal.putAll(importDefs); + } + } + } + } + + // As imports are not custom_types, remove from the dict + customDefsFinal.remove(IMPORTS); + + return customDefsFinal; + } + + /** + * This method is used to sort the imports in order so that same directory + * imports will be processed first + * + * @param customImports the custom imports + * @return the sorted list of imports + 
*/ + private List> sortImports(List> customImports) { + List> finalList1 = new ArrayList<>(); + List> finalList2 = new ArrayList<>(); + Iterator> itr = customImports.iterator(); + while (itr.hasNext()) { + Map innerMap = itr.next(); + if (innerMap.toString().contains("../")) { + finalList2.add(innerMap); + itr.remove(); + } else if (innerMap.toString().contains("/")) { + finalList1.add(innerMap); + itr.remove(); + } + } + + customImports.addAll(finalList1); + customImports.addAll(finalList2); + return customImports; + } + + /** + * This method is used to reset PATH variable after processing of current import file is done + * This is required because of relative path nature of imports present in files. + * + * @param currImportRelativeName the current import relative name + */ + private void resetPathForRecursiveImports(String currImportRelativeName) { + path = getPath(path, currImportRelativeName); + } + + /** + * This is a recursive method which starts from current import and then recursively finds a + * valid path relative to current import file name. + * By doing this it handles all nested hierarchy of imports defined in CSARs + * + * @param path the path + * @param importFileName the import file name + * @return the string containing updated path value + */ + private String getPath(String path, String importFileName) { + String tempFullPath = (Paths.get(path).toAbsolutePath().getParent() + .toString() + File.separator + importFileName.replace("../", "")).replace('\\', '/'); + String tempPartialPath = (Paths.get(path).toAbsolutePath().getParent().toString()).replace('\\', '/'); + if (Files.exists(Paths.get(tempFullPath))) + return tempFullPath; + else + return getPath(tempPartialPath, importFileName); + } + + /** + * This method is used to get full path name for the file which needs to be processed. It helps + * in situation where files are present in different directory and are references as relative + * paths. 
+ * + * @param customImports the custom imports + * @return the map containing import file full and relative paths + */ + private Map getValidFileNameForImportReference(List> customImports) { + String importFileName; + Map retMap = new HashMap<>(); + for (Map map1 : customImports) { + for (Map.Entry entry : map1.entrySet()) { + Map innerMostMap = (Map) entry.getValue(); + Iterator> it = innerMostMap.entrySet().iterator(); + while (it.hasNext()) { + Map.Entry val = it.next(); + if (val.getValue().contains("/")) { + importFileName = (Paths.get(rootPath).toAbsolutePath().getParent().toString() + File + .separator + val.getValue().replace("../", "")).replace('\\', '/'); + } else { + importFileName = (Paths.get(path).toAbsolutePath().getParent().toString() + File + .separator + val.getValue().replace("../", "")).replace('\\', '/'); + } + retMap.put("importFileName", importFileName); + retMap.put("importRelativeName", val.getValue()); + } + } + } + return retMap; + } + + /** + * This method is used to filter the imports which already gets processed in previous step. 
+ * It handles the use case of cyclic dependency in imports which may cause Stack Overflow + * exception + * + * @param customImports the custom imports + * @param importNameDetails the import name details + * @return the list containing filtered imports + */ + private List> filterImportsForRecursion(List> + customImports, Map importNameDetails) { + for (Map map1 : customImports) { + for (Map.Entry entry : map1.entrySet()) { + Map innerMostMap = (Map) entry.getValue(); + Iterator> it = innerMostMap.entrySet().iterator(); + while (it.hasNext()) { + it.next(); + if (processedImports.contains(importNameDetails.get("importFileName"))) { + it.remove(); + } + } + } + } + + // Remove Empty elements + Iterator> itr = customImports.iterator(); + while (itr.hasNext()) { + Map innerMap = itr.next(); + Predicate predicate = p -> p.values().isEmpty(); + innerMap.values().removeIf(predicate); + } + + return customImports; + } + + @SuppressWarnings("unchecked") + private LinkedHashMap _getCustomTypes(Object typeDefinitions, ArrayList alImports) { + // Handle custom types defined in imported template files // This method loads the custom type definitions referenced in "imports" // section of the TOSCA YAML template. 
- - LinkedHashMap customDefs = new LinkedHashMap(); + + LinkedHashMap customDefs = new LinkedHashMap(); ArrayList typeDefs = new ArrayList(); - if(typeDefinitions instanceof String[]) { - for(String s: (String[])typeDefinitions) { - typeDefs.add(s); - } - } - else { - typeDefs.add((String)typeDefinitions); + if (typeDefinitions instanceof String[]) { + for (String s : (String[]) typeDefinitions) { + typeDefs.add(s); + } + } else { + typeDefs.add((String) typeDefinitions); } - if(alImports == null) { + if (alImports == null) { alImports = _tplImports(); } - if(alImports != null) { - ImportsLoader customService = new ImportsLoader(alImports,path,typeDefs,tpl); - ArrayList> nestedToscaTpls = customService.getNestedToscaTpls(); - _updateNestedToscaTplsWithTopology(nestedToscaTpls); + if (alImports != null) { + ImportsLoader customService = new ImportsLoader(alImports, path, typeDefs, tpl); + ArrayList> nestedToscaTpls = customService.getNestedToscaTpls(); + _updateNestedToscaTplsWithTopology(nestedToscaTpls); - customDefs = customService.getCustomDefs(); - if(customDefs == null) { - return null; - } + customDefs = customService.getCustomDefs(); + if (customDefs == null) { + return null; + } } //Handle custom types defined in current template file - for(String td: typeDefs) { - if(!td.equals(IMPORTS)) { - LinkedHashMap innerCustomTypes = (LinkedHashMap )tpl.get(td); - if(innerCustomTypes != null) { - customDefs.putAll(innerCustomTypes); - } - } + for (String td : typeDefs) { + if (!td.equals(IMPORTS)) { + LinkedHashMap innerCustomTypes = (LinkedHashMap) tpl.get(td); + if (innerCustomTypes != null) { + customDefs.putAll(innerCustomTypes); + } + } } return customDefs; - } - - private void _updateNestedToscaTplsWithTopology(ArrayList> nestedToscaTpls) { - for(LinkedHashMap ntpl: nestedToscaTpls) { - // there is just one key:value pair in ntpl - for(Map.Entry me: ntpl.entrySet()) { - String fileName = me.getKey(); - @SuppressWarnings("unchecked") - LinkedHashMap toscaTpl = 
(LinkedHashMap)me.getValue(); - if(toscaTpl.get(TOPOLOGY_TEMPLATE) != null) { - if(nestedToscaTplsWithTopology.get(fileName) == null) { - nestedToscaTplsWithTopology.putAll(ntpl); - } - } - } - } - } - - // multi level nesting - RECURSIVE - @SuppressWarnings("unchecked") - private void _handleNestedToscaTemplatesWithTopology(TopologyTemplate tt) { - if(++nestingLoopCounter > MAX_LEVELS) { - log.error("ToscaTemplate - _handleNestedToscaTemplatesWithTopology - Nested Topologies Loop: too many levels, aborting"); - return; - } - // Reset Processed Imports for nested templates - this.processedImports = new HashSet<>(); - for(Map.Entry me: nestedToscaTplsWithTopology.entrySet()) { - LinkedHashMap toscaTpl = - (LinkedHashMap)me.getValue(); - for(NodeTemplate nt: tt.getNodeTemplates()) { - if(_isSubMappedNode(nt,toscaTpl)) { - parsedParams = _getParamsForNestedTemplate(nt); - ArrayList alim = (ArrayList)toscaTpl.get(IMPORTS); - LinkedHashMap topologyTpl = - (LinkedHashMap)toscaTpl.get(TOPOLOGY_TEMPLATE); - TopologyTemplate topologyWithSubMapping = - new TopologyTemplate(topologyTpl, - _getAllCustomDefs(alim), - relationshipTypes, - parsedParams, - nt, - resolveGetInput); - nt.setOriginComponentTemplate(topologyWithSubMapping); - if(topologyWithSubMapping.getSubstitutionMappings() != null) { + } + + private void _updateNestedToscaTplsWithTopology(ArrayList> nestedToscaTpls) { + for (LinkedHashMap ntpl : nestedToscaTpls) { + // there is just one key:value pair in ntpl + for (Map.Entry me : ntpl.entrySet()) { + String fileName = me.getKey(); + @SuppressWarnings("unchecked") + LinkedHashMap toscaTpl = (LinkedHashMap) me.getValue(); + if (toscaTpl.get(TOPOLOGY_TEMPLATE) != null) { + if (nestedToscaTplsWithTopology.get(fileName) == null) { + nestedToscaTplsWithTopology.putAll(ntpl); + } + } + } + } + } + + // multi level nesting - RECURSIVE + @SuppressWarnings("unchecked") + private void _handleNestedToscaTemplatesWithTopology(TopologyTemplate tt) { + if (++nestingLoopCounter > 
MAX_LEVELS) { + log.error("ToscaTemplate - _handleNestedToscaTemplatesWithTopology - Nested Topologies Loop: too many levels, aborting"); + return; + } + // Reset Processed Imports for nested templates + this.processedImports = new HashSet<>(); + for (Map.Entry me : nestedToscaTplsWithTopology.entrySet()) { + LinkedHashMap toscaTpl = + (LinkedHashMap) me.getValue(); + for (NodeTemplate nt : tt.getNodeTemplates()) { + if (_isSubMappedNode(nt, toscaTpl)) { + parsedParams = _getParamsForNestedTemplate(nt); + ArrayList alim = (ArrayList) toscaTpl.get(IMPORTS); + LinkedHashMap topologyTpl = + (LinkedHashMap) toscaTpl.get(TOPOLOGY_TEMPLATE); + TopologyTemplate topologyWithSubMapping = + new TopologyTemplate(topologyTpl, + _getAllCustomDefs(alim), + relationshipTypes, + parsedParams, + nt, + resolveGetInput); + nt.setOriginComponentTemplate(topologyWithSubMapping); + if (topologyWithSubMapping.getSubstitutionMappings() != null) { // Record nested topology templates in top level template //nestedToscaTemplatesWithTopology.add(topologyWithSubMapping); // Set substitution mapping object for mapped node nt.setSubMappingToscaTemplate( - topologyWithSubMapping.getSubstitutionMappings()); + topologyWithSubMapping.getSubstitutionMappings()); _handleNestedToscaTemplatesWithTopology(topologyWithSubMapping); - } - } - } - } - } - + } + } + } + } + } + // private void _handleNestedToscaTemplatesWithTopology() { // for(Map.Entry me: nestedToscaTplsWithTopology.entrySet()) { // String fname = me.getKey(); @@ -692,150 +685,145 @@ public class ToscaTemplate extends Object { // } // } - private void _validateField() { - String sVersion = _tplVersion(); - if(sVersion == null) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE245", String.format( - "MissingRequiredField: Template is missing required field \"%s\"",DEFINITION_VERSION))); - } - else { - _validateVersion(sVersion); - this.version = sVersion; - } - - for (String sKey : tpl.keySet()) { - 
boolean bFound = false; - for (String sSection: SECTIONS) { - if(sKey.equals(sSection)) { - bFound = true; - break; - } - } - // check ADDITIONAL_SECTIONS - if(!bFound) { - if(ADDITIONAL_SECTIONS.get(version) != null && - ADDITIONAL_SECTIONS.get(version).contains(sKey)) { - bFound = true; - } - } - if(!bFound) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE246", String.format( - "UnknownFieldError: Template contains unknown field \"%s\"", - sKey))); - } - } - } - - private void _validateVersion(String sVersion) { - boolean bFound = false; - for(String vtv: VALID_TEMPLATE_VERSIONS) { - if(sVersion.equals(vtv)) { - bFound = true; - break; - } - } - if(!bFound) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE247", String.format( - "InvalidTemplateVersion: \"%s\" is invalid. Valid versions are %s", - sVersion,VALID_TEMPLATE_VERSIONS.toString()))); - } - else if ((!sVersion.equals("tosca_simple_yaml_1_0") && !sVersion.equals("tosca_simple_yaml_1_1"))) { - EntityType.updateDefinitions(sVersion); - - } - } - - private String _getPath(String _path) throws JToscaException { - if (_path.toLowerCase().endsWith(".yaml") || _path.toLowerCase().endsWith(".yml")) { - return _path; - } - else if (_path.toLowerCase().endsWith(".zip") || _path.toLowerCase().endsWith(".csar")) { - // a CSAR archive - CSAR csar = new CSAR(_path, isFile); - if (csar.validate()) { - try { - csar.decompress(); - metaProperties = csar.getMetaProperties(); - } - catch (IOException e) { - log.error("ToscaTemplate - _getPath - IOException trying to decompress {}", _path); - return null; - } - isFile = true; // the file has been decompressed locally - csar.cleanup(); - csarTempDir = csar.getTempDir(); - return csar.getTempDir() + File.separator + csar.getMainTemplate(); - } - } - else { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE248", "ValueError: " + _path + " is not a valid file")); 
- return null; - } - return null; - } - - private void verifyTemplate() throws JToscaException { - //Criticals - int validationIssuesCaught = ThreadLocalsHolder.getCollector().validationIssuesCaught(); - if (validationIssuesCaught > 0) { - List validationIssueStrings = ThreadLocalsHolder.getCollector().getValidationIssueReport(); - log.trace("####################################################################################################"); - log.trace("ToscaTemplate - verifyTemplate - {} Parsing Critical{} occurred...", validationIssuesCaught, (validationIssuesCaught > 1 ? "s" : "")); - for (String s : validationIssueStrings) { - log.trace("{}. CSAR name - {}", s, inputPath); - } - log.trace("####################################################################################################"); - } - - } - - public String getPath() { - return path; - } - - public String getVersion() { - return version; - } - - public String getDescription() { - return description; - } - - public TopologyTemplate getTopologyTemplate() { - return topologyTemplate; - } - - public Metadata getMetaData() { - return metaData; - } - - public ArrayList getInputs() { - if(inputs != null){ - inputs.stream().forEach(Input::resetAnnotaions); - } - return inputs; - } - - public ArrayList getOutputs() { - return outputs; - } - - public ArrayList getPolicies() { - return policies; - } - - public ArrayList getGroups() { - return groups; - } - - public ArrayList getNodeTemplates() { - return nodeTemplates; - } - - public LinkedHashMap getMetaProperties(String propertiesFile) { - return metaProperties.get(propertiesFile); - } - + private void _validateField() { + String sVersion = _tplVersion(); + if (sVersion == null) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE245", String.format( + "MissingRequiredField: Template is missing required field \"%s\"", DEFINITION_VERSION))); + } else { + _validateVersion(sVersion); + this.version = sVersion; + } + + 
for (String sKey : tpl.keySet()) { + boolean bFound = false; + for (String sSection : SECTIONS) { + if (sKey.equals(sSection)) { + bFound = true; + break; + } + } + // check ADDITIONAL_SECTIONS + if (!bFound) { + if (ADDITIONAL_SECTIONS.get(version) != null && + ADDITIONAL_SECTIONS.get(version).contains(sKey)) { + bFound = true; + } + } + if (!bFound) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE246", String.format( + "UnknownFieldError: Template contains unknown field \"%s\"", + sKey))); + } + } + } + + private void _validateVersion(String sVersion) { + boolean bFound = false; + for (String vtv : VALID_TEMPLATE_VERSIONS) { + if (sVersion.equals(vtv)) { + bFound = true; + break; + } + } + if (!bFound) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE247", String.format( + "InvalidTemplateVersion: \"%s\" is invalid. Valid versions are %s", + sVersion, VALID_TEMPLATE_VERSIONS.toString()))); + } else if ((!sVersion.equals("tosca_simple_yaml_1_0") && !sVersion.equals("tosca_simple_yaml_1_1"))) { + EntityType.updateDefinitions(sVersion); + + } + } + + private String _getPath(String _path) throws JToscaException { + if (_path.toLowerCase().endsWith(".yaml") || _path.toLowerCase().endsWith(".yml")) { + return _path; + } else if (_path.toLowerCase().endsWith(".zip") || _path.toLowerCase().endsWith(".csar")) { + // a CSAR archive + CSAR csar = new CSAR(_path, isFile); + if (csar.validate()) { + try { + csar.decompress(); + metaProperties = csar.getMetaProperties(); + } catch (IOException e) { + log.error("ToscaTemplate - _getPath - IOException trying to decompress {}", _path); + return null; + } + isFile = true; // the file has been decompressed locally + csar.cleanup(); + csarTempDir = csar.getTempDir(); + return csar.getTempDir() + File.separator + csar.getMainTemplate(); + } + } else { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE248", 
"ValueError: " + _path + " is not a valid file")); + return null; + } + return null; + } + + private void verifyTemplate() throws JToscaException { + //Criticals + int validationIssuesCaught = ThreadLocalsHolder.getCollector().validationIssuesCaught(); + if (validationIssuesCaught > 0) { + List validationIssueStrings = ThreadLocalsHolder.getCollector().getValidationIssueReport(); + log.trace("####################################################################################################"); + log.trace("ToscaTemplate - verifyTemplate - {} Parsing Critical{} occurred...", validationIssuesCaught, (validationIssuesCaught > 1 ? "s" : "")); + for (String s : validationIssueStrings) { + log.trace("{}. CSAR name - {}", s, inputPath); + } + log.trace("####################################################################################################"); + } + + } + + public String getPath() { + return path; + } + + public String getVersion() { + return version; + } + + public String getDescription() { + return description; + } + + public TopologyTemplate getTopologyTemplate() { + return topologyTemplate; + } + + public Metadata getMetaData() { + return metaData; + } + + public ArrayList getInputs() { + if (inputs != null) { + inputs.stream().forEach(Input::resetAnnotaions); + } + return inputs; + } + + public ArrayList getOutputs() { + return outputs; + } + + public ArrayList getPolicies() { + return policies; + } + + public ArrayList getGroups() { + return groups; + } + + public ArrayList getNodeTemplates() { + return nodeTemplates; + } + + public LinkedHashMap getMetaProperties(String propertiesFile) { + return metaProperties.get(propertiesFile); + } + // private boolean _isSubMappedNode(NodeTemplate nt,LinkedHashMap toscaTpl) { // // Return True if the nodetemple is substituted // if(nt != null && nt.getSubMappingToscaTemplate() == null && @@ -846,105 +834,105 @@ public class ToscaTemplate extends Object { // return false; // } - private boolean 
_isSubMappedNode(NodeTemplate nt, LinkedHashMap toscaTpl) { - // Return True if the nodetemple is substituted - if(nt != null && nt.getSubMappingToscaTemplate() == null && - getSubMappingNodeType(toscaTpl).equals(nt.getType()) && - nt.getInterfaces().size() < 1) { - return true; - } - return false; - } - - private LinkedHashMap _getParamsForNestedTemplate(NodeTemplate nt) { - // Return total params for nested_template - LinkedHashMap pparams; - if(parsedParams != null) { - pparams = parsedParams; - } - else { - pparams = new LinkedHashMap(); - } - if(nt != null) { - for(String pname: nt.getProperties().keySet()) { - pparams.put(pname,nt.getPropertyValue(pname)); - } - } - return pparams; - } - - @SuppressWarnings("unchecked") - private String getSubMappingNodeType(LinkedHashMap toscaTpl) { - // Return substitution mappings node type - if(toscaTpl != null) { - return TopologyTemplate.getSubMappingNodeType( - (LinkedHashMap)toscaTpl.get(TOPOLOGY_TEMPLATE)); - } - return null; - } - - public boolean hasNestedTemplates() { + private boolean _isSubMappedNode(NodeTemplate nt, LinkedHashMap toscaTpl) { + // Return True if the nodetemple is substituted + if (nt != null && nt.getSubMappingToscaTemplate() == null && + getSubMappingNodeType(toscaTpl).equals(nt.getType()) && + nt.getInterfaces().size() < 1) { + return true; + } + return false; + } + + private LinkedHashMap _getParamsForNestedTemplate(NodeTemplate nt) { + // Return total params for nested_template + LinkedHashMap pparams; + if (parsedParams != null) { + pparams = parsedParams; + } else { + pparams = new LinkedHashMap(); + } + if (nt != null) { + for (String pname : nt.getProperties().keySet()) { + pparams.put(pname, nt.getPropertyValue(pname)); + } + } + return pparams; + } + + @SuppressWarnings("unchecked") + private String getSubMappingNodeType(LinkedHashMap toscaTpl) { + // Return substitution mappings node type + if (toscaTpl != null) { + return TopologyTemplate.getSubMappingNodeType( + (LinkedHashMap) 
toscaTpl.get(TOPOLOGY_TEMPLATE)); + } + return null; + } + + public boolean hasNestedTemplates() { // Return True if the tosca template has nested templates return nestedToscaTemplatesWithTopology != null && - nestedToscaTemplatesWithTopology.size() >= 1; - - } - - public ArrayList getNestedTemplates() { - return nestedToscaTemplatesWithTopology; - } - - public ConcurrentHashMap getNestedTopologyTemplates() { - return nestedToscaTplsWithTopology; - } - - /** - * Get datatypes. - * @return return list of datatypes. - */ - public HashSet getDataTypes() { - return dataTypes; - } - - @Override - public String toString() { - return "ToscaTemplate{" + - "exttools=" + exttools + - ", VALID_TEMPLATE_VERSIONS=" + VALID_TEMPLATE_VERSIONS + - ", ADDITIONAL_SECTIONS=" + ADDITIONAL_SECTIONS + - ", isFile=" + isFile + - ", path='" + path + '\'' + - ", inputPath='" + inputPath + '\'' + - ", parsedParams=" + parsedParams + - ", tpl=" + tpl + - ", version='" + version + '\'' + - ", imports=" + imports + - ", relationshipTypes=" + relationshipTypes + - ", metaData=" + metaData + - ", description='" + description + '\'' + - ", topologyTemplate=" + topologyTemplate + - ", repositories=" + repositories + - ", inputs=" + inputs + - ", relationshipTemplates=" + relationshipTemplates + - ", nodeTemplates=" + nodeTemplates + - ", outputs=" + outputs + - ", policies=" + policies + - ", nestedToscaTplsWithTopology=" + nestedToscaTplsWithTopology + - ", nestedToscaTemplatesWithTopology=" + nestedToscaTemplatesWithTopology + - ", graph=" + graph + - ", csarTempDir='" + csarTempDir + '\'' + - ", nestingLoopCounter=" + nestingLoopCounter + - ", dataTypes=" + dataTypes + - '}'; - } - - public List getInputs(boolean annotationsRequired) { - if(inputs != null && annotationsRequired){ - inputs.stream().forEach(Input::parseAnnotations); - return inputs; - } - return getInputs(); - } + nestedToscaTemplatesWithTopology.size() >= 1; + + } + + public ArrayList getNestedTemplates() { + return 
nestedToscaTemplatesWithTopology; + } + + public ConcurrentHashMap getNestedTopologyTemplates() { + return nestedToscaTplsWithTopology; + } + + /** + * Get datatypes. + * + * @return return list of datatypes. + */ + public HashSet getDataTypes() { + return dataTypes; + } + + @Override + public String toString() { + return "ToscaTemplate{" + + "exttools=" + exttools + + ", VALID_TEMPLATE_VERSIONS=" + VALID_TEMPLATE_VERSIONS + + ", ADDITIONAL_SECTIONS=" + ADDITIONAL_SECTIONS + + ", isFile=" + isFile + + ", path='" + path + '\'' + + ", inputPath='" + inputPath + '\'' + + ", parsedParams=" + parsedParams + + ", tpl=" + tpl + + ", version='" + version + '\'' + + ", imports=" + imports + + ", relationshipTypes=" + relationshipTypes + + ", metaData=" + metaData + + ", description='" + description + '\'' + + ", topologyTemplate=" + topologyTemplate + + ", repositories=" + repositories + + ", inputs=" + inputs + + ", relationshipTemplates=" + relationshipTemplates + + ", nodeTemplates=" + nodeTemplates + + ", outputs=" + outputs + + ", policies=" + policies + + ", nestedToscaTplsWithTopology=" + nestedToscaTplsWithTopology + + ", nestedToscaTemplatesWithTopology=" + nestedToscaTemplatesWithTopology + + ", graph=" + graph + + ", csarTempDir='" + csarTempDir + '\'' + + ", nestingLoopCounter=" + nestingLoopCounter + + ", dataTypes=" + dataTypes + + '}'; + } + + public List getInputs(boolean annotationsRequired) { + if (inputs != null && annotationsRequired) { + inputs.stream().forEach(Input::parseAnnotations); + return inputs; + } + return getInputs(); + } } /*python diff --git a/src/main/java/org/onap/sdc/toscaparser/api/Triggers.java b/src/main/java/org/onap/sdc/toscaparser/api/Triggers.java index 91545c2..c78978f 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/Triggers.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/Triggers.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance 
with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -28,121 +28,119 @@ import java.util.LinkedHashMap; public class Triggers extends EntityTemplate { - private static final String DESCRIPTION = "description"; - private static final String EVENT = "event_type"; - private static final String SCHEDULE = "schedule"; - private static final String TARGET_FILTER = "target_filter"; - private static final String CONDITION = "condition"; - private static final String ACTION = "action"; - - private static final String SECTIONS[] = { - DESCRIPTION, EVENT, SCHEDULE, TARGET_FILTER, CONDITION, ACTION - }; - - private static final String METER_NAME = "meter_name"; - private static final String CONSTRAINT = "constraint"; - private static final String PERIOD = "period"; - private static final String EVALUATIONS = "evaluations"; - private static final String METHOD = "method"; - private static final String THRESHOLD = "threshold"; - private static final String COMPARISON_OPERATOR = "comparison_operator"; - - private static final String CONDITION_KEYNAMES[] = { - METER_NAME, CONSTRAINT, PERIOD, EVALUATIONS, METHOD, THRESHOLD, COMPARISON_OPERATOR - }; - - private String name; - private LinkedHashMap triggerTpl; - - public Triggers(String _name,LinkedHashMap _triggerTpl) { - super(); // dummy. 
don't want super - name = _name; - triggerTpl = _triggerTpl; - _validateKeys(); - _validateCondition(); - _validateInput(); - } - - public String getDescription() { - return (String)triggerTpl.get("description"); - } - - public String getEvent() { - return (String)triggerTpl.get("event_type"); - } - - public LinkedHashMap getSchedule() { - return (LinkedHashMap)triggerTpl.get("schedule"); - } - - public LinkedHashMap getTargetFilter() { - return (LinkedHashMap)triggerTpl.get("target_filter"); - } - - public LinkedHashMap getCondition() { - return (LinkedHashMap)triggerTpl.get("condition"); - } - - public LinkedHashMap getAction() { - return (LinkedHashMap)triggerTpl.get("action"); - } - - private void _validateKeys() { - for(String key: triggerTpl.keySet()) { - boolean bFound = false; - for(int i=0; i triggerTpl; + + public Triggers(String name, LinkedHashMap triggerTpl) { + super(); // dummy. don't want super + this.name = name; + this.triggerTpl = triggerTpl; + validateKeys(); + validateCondition(); + validateInput(); + } + + public String getDescription() { + return (String) triggerTpl.get("description"); + } + + public String getEvent() { + return (String) triggerTpl.get("event_type"); + } + + public LinkedHashMap getSchedule() { + return (LinkedHashMap) triggerTpl.get("schedule"); + } + + public LinkedHashMap getTargetFilter() { + return (LinkedHashMap) triggerTpl.get("target_filter"); + } + + public LinkedHashMap getCondition() { + return (LinkedHashMap) triggerTpl.get("condition"); + } + + public LinkedHashMap getAction() { + return (LinkedHashMap) triggerTpl.get("action"); + } + + private void validateKeys() { + for (String key : triggerTpl.keySet()) { + boolean bFound = false; + for (int i = 0; i < SECTIONS.length; i++) { + if (key.equals(SECTIONS[i])) { + bFound = true; + break; + } + } + if (!bFound) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE249", String.format( + "UnknownFieldError: Triggers \"%s\" contains 
unknown field \"%s\"", + name, key))); + } + } + } + + private void validateCondition() { + for (String key : getCondition().keySet()) { + boolean bFound = false; + for (int i = 0; i < CONDITION_KEYNAMES.length; i++) { + if (key.equals(CONDITION_KEYNAMES[i])) { + bFound = true; + break; + } + } + if (!bFound) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE250", String.format( + "UnknownFieldError: Triggers \"%s\" contains unknown field \"%s\"", + name, key))); + } + } + } + + private void validateInput() { + for (String key : getCondition().keySet()) { + Object value = getCondition().get(key); + if (key.equals(PERIOD) || key.equals(EVALUATIONS)) { + ValidateUtils.validateInteger(value); + } else if (key.equals(THRESHOLD)) { + ValidateUtils.validateNumeric(value); + } else if (key.equals(METER_NAME) || key.equals(METHOD)) { + ValidateUtils.validateString(value); + } + } + } + + @Override + public String toString() { + return "Triggers{" + + "name='" + name + '\'' + + ", triggerTpl=" + triggerTpl + + '}'; + } } /*python diff --git a/src/main/java/org/onap/sdc/toscaparser/api/UnsupportedType.java b/src/main/java/org/onap/sdc/toscaparser/api/UnsupportedType.java index b7adfa4..f2bb650 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/UnsupportedType.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/UnsupportedType.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -38,20 +38,23 @@ public class UnsupportedType { of un_supported_types. 
As tosca-parser move to provide support for version 1.1 and higher, they will be removed. */ - - private static final String unsupportedTypes[] = { - "tosca.test.invalidtype", - "tosca.nodes.Storage.ObjectStorage", - "tosca.nodes.Storage.BlockStorage"}; + + private UnsupportedType() { + } + + private static final String[] UNSUPPORTED_TYPES = { + "tosca.test.invalidtype", + "tosca.nodes.Storage.ObjectStorage", + "tosca.nodes.Storage.BlockStorage"}; public static boolean validateType(String entityType) { - for(String ust: unsupportedTypes) { - if(ust.equals(entityType)) { + for (String ust : UNSUPPORTED_TYPES) { + if (ust.equals(entityType)) { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE251", String.format( - "UnsupportedTypeError: Entity type \"%s\" is not supported",entityType))); - return true; - } - } + "UnsupportedTypeError: Entity type \"%s\" is not supported", entityType))); + return true; + } + } return false; } } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/common/JToscaException.java b/src/main/java/org/onap/sdc/toscaparser/api/common/JToscaException.java index b96399b..56416c6 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/common/JToscaException.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/common/JToscaException.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -22,26 +22,26 @@ package org.onap.sdc.toscaparser.api.common; public class JToscaException extends Exception { - private static final long serialVersionUID = 1L; - private String code; + private static final long serialVersionUID = 1L; + private String code; - public JToscaException(String message, String code) { - super(message); - this.code = code; - } + public JToscaException(String message, String code) { + super(message); + this.code = code; + } - public String getCode() { - return code; - } + public String getCode() { + return code; + } - public void setCode(String code) { - this.code = code; - } + public void setCode(String code) { + this.code = code; + } - //JE1001 - Meta file missing - //JE1002 - Invalid yaml content - //JE1003 - Entry-Definition not defined in meta file - //JE1004 - Entry-Definition file missing - //JE1005 - General Error - //JE1006 - General Error/Path not valid + //JE1001 - Meta file missing + //JE1002 - Invalid yaml content + //JE1003 - Entry-Definition not defined in meta file + //JE1004 - Entry-Definition file missing + //JE1005 - General Error + //JE1006 - General Error/Path not valid } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/common/JToscaValidationIssue.java b/src/main/java/org/onap/sdc/toscaparser/api/common/JToscaValidationIssue.java index 9eb8f54..19c9583 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/common/JToscaValidationIssue.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/common/JToscaValidationIssue.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -22,34 +22,34 @@ package org.onap.sdc.toscaparser.api.common; public class JToscaValidationIssue { - private String code; - private String message; + private String code; + private String message; - public JToscaValidationIssue(String code, String message) { - super(); - this.code = code; - this.message = message; - } + public JToscaValidationIssue(String code, String message) { + super(); + this.code = code; + this.message = message; + } - public String getMessage() { - return message; - } + public String getMessage() { + return message; + } - public void setMessage(String message) { - this.message = message; - } + public void setMessage(String message) { + this.message = message; + } - public String getCode() { - return code; - } + public String getCode() { + return code; + } - public void setCode(String code) { - this.code = code; - } - - @Override - public String toString() { - return "JToscaError [code=" + code + ", message=" + message + "]"; - } + public void setCode(String code) { + this.code = code; + } + + @Override + public String toString() { + return "JToscaError [code=" + code + ", message=" + message + "]"; + } } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/common/TOSCAException.java b/src/main/java/org/onap/sdc/toscaparser/api/common/TOSCAException.java index 2769c1a..c109ffd 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/common/TOSCAException.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/common/TOSCAException.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -23,37 +23,36 @@ package org.onap.sdc.toscaparser.api.common; import java.util.IllegalFormatException; public class TOSCAException extends Exception { - private String message = "An unkown exception has occurred"; - private static boolean FATAL_EXCEPTION_FORMAT_ERRORS = false; - private String msgFmt = null; - - public TOSCAException(String...strings) { - try { - message = String.format(msgFmt,(Object[])strings); - } - catch (IllegalFormatException e) { - // TODO log - - if(FATAL_EXCEPTION_FORMAT_ERRORS) { - throw e; - } - - } - - } - - public String __str__() { - return message; - } - - public static void generate_inv_schema_property_error(String name, String attr, String value, String valid_values) { - //TODO - - } - - public static void setFatalFormatException(boolean flag) { - FATAL_EXCEPTION_FORMAT_ERRORS = flag; - } - + private String message = "An unkown exception has occurred"; + private static boolean FATAL_EXCEPTION_FORMAT_ERRORS = false; + private String msgFmt = null; + + public TOSCAException(String... 
strings) { + try { + message = String.format(msgFmt, (Object[]) strings); + } catch (IllegalFormatException e) { + // TODO log + + if (FATAL_EXCEPTION_FORMAT_ERRORS) { + throw e; + } + + } + + } + + public String __str__() { + return message; + } + + public static void generate_inv_schema_property_error(String name, String attr, String value, String valid_values) { + //TODO + + } + + public static void setFatalFormatException(boolean flag) { + FATAL_EXCEPTION_FORMAT_ERRORS = flag; + } + } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/common/ValidationIssueCollector.java b/src/main/java/org/onap/sdc/toscaparser/api/common/ValidationIssueCollector.java index 25bb854..71c0401 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/common/ValidationIssueCollector.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/common/ValidationIssueCollector.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -26,10 +26,11 @@ import java.util.*; public class ValidationIssueCollector { - private Map validationIssues = new HashMap(); + private Map validationIssues = new HashMap(); + public void appendValidationIssue(JToscaValidationIssue issue) { - validationIssues.put(issue.getMessage(),issue); + validationIssues.put(issue.getMessage(), issue); } @@ -37,13 +38,14 @@ public class ValidationIssueCollector { List report = new ArrayList<>(); if (!validationIssues.isEmpty()) { for (JToscaValidationIssue exception : validationIssues.values()) { - report.add("["+exception.getCode()+"]: "+ exception.getMessage()); + report.add("[" + exception.getCode() + "]: " + exception.getMessage()); } } return report; } - public Map getValidationIssues() { + + public Map getValidationIssues() { return validationIssues; } @@ -51,5 +53,5 @@ public class ValidationIssueCollector { public int validationIssuesCaught() { return validationIssues.size(); } - + } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/ArtifactTypeDef.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/ArtifactTypeDef.java index 3dce5e6..9cf8c6c 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/ArtifactTypeDef.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/ArtifactTypeDef.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -23,67 +23,63 @@ package org.onap.sdc.toscaparser.api.elements; import java.util.LinkedHashMap; public class ArtifactTypeDef extends StatefulEntityType { - - private String type; - private LinkedHashMap customDef; - private LinkedHashMap properties; - private LinkedHashMap parentArtifacts; - - - - public ArtifactTypeDef(String atype,LinkedHashMap _customDef) { - super(atype,ARTIFACT_PREFIX,_customDef); - - type = atype; - customDef = _customDef; - properties = null; - if(defs != null) { - properties = (LinkedHashMap)defs.get(PROPERTIES); - } - parentArtifacts = _getParentArtifacts(); - } - - private LinkedHashMap _getParentArtifacts() { - LinkedHashMap artifacts = new LinkedHashMap<>(); - String parentArtif = null; - if(getParentType() != null) { - parentArtif = getParentType().getType(); - } - if(parentArtif != null && !parentArtif.isEmpty()) { - while(!parentArtif.equals("tosca.artifacts.Root")) { - Object ob = TOSCA_DEF.get(parentArtif); - artifacts.put(parentArtif,ob); - parentArtif = - (String)((LinkedHashMap)ob).get("derived_from"); + + private String type; + private LinkedHashMap customDef; + private LinkedHashMap properties; + private LinkedHashMap parentArtifacts; + + + public ArtifactTypeDef(String type, LinkedHashMap customDef) { + super(type, ARTIFACT_PREFIX, customDef); + + this.type = type; + this.customDef = customDef; + properties = defs != null ? 
(LinkedHashMap) defs.get(PROPERTIES) : null; + parentArtifacts = getParentArtifacts(); + } + + private LinkedHashMap getParentArtifacts() { + LinkedHashMap artifacts = new LinkedHashMap<>(); + String parentArtif = null; + if (getParentType() != null) { + parentArtif = getParentType().getType(); + } + if (parentArtif != null && !parentArtif.isEmpty()) { + while (!parentArtif.equals("tosca.artifacts.Root")) { + Object ob = TOSCA_DEF.get(parentArtif); + artifacts.put(parentArtif, ob); + parentArtif = + (String) ((LinkedHashMap) ob).get("derived_from"); } - } - return artifacts; - } - - public ArtifactTypeDef getParentType() { + } + return artifacts; + } + + public ArtifactTypeDef getParentType() { // Return a artifact entity from which this entity is derived - if(defs == null) { - return null; + if (defs == null) { + return null; } String partifactEntity = derivedFrom(defs); - if(partifactEntity != null) { - return new ArtifactTypeDef(partifactEntity,customDef); + if (partifactEntity != null) { + return new ArtifactTypeDef(partifactEntity, customDef); } return null; - } - - public Object getArtifact(String name) { + } + + public Object getArtifact(String name) { // Return the definition of an artifact field by name - if(defs != null) { + if (defs != null) { return defs.get(name); } return null; - } - - public String getType() { - return type; - } - + } + + public String getType() { + return type; + } + } /*python diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/AttributeDef.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/AttributeDef.java index 2070c50..e4a30f1 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/AttributeDef.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/AttributeDef.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -24,27 +24,27 @@ import java.util.LinkedHashMap; public class AttributeDef { // TOSCA built-in Attribute type - - private String name; - private Object value; - private LinkedHashMap schema; - public AttributeDef(String adName, Object adValue, LinkedHashMap adSchema) { + private String name; + private Object value; + private LinkedHashMap schema; + + public AttributeDef(String adName, Object adValue, LinkedHashMap adSchema) { name = adName; value = adValue; schema = adSchema; } - + public String getName() { - return name; + return name; } public Object getValue() { - return value; + return value; } - public LinkedHashMap getSchema() { - return schema; + public LinkedHashMap getSchema() { + return schema; } } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/CapabilityTypeDef.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/CapabilityTypeDef.java index 9f9610e..e3c24b3 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/CapabilityTypeDef.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/CapabilityTypeDef.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -25,134 +25,132 @@ import java.util.LinkedHashMap; import java.util.Map; public class CapabilityTypeDef extends StatefulEntityType { - // TOSCA built-in capabilities type + // TOSCA built-in capabilities type - private static final String TOSCA_TYPEURI_CAPABILITY_ROOT = "tosca.capabilities.Root"; + private static final String TOSCA_TYPEURI_CAPABILITY_ROOT = "tosca.capabilities.Root"; - private String name; - private String nodetype; - private LinkedHashMap customDef; - private LinkedHashMap properties; - private LinkedHashMap parentCapabilities; + private String name; + private String nodetype; + private LinkedHashMap customDef; + private LinkedHashMap properties; + private LinkedHashMap parentCapabilities; - @SuppressWarnings("unchecked") - public CapabilityTypeDef(String cname,String ctype,String ntype,LinkedHashMap ccustomDef) { - super(ctype,CAPABILITY_PREFIX,ccustomDef); - - name = cname; + @SuppressWarnings("unchecked") + public CapabilityTypeDef(String cname, String ctype, String ntype, LinkedHashMap ccustomDef) { + super(ctype, CAPABILITY_PREFIX, ccustomDef); + + name = cname; nodetype = ntype; properties = null; customDef = ccustomDef; - if(defs != null) { - properties = (LinkedHashMap)defs.get(PROPERTIES); + if (defs != null) { + properties = (LinkedHashMap) defs.get(PROPERTIES); } - parentCapabilities = _getParentCapabilities(customDef); - } - - @SuppressWarnings("unchecked") - public ArrayList getPropertiesDefObjects () { + parentCapabilities = getParentCapabilities(customDef); + } + + @SuppressWarnings("unchecked") + public ArrayList getPropertiesDefObjects() { // Return a list of property definition objects - ArrayList propsdefs = new ArrayList<>(); - LinkedHashMap parentProperties = new LinkedHashMap<>(); - if(parentCapabilities != null) { - for(Map.Entry me: parentCapabilities.entrySet()) { - parentProperties.put(me.getKey(),((LinkedHashMap)me.getValue()).get("properties")); - } - } - if(properties != null) { - for(Map.Entry me: 
properties.entrySet()) { - propsdefs.add(new PropertyDef(me.getKey(),null,(LinkedHashMap)me.getValue())); - } - } - if(parentProperties != null) { - for(Map.Entry me: parentProperties.entrySet()) { - LinkedHashMap props = (LinkedHashMap)me.getValue(); - if (props != null) { - for(Map.Entry pe: props.entrySet()) { - String prop = pe.getKey(); - LinkedHashMap schema = (LinkedHashMap)pe.getValue(); - // add parent property if not overridden by children type - if(properties == null || properties.get(prop) == null) { - propsdefs.add(new PropertyDef(prop, null, schema)); - } - } - } - } - } - return propsdefs; - } - - public LinkedHashMap getPropertiesDef() { - LinkedHashMap pds = new LinkedHashMap<>(); - for(PropertyDef pd: getPropertiesDefObjects()) { - pds.put(pd.getName(),pd); - } - return pds; - } - - public PropertyDef getPropertyDefValue(String pdname) { + ArrayList propsdefs = new ArrayList<>(); + LinkedHashMap parentProperties = new LinkedHashMap<>(); + if (parentCapabilities != null) { + for (Map.Entry me : parentCapabilities.entrySet()) { + parentProperties.put(me.getKey(), ((LinkedHashMap) me.getValue()).get("properties")); + } + } + if (properties != null) { + for (Map.Entry me : properties.entrySet()) { + propsdefs.add(new PropertyDef(me.getKey(), null, (LinkedHashMap) me.getValue())); + } + } + if (parentProperties != null) { + for (Map.Entry me : parentProperties.entrySet()) { + LinkedHashMap props = (LinkedHashMap) me.getValue(); + if (props != null) { + for (Map.Entry pe : props.entrySet()) { + String prop = pe.getKey(); + LinkedHashMap schema = (LinkedHashMap) pe.getValue(); + // add parent property if not overridden by children type + if (properties == null || properties.get(prop) == null) { + propsdefs.add(new PropertyDef(prop, null, schema)); + } + } + } + } + } + return propsdefs; + } + + public LinkedHashMap getPropertiesDef() { + LinkedHashMap pds = new LinkedHashMap<>(); + for (PropertyDef pd : getPropertiesDefObjects()) { + 
pds.put(pd.getName(), pd); + } + return pds; + } + + public PropertyDef getPropertyDefValue(String pdname) { // Return the definition of a given property name - LinkedHashMap propsDef = getPropertiesDef(); - if(propsDef != null && propsDef.get(pdname) != null) { - return (PropertyDef)propsDef.get(pdname).getPDValue(); - } - return null; - } - - @SuppressWarnings("unchecked") - private LinkedHashMap _getParentCapabilities(LinkedHashMap customDef) { - LinkedHashMap capabilities = new LinkedHashMap<>(); - CapabilityTypeDef parentCap = getParentType(); - if(parentCap != null) { - String sParentCap = parentCap.getType(); - while(!sParentCap.equals(TOSCA_TYPEURI_CAPABILITY_ROOT)) { - if(TOSCA_DEF.get(sParentCap) != null) { - capabilities.put(sParentCap,TOSCA_DEF.get(sParentCap)); - } - else if(customDef != null && customDef.get(sParentCap) != null) { - capabilities.put(sParentCap,customDef.get(sParentCap)); - } - sParentCap = (String)((LinkedHashMap)capabilities.get(sParentCap)).get("derived_from"); - } - } - return capabilities; - } - - public CapabilityTypeDef getParentType() { + LinkedHashMap propsDef = getPropertiesDef(); + if (propsDef != null && propsDef.get(pdname) != null) { + return (PropertyDef) propsDef.get(pdname).getPDValue(); + } + return null; + } + + @SuppressWarnings("unchecked") + private LinkedHashMap getParentCapabilities(LinkedHashMap customDef) { + LinkedHashMap capabilities = new LinkedHashMap<>(); + CapabilityTypeDef parentCap = getParentType(); + if (parentCap != null) { + String sParentCap = parentCap.getType(); + while (!sParentCap.equals(TOSCA_TYPEURI_CAPABILITY_ROOT)) { + if (TOSCA_DEF.get(sParentCap) != null) { + capabilities.put(sParentCap, TOSCA_DEF.get(sParentCap)); + } else if (customDef != null && customDef.get(sParentCap) != null) { + capabilities.put(sParentCap, customDef.get(sParentCap)); + } + sParentCap = (String) ((LinkedHashMap) capabilities.get(sParentCap)).get("derived_from"); + } + } + return capabilities; + } + + public 
CapabilityTypeDef getParentType() { // Return a capability this capability is derived from - if(defs == null) { - return null; - } - String pnode = derivedFrom(defs); - if(pnode != null && !pnode.isEmpty()) { + if (defs == null) { + return null; + } + String pnode = derivedFrom(defs); + if (pnode != null && !pnode.isEmpty()) { return new CapabilityTypeDef(name, pnode, nodetype, customDef); - } - return null; - } + } + return null; + } - public boolean inheritsFrom(ArrayList typeNames) { + public boolean inheritsFrom(ArrayList typeNames) { // Check this capability is in type_names // Check if this capability or some of its parent types // are in the list of types: type_names - if(typeNames.contains(getType())) { - return true; - } - else if(getParentType() != null) { - return getParentType().inheritsFrom(typeNames); - } - return false; - } - - // getters/setters - - public LinkedHashMap getProperties() { - return properties; - } - - public String getName() { - return name; - } + if (typeNames.contains(getType())) { + return true; + } else if (getParentType() != null) { + return getParentType().inheritsFrom(typeNames); + } + return false; + } + + // getters/setters + + public LinkedHashMap getProperties() { + return properties; + } + + public String getName() { + return name; + } } /*python @@ -227,7 +225,7 @@ class CapabilityTypeDef(StatefulEntityType): if pnode: return CapabilityTypeDef(self.name, pnode, self.nodetype, self.custom_def) - + def inherits_from(self, type_names): '''Check this capability is in type_names diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/DataType.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/DataType.java index 4b6451d..d8cf460 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/DataType.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/DataType.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with 
the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -24,66 +24,66 @@ import java.util.ArrayList; import java.util.LinkedHashMap; public class DataType extends StatefulEntityType { - - LinkedHashMap customDef; - - public DataType(String _dataTypeName,LinkedHashMap _customDef) { - super(_dataTypeName,DATATYPE_NETWORK_PREFIX,_customDef); - - customDef = _customDef; - } - - public DataType getParentType() { + + LinkedHashMap customDef; + + public DataType(String _dataTypeName, LinkedHashMap _customDef) { + super(_dataTypeName, DATATYPE_NETWORK_PREFIX, _customDef); + + customDef = _customDef; + } + + public DataType getParentType() { // Return a datatype this datatype is derived from - if(defs != null) { - String ptype = derivedFrom(defs); - if(ptype != null) { - return new DataType(ptype,customDef); - } - } + if (defs != null) { + String ptype = derivedFrom(defs); + if (ptype != null) { + return new DataType(ptype, customDef); + } + } return null; - } + } - public String getValueType() { + public String getValueType() { // Return 'type' section in the datatype schema - if(defs != null) { - return (String)entityValue(defs,"type"); - } - return null; - } + if (defs != null) { + return (String) entityValue(defs, "type"); + } + return null; + } - public ArrayList getAllPropertiesObjects() { + public ArrayList getAllPropertiesObjects() { //Return all properties objects defined in type and parent type - ArrayList propsDef = getPropertiesDefObjects(); + ArrayList propsDef = getPropertiesDefObjects(); DataType ptype = getParentType(); - while(ptype != null) { + while (ptype != null) { propsDef.addAll(ptype.getPropertiesDefObjects()); ptype = ptype.getParentType(); } return propsDef; - } - - public 
LinkedHashMap getAllProperties() { + } + + public LinkedHashMap getAllProperties() { // Return a dictionary of all property definition name-object pairs - LinkedHashMap pno = new LinkedHashMap<>(); - for(PropertyDef pd: getAllPropertiesObjects()) { - pno.put(pd.getName(),pd); - } + LinkedHashMap pno = new LinkedHashMap<>(); + for (PropertyDef pd : getAllPropertiesObjects()) { + pno.put(pd.getName(), pd); + } return pno; - } + } - public Object getAllPropertyValue(String name) { + public Object getAllPropertyValue(String name) { // Return the value of a given property name - LinkedHashMap propsDef = getAllProperties(); - if(propsDef != null && propsDef.get(name) != null) { + LinkedHashMap propsDef = getAllProperties(); + if (propsDef != null && propsDef.get(name) != null) { return propsDef.get(name).getPDValue(); } return null; - } - - public LinkedHashMap getDefs() { - return defs; - } + } + + public LinkedHashMap getDefs() { + return defs; + } } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/EntityType.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/EntityType.java index 62f51d2..efc6ac9 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/EntityType.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/EntityType.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -36,189 +36,189 @@ import org.yaml.snakeyaml.Yaml; public class EntityType { - private static Logger log = LoggerFactory.getLogger(EntityType.class.getName()); - - private static final String TOSCA_DEFINITION_1_0_YAML = "TOSCA_definition_1_0.yaml"; - protected static final String DERIVED_FROM = "derived_from"; - protected static final String PROPERTIES = "properties"; - protected static final String ATTRIBUTES = "attributes"; - protected static final String REQUIREMENTS = "requirements"; - protected static final String INTERFACES = "interfaces"; - protected static final String CAPABILITIES = "capabilities"; - protected static final String TYPE = "type"; - protected static final String ARTIFACTS = "artifacts"; - - @SuppressWarnings("unused") - private static final String SECTIONS[] = { - DERIVED_FROM, PROPERTIES, ATTRIBUTES, REQUIREMENTS, + private static Logger log = LoggerFactory.getLogger(EntityType.class.getName()); + + private static final String TOSCA_DEFINITION_1_0_YAML = "TOSCA_definition_1_0.yaml"; + protected static final String DERIVED_FROM = "derived_from"; + protected static final String PROPERTIES = "properties"; + protected static final String ATTRIBUTES = "attributes"; + protected static final String REQUIREMENTS = "requirements"; + protected static final String INTERFACES = "interfaces"; + protected static final String CAPABILITIES = "capabilities"; + protected static final String TYPE = "type"; + protected static final String ARTIFACTS = "artifacts"; + + @SuppressWarnings("unused") + private static final String SECTIONS[] = { + DERIVED_FROM, PROPERTIES, ATTRIBUTES, REQUIREMENTS, INTERFACES, CAPABILITIES, TYPE, ARTIFACTS - }; + }; - public static final String TOSCA_DEF_SECTIONS[] = { - "node_types", "data_types", "artifact_types", - "group_types", "relationship_types", - "capability_types", "interface_types", - "policy_types"}; + public static final String TOSCA_DEF_SECTIONS[] = { + "node_types", "data_types", "artifact_types", + "group_types", 
"relationship_types", + "capability_types", "interface_types", + "policy_types"}; // TOSCA definition file - //private final static String path = EntityType.class.getProtectionDomain().getCodeSource().getLocation().getPath(); - - //private final static String path = EntityType.class.getClassLoader().getResource("TOSCA_definition_1_0.yaml").getFile(); + //private final static String path = EntityType.class.getProtectionDomain().getCodeSource().getLocation().getPath(); + + //private final static String path = EntityType.class.getClassLoader().getResource("TOSCA_definition_1_0.yaml").getFile(); //private final static String TOSCA_DEF_FILE = EntityType.class.getClassLoader().getResourceAsStream("TOSCA_definition_1_0.yaml"); - - private static LinkedHashMap TOSCA_DEF_LOAD_AS_IS = loadTdf(); - - //EntityType.class.getClassLoader().getResourceAsStream("TOSCA_definition_1_0.yaml"); - - @SuppressWarnings("unchecked") - private static LinkedHashMap loadTdf() { - String toscaDefLocation = EntityType.class.getClassLoader().getResource(TOSCA_DEFINITION_1_0_YAML).getFile(); - InputStream input = EntityType.class.getClassLoader().getResourceAsStream(TOSCA_DEFINITION_1_0_YAML); - if (input == null){ - log.error("EntityType - loadTdf - Couldn't load TOSCA_DEF_FILE {}", toscaDefLocation); - } - Yaml yaml = new Yaml(); - Object loaded = yaml.load(input); - //@SuppressWarnings("unchecked") - return (LinkedHashMap) loaded; - } + + private static LinkedHashMap TOSCA_DEF_LOAD_AS_IS = loadTdf(); + + //EntityType.class.getClassLoader().getResourceAsStream("TOSCA_definition_1_0.yaml"); + + @SuppressWarnings("unchecked") + private static LinkedHashMap loadTdf() { + String toscaDefLocation = EntityType.class.getClassLoader().getResource(TOSCA_DEFINITION_1_0_YAML).getFile(); + InputStream input = EntityType.class.getClassLoader().getResourceAsStream(TOSCA_DEFINITION_1_0_YAML); + if (input == null) { + log.error("EntityType - loadTdf - Couldn't load TOSCA_DEF_FILE {}", toscaDefLocation); + } + 
Yaml yaml = new Yaml(); + Object loaded = yaml.load(input); + //@SuppressWarnings("unchecked") + return (LinkedHashMap) loaded; + } // Map of definition with pre-loaded values of TOSCA_DEF_FILE_SECTIONS - public static LinkedHashMap TOSCA_DEF; - static { - TOSCA_DEF = new LinkedHashMap(); - for(String section: TOSCA_DEF_SECTIONS) { - @SuppressWarnings("unchecked") - LinkedHashMap value = (LinkedHashMap)TOSCA_DEF_LOAD_AS_IS.get(section); - if(value != null) { - for(String key: value.keySet()) { - TOSCA_DEF.put(key, value.get(key)); - } - } + public static LinkedHashMap TOSCA_DEF; + + static { + TOSCA_DEF = new LinkedHashMap(); + for (String section : TOSCA_DEF_SECTIONS) { + @SuppressWarnings("unchecked") + LinkedHashMap value = (LinkedHashMap) TOSCA_DEF_LOAD_AS_IS.get(section); + if (value != null) { + for (String key : value.keySet()) { + TOSCA_DEF.put(key, value.get(key)); + } + } } - } - - public static final String DEPENDSON = "tosca.relationships.DependsOn"; - public static final String HOSTEDON = "tosca.relationships.HostedOn"; - public static final String CONNECTSTO = "tosca.relationships.ConnectsTo"; - public static final String ATTACHESTO = "tosca.relationships.AttachesTo"; - public static final String LINKSTO = "tosca.relationships.network.LinksTo"; - public static final String BINDSTO = "tosca.relationships.network.BindsTo"; - - public static final String RELATIONSHIP_TYPE[] = { - "tosca.relationships.DependsOn", - "tosca.relationships.HostedOn", - "tosca.relationships.ConnectsTo", - "tosca.relationships.AttachesTo", - "tosca.relationships.network.LinksTo", - "tosca.relationships.network.BindsTo"}; - - public static final String NODE_PREFIX = "tosca.nodes."; - public static final String RELATIONSHIP_PREFIX = "tosca.relationships."; - public static final String CAPABILITY_PREFIX = "tosca.capabilities."; - public static final String INTERFACE_PREFIX = "tosca.interfaces."; - public static final String ARTIFACT_PREFIX = "tosca.artifacts."; - public static 
final String POLICY_PREFIX = "tosca.policies."; - public static final String GROUP_PREFIX = "tosca.groups."; - //currently the data types are defined only for network - // but may have changes in the future. - public static final String DATATYPE_PREFIX = "tosca.datatypes."; - public static final String DATATYPE_NETWORK_PREFIX = DATATYPE_PREFIX + "network."; - public static final String TOSCA = "tosca"; - - protected String type; - protected LinkedHashMap defs = null; - public Object getParentType() { return null; } - - public String derivedFrom(LinkedHashMap defs) { + } + + public static final String DEPENDSON = "tosca.relationships.DependsOn"; + public static final String HOSTEDON = "tosca.relationships.HostedOn"; + public static final String CONNECTSTO = "tosca.relationships.ConnectsTo"; + public static final String ATTACHESTO = "tosca.relationships.AttachesTo"; + public static final String LINKSTO = "tosca.relationships.network.LinksTo"; + public static final String BINDSTO = "tosca.relationships.network.BindsTo"; + + public static final String RELATIONSHIP_TYPE[] = { + "tosca.relationships.DependsOn", + "tosca.relationships.HostedOn", + "tosca.relationships.ConnectsTo", + "tosca.relationships.AttachesTo", + "tosca.relationships.network.LinksTo", + "tosca.relationships.network.BindsTo"}; + + public static final String NODE_PREFIX = "tosca.nodes."; + public static final String RELATIONSHIP_PREFIX = "tosca.relationships."; + public static final String CAPABILITY_PREFIX = "tosca.capabilities."; + public static final String INTERFACE_PREFIX = "tosca.interfaces."; + public static final String ARTIFACT_PREFIX = "tosca.artifacts."; + public static final String POLICY_PREFIX = "tosca.policies."; + public static final String GROUP_PREFIX = "tosca.groups."; + //currently the data types are defined only for network + // but may have changes in the future. 
+ public static final String DATATYPE_PREFIX = "tosca.datatypes."; + public static final String DATATYPE_NETWORK_PREFIX = DATATYPE_PREFIX + "network."; + public static final String TOSCA = "tosca"; + + protected String type; + protected LinkedHashMap defs = null; + + public Object getParentType() { + return null; + } + + public String derivedFrom(LinkedHashMap defs) { // Return a type this type is derived from - return (String)entityValue(defs, "derived_from"); + return (String) entityValue(defs, "derived_from"); } public boolean isDerivedFrom(String type_str) { // Check if object inherits from the given type // Returns true if this object is derived from 'type_str' // False otherwise. - if(type == null || this.type.isEmpty()) { + if (type == null || this.type.isEmpty()) { return false; - } - else if(type == type_str) { + } else if (type == type_str) { return true; - } - else if(getParentType() != null) { - return ((EntityType)getParentType()).isDerivedFrom(type_str); - } - else { + } else if (getParentType() != null) { + return ((EntityType) getParentType()).isDerivedFrom(type_str); + } else { return false; } } - public Object entityValue(LinkedHashMap defs, String key) { - if(defs != null) { - return defs.get(key); - } - return null; + public Object entityValue(LinkedHashMap defs, String key) { + if (defs != null) { + return defs.get(key); + } + return null; } @SuppressWarnings("unchecked") - public Object getValue(String ndtype, LinkedHashMap _defs, boolean parent) { + public Object getValue(String ndtype, LinkedHashMap _defs, boolean parent) { Object value = null; - if(_defs == null) { - if(defs == null) { + if (_defs == null) { + if (defs == null) { return null; } _defs = this.defs; } - Object defndt = _defs.get(ndtype); - if(defndt != null) { + Object defndt = _defs.get(ndtype); + if (defndt != null) { // copy the value to avoid that next operations add items in the // item definitions //value = copy.copy(defs[ndtype]) - value = CopyUtils.copyLhmOrAl(defndt); 
+ value = CopyUtils.copyLhmOrAl(defndt); } - - if(parent) { + + if (parent) { EntityType p = this; - if(p != null) { - while(p != null) { - if(p.defs != null && p.defs.get(ndtype) != null) { + if (p != null) { + while (p != null) { + if (p.defs != null && p.defs.get(ndtype) != null) { // get the parent value Object parentValue = p.defs.get(ndtype); - if(value != null) { - if(value instanceof LinkedHashMap) { - for(Map.Entry me: ((LinkedHashMap)parentValue).entrySet()) { - String k = me.getKey(); - if(((LinkedHashMap)value).get(k) == null) { - ((LinkedHashMap)value).put(k,me.getValue()); - } - } + if (value != null) { + if (value instanceof LinkedHashMap) { + for (Map.Entry me : ((LinkedHashMap) parentValue).entrySet()) { + String k = me.getKey(); + if (((LinkedHashMap) value).get(k) == null) { + ((LinkedHashMap) value).put(k, me.getValue()); + } + } } - if(value instanceof ArrayList) { - for(Object pValue: (ArrayList)parentValue) { - if(!((ArrayList)value).contains(pValue)) { - ((ArrayList)value).add(pValue); - } - } + if (value instanceof ArrayList) { + for (Object pValue : (ArrayList) parentValue) { + if (!((ArrayList) value).contains(pValue)) { + ((ArrayList) value).add(pValue); + } + } } - } - else { - // value = copy.copy(parent_value) + } else { + // value = copy.copy(parent_value) value = CopyUtils.copyLhmOrAl(parentValue); } - } - p = (EntityType)p.getParentType(); + } + p = (EntityType) p.getParentType(); } } } - - return value; - } + + return value; + } @SuppressWarnings("unchecked") - public Object getDefinition(String ndtype) { + public Object getDefinition(String ndtype) { Object value = null; - LinkedHashMap _defs; + LinkedHashMap _defs; // no point in hasattr, because we have it, and it // doesn't do anything except emit an exception anyway //if not hasattr(self, 'defs'): @@ -228,59 +228,57 @@ public class EntityType { //else: // defs = self.defs _defs = this.defs; - - if(_defs != null && _defs.get(ndtype) != null) { - value = _defs.get(ndtype); + + 
if (_defs != null && _defs.get(ndtype) != null) { + value = _defs.get(ndtype); } Object p = getParentType(); - if(p != null) { - Object inherited = ((EntityType)p).getDefinition(ndtype); - if(inherited != null) { + if (p != null) { + Object inherited = ((EntityType) p).getDefinition(ndtype); + if (inherited != null) { // inherited = dict(inherited) WTF?!? - if(value == null) { - value = inherited; - } - else { - //????? + if (value == null) { + value = inherited; + } else { + //????? //inherited.update(value) //value.update(inherited) - for(Map.Entry me: ((LinkedHashMap)inherited).entrySet()) { - ((LinkedHashMap)value).put(me.getKey(),me.getValue()); - } - } - } + for (Map.Entry me : ((LinkedHashMap) inherited).entrySet()) { + ((LinkedHashMap) value).put(me.getKey(), me.getValue()); + } + } + } } return value; } - - public static void updateDefinitions(String version) { + + public static void updateDefinitions(String version) { ExtTools exttools = new ExtTools(); String extensionDefsFile = exttools.getDefsFile(version); - try (InputStream input = EntityType.class.getClassLoader().getResourceAsStream(extensionDefsFile);){ - Yaml yaml = new Yaml(); - LinkedHashMap nfvDefFile = (LinkedHashMap)yaml.load(input); - LinkedHashMap nfvDef = new LinkedHashMap<>(); - for(String section: TOSCA_DEF_SECTIONS) { - if(nfvDefFile.get(section) != null) { - LinkedHashMap value = - (LinkedHashMap)nfvDefFile.get(section); - for(String key: value.keySet()) { - nfvDef.put(key, value.get(key)); - } - } - } - TOSCA_DEF.putAll(nfvDef); - } - catch (IOException e) { - log.error("EntityType - updateDefinitions - Failed to update definitions from defs file {}",extensionDefsFile); - log.error("Exception:", e); - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE280", - String.format("Failed to update definitions from defs file \"%s\" ",extensionDefsFile))); - return; - } + try (InputStream input = 
EntityType.class.getClassLoader().getResourceAsStream(extensionDefsFile);) { + Yaml yaml = new Yaml(); + LinkedHashMap nfvDefFile = (LinkedHashMap) yaml.load(input); + LinkedHashMap nfvDef = new LinkedHashMap<>(); + for (String section : TOSCA_DEF_SECTIONS) { + if (nfvDefFile.get(section) != null) { + LinkedHashMap value = + (LinkedHashMap) nfvDefFile.get(section); + for (String key : value.keySet()) { + nfvDef.put(key, value.get(key)); + } + } + } + TOSCA_DEF.putAll(nfvDef); + } catch (IOException e) { + log.error("EntityType - updateDefinitions - Failed to update definitions from defs file {}", extensionDefsFile); + log.error("Exception:", e); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE280", + String.format("Failed to update definitions from defs file \"%s\" ", extensionDefsFile))); + return; + } } } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/GroupType.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/GroupType.java index cbcb6f6..db6f2b7 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/GroupType.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/GroupType.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -29,151 +29,150 @@ import java.util.Map; public class GroupType extends StatefulEntityType { - private static final String DERIVED_FROM = "derived_from"; - private static final String VERSION = "version"; - private static final String METADATA = "metadata"; - private static final String DESCRIPTION = "description"; - private static final String PROPERTIES = "properties"; - private static final String MEMBERS = "members"; - private static final String INTERFACES = "interfaces"; - - private static final String SECTIONS[] = { - DERIVED_FROM, VERSION, METADATA, DESCRIPTION, PROPERTIES, MEMBERS, INTERFACES}; - - private String groupType; - private LinkedHashMap customDef; - private String groupDescription; - private String groupVersion; - //private LinkedHashMap groupProperties; - //private ArrayList groupMembers; - private LinkedHashMap metaData; - - @SuppressWarnings("unchecked") - public GroupType(String _grouptype,LinkedHashMap _customDef) { - super(_grouptype,GROUP_PREFIX,_customDef); - - groupType = _grouptype; - customDef = _customDef; - _validateFields(); - if(defs != null) { - groupDescription = (String)defs.get(DESCRIPTION); - groupVersion = (String)defs.get(VERSION); - //groupProperties = (LinkedHashMap)defs.get(PROPERTIES); - //groupMembers = (ArrayList)defs.get(MEMBERS); - Object mdo = defs.get(METADATA); - if(mdo instanceof LinkedHashMap) { - metaData = (LinkedHashMap)mdo; - } - else { - metaData = null; - } - - if(metaData != null) { - _validateMetadata(metaData); - } - } - } - - public GroupType getParentType() { + private static final String DERIVED_FROM = "derived_from"; + private static final String VERSION = "version"; + private static final String METADATA = "metadata"; + private static final String DESCRIPTION = "description"; + private static final String PROPERTIES = "properties"; + private static final String MEMBERS = "members"; + private static final String INTERFACES = "interfaces"; + + private static final String[] SECTIONS = { + 
DERIVED_FROM, VERSION, METADATA, DESCRIPTION, PROPERTIES, MEMBERS, INTERFACES}; + + private String groupType; + private LinkedHashMap customDef; + private String groupDescription; + private String groupVersion; + //private LinkedHashMap groupProperties; + //private ArrayList groupMembers; + private LinkedHashMap metaData; + + @SuppressWarnings("unchecked") + public GroupType(String groupType, LinkedHashMap customDef) { + super(groupType, GROUP_PREFIX, customDef); + + this.groupType = groupType; + this.customDef = customDef; + validateFields(); + if (defs != null) { + groupDescription = (String) defs.get(DESCRIPTION); + groupVersion = (String) defs.get(VERSION); + //groupProperties = (LinkedHashMap)defs.get(PROPERTIES); + //groupMembers = (ArrayList)defs.get(MEMBERS); + Object mdo = defs.get(METADATA); + if (mdo instanceof LinkedHashMap) { + metaData = (LinkedHashMap) mdo; + } else { + metaData = null; + } + + if (metaData != null) { + validateMetadata(metaData); + } + } + } + + public GroupType getParentType() { // Return a group statefulentity of this entity is derived from. 
- if(defs == null) { + if (defs == null) { return null; } String pgroupEntity = derivedFrom(defs); - if(pgroupEntity != null) { - return new GroupType(pgroupEntity,customDef); + if (pgroupEntity != null) { + return new GroupType(pgroupEntity, customDef); } return null; - } - - public String getDescription() { - return groupDescription; - } - - public String getVersion() { - return groupVersion; - } - - @SuppressWarnings("unchecked") - public LinkedHashMap getInterfaces() { - Object ifo = getValue(INTERFACES,null,false); - if(ifo instanceof LinkedHashMap) { - return (LinkedHashMap)ifo; - } - return new LinkedHashMap(); - } - - private void _validateFields() { - if(defs != null) { - for(String name: defs.keySet()) { - boolean bFound = false; - for(String sect: SECTIONS) { - if(name.equals(sect)) { - bFound = true; - break; - } - } - if(!bFound) { + } + + public String getDescription() { + return groupDescription; + } + + public String getVersion() { + return groupVersion; + } + + @SuppressWarnings("unchecked") + public LinkedHashMap getInterfaces() { + Object ifo = getValue(INTERFACES, null, false); + if (ifo instanceof LinkedHashMap) { + return (LinkedHashMap) ifo; + } + return new LinkedHashMap(); + } + + private void validateFields() { + if (defs != null) { + for (String name : defs.keySet()) { + boolean bFound = false; + for (String sect : SECTIONS) { + if (name.equals(sect)) { + bFound = true; + break; + } + } + if (!bFound) { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE120", String.format( - "UnknownFieldError: Group Type \"%s\" contains unknown field \"%s\"", - groupType,name))); - } - } - } - } - - @SuppressWarnings("unchecked") - private void _validateMetadata(LinkedHashMap metadata) { - String mtt = (String) metadata.get("type"); - if(mtt != null && !mtt.equals("map") && !mtt.equals("tosca:map")) { + "UnknownFieldError: Group Type \"%s\" contains unknown field \"%s\"", + groupType, name))); + } + } + } + } + + 
@SuppressWarnings("unchecked") + private void validateMetadata(LinkedHashMap metadata) { + String mtt = (String) metadata.get("type"); + if (mtt != null && !mtt.equals("map") && !mtt.equals("tosca:map")) { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE121", String.format( - "InvalidTypeError: \"%s\" defined in group for metadata is invalid", - mtt))); - } - for(String entrySchema: metadata.keySet()) { - Object estob = metadata.get(entrySchema); - if(estob instanceof LinkedHashMap) { - String est = (String)((LinkedHashMap)estob).get("type"); - if(!est.equals("string")) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE122", String.format( - "InvalidTypeError: \"%s\" defined in group for metadata \"%s\" is invalid", - est,entrySchema))); - } - } - } - } - - public String getType() { - return groupType; - } - + "InvalidTypeError: \"%s\" defined in group for metadata is invalid", + mtt))); + } + for (String entrySchema : metadata.keySet()) { + Object estob = metadata.get(entrySchema); + if (estob instanceof LinkedHashMap) { + String est = (String) ((LinkedHashMap) estob).get("type"); + if (!est.equals("string")) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE122", String.format( + "InvalidTypeError: \"%s\" defined in group for metadata \"%s\" is invalid", + est, entrySchema))); + } + } + } + } + + public String getType() { + return groupType; + } + @SuppressWarnings("unchecked") - public ArrayList getCapabilitiesObjects() { + public ArrayList getCapabilitiesObjects() { // Return a list of capability objects - ArrayList typecapabilities = new ArrayList<>(); - LinkedHashMap caps = (LinkedHashMap)getValue(CAPABILITIES, null, true); - if(caps != null) { + ArrayList typecapabilities = new ArrayList<>(); + LinkedHashMap caps = (LinkedHashMap) getValue(CAPABILITIES, null, true); + if (caps != null) { // 'cname' is symbolic name of the capability // 'cvalue' 
is a dict { 'type': } - for(Map.Entry me: caps.entrySet()) { - String cname = me.getKey(); - LinkedHashMap cvalue = (LinkedHashMap)me.getValue(); - String ctype = cvalue.get("type"); - CapabilityTypeDef cap = new CapabilityTypeDef(cname,ctype,type,customDef); - typecapabilities.add(cap); - } + for (Map.Entry me : caps.entrySet()) { + String cname = me.getKey(); + LinkedHashMap cvalue = (LinkedHashMap) me.getValue(); + String ctype = cvalue.get("type"); + CapabilityTypeDef cap = new CapabilityTypeDef(cname, ctype, type, customDef); + typecapabilities.add(cap); + } } return typecapabilities; - } - - public LinkedHashMap getCapabilities() { + } + + public LinkedHashMap getCapabilities() { // Return a dictionary of capability name-objects pairs - LinkedHashMap caps = new LinkedHashMap<>(); - for(CapabilityTypeDef ctd: getCapabilitiesObjects()) { - caps.put(ctd.getName(),ctd); - } - return caps; - } + LinkedHashMap caps = new LinkedHashMap<>(); + for (CapabilityTypeDef ctd : getCapabilitiesObjects()) { + caps.put(ctd.getName(), ctd); + } + return caps; + } } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/InterfacesDef.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/InterfacesDef.java index ceb8fb9..2862a11 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/InterfacesDef.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/InterfacesDef.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -30,167 +30,163 @@ import java.util.Map; public class InterfacesDef extends StatefulEntityType { - public static final String LIFECYCLE = "tosca.interfaces.node.lifecycle.Standard"; - public static final String CONFIGURE = "tosca.interfaces.relationship.Configure"; - public static final String LIFECYCLE_SHORTNAME = "Standard"; - public static final String CONFIGURE_SHORTNAME = "Configure"; - - public static final String SECTIONS[] = { - LIFECYCLE, CONFIGURE, LIFECYCLE_SHORTNAME,CONFIGURE_SHORTNAME - }; - - public static final String IMPLEMENTATION = "implementation"; - public static final String DESCRIPTION = "description"; - public static final String INPUTS = "inputs"; - - public static final String INTERFACE_DEF_RESERVED_WORDS[] = { - "type", "inputs", "derived_from", "version", "description"}; - - private EntityType ntype; - private EntityTemplate nodeTemplate; - - private String operationName; - private Object operationDef; - private Object implementation; - private LinkedHashMap inputs; - private String description; - - @SuppressWarnings("unchecked") - public InterfacesDef(EntityType inodeType, - String interfaceType, - EntityTemplate inodeTemplate, - String iname, - Object ivalue) { - // void - super(); - - ntype = inodeType; - nodeTemplate = inodeTemplate; - type = interfaceType; - operationName = iname; - operationDef = ivalue; - implementation = null; - inputs = null; - defs = new LinkedHashMap(); - - if(interfaceType.equals(LIFECYCLE_SHORTNAME)) { - interfaceType = LIFECYCLE; - } - if(interfaceType.equals(CONFIGURE_SHORTNAME)) { - interfaceType = CONFIGURE; - } - - // only NodeType has getInterfaces "hasattr(ntype,interfaces)" - // while RelationshipType does not - if(ntype instanceof NodeType) { - if(((NodeType)ntype).getInterfaces() != null && - ((NodeType)ntype).getInterfaces().values().contains(interfaceType)) { - LinkedHashMap nii = (LinkedHashMap) - ((NodeType)ntype).getInterfaces().get(interfaceType); - interfaceType = (String)nii.get("type"); 
- } - } - if(inodeType != null) { - if(nodeTemplate != null && nodeTemplate.getCustomDef() != null && - nodeTemplate.getCustomDef().containsKey(interfaceType)) { - defs = (LinkedHashMap) - nodeTemplate.getCustomDef().get(interfaceType); - } - else { - defs = (LinkedHashMap)TOSCA_DEF.get(interfaceType); - } - } - - if(ivalue != null) { - if(ivalue instanceof LinkedHashMap) { - for(Map.Entry me: ((LinkedHashMap)ivalue).entrySet()) { - if(me.getKey().equals(IMPLEMENTATION)) { - implementation = me.getValue(); - } - else if(me.getKey().equals(INPUTS)) { - inputs = (LinkedHashMap)me.getValue(); - } - else if(me.getKey().equals(DESCRIPTION)) { - description = (String)me.getValue(); - } - else { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE123", String.format( - "UnknownFieldError: \"interfaces\" of template \"%s\" contain unknown field \"%s\"", - nodeTemplate.getName(),me.getKey()))); - } - } - } - } - } - - public ArrayList getLifecycleOps() { - if(defs != null) { - if(type.equals(LIFECYCLE)) { - return _ops(); - } - } - return null; - } - - public ArrayList getInterfaceOps() { - if(defs != null) { - ArrayList ops = _ops(); - ArrayList idrw = new ArrayList<>(); - for(int i=0; i getConfigureOps() { - if(defs != null) { - if(type.equals(CONFIGURE)) { - return _ops(); - } - } - return null; - } - - private ArrayList _ops() { - return new ArrayList(defs.keySet()); - } - - // getters/setters - - public LinkedHashMap getInputs() { - return inputs; - } - - public void setInput(String name,Object value) { - inputs.put(name, value); - } - - public Object getImplementation(){ - return implementation; - } - - public void setImplementation(Object implementation){ - this.implementation = implementation; - } - - public String getDescription() { - return description; - } - - public void setDescription(String description) { - this.description = description; - } - - public String getOperationName() { - return operationName; - } - - public void 
setOperationName(String operationName) { - this.operationName = operationName; - } + public static final String LIFECYCLE = "tosca.interfaces.node.lifecycle.Standard"; + public static final String CONFIGURE = "tosca.interfaces.relationship.Configure"; + public static final String LIFECYCLE_SHORTNAME = "Standard"; + public static final String CONFIGURE_SHORTNAME = "Configure"; + + public static final String[] SECTIONS = { + LIFECYCLE, CONFIGURE, LIFECYCLE_SHORTNAME, CONFIGURE_SHORTNAME + }; + + public static final String IMPLEMENTATION = "implementation"; + public static final String DESCRIPTION = "description"; + public static final String INPUTS = "inputs"; + + public static final String[] INTERFACE_DEF_RESERVED_WORDS = { + "type", "inputs", "derived_from", "version", "description"}; + + private EntityType ntype; + private EntityTemplate nodeTemplate; + + private String operationName; + private Object operationDef; + private Object implementation; + private LinkedHashMap inputs; + private String description; + + @SuppressWarnings("unchecked") + public InterfacesDef(EntityType inodeType, + String interfaceType, + EntityTemplate inodeTemplate, + String iname, + Object ivalue) { + // void + super(); + + ntype = inodeType; + nodeTemplate = inodeTemplate; + type = interfaceType; + operationName = iname; + operationDef = ivalue; + implementation = null; + inputs = null; + defs = new LinkedHashMap<>(); + + if (interfaceType.equals(LIFECYCLE_SHORTNAME)) { + interfaceType = LIFECYCLE; + } + if (interfaceType.equals(CONFIGURE_SHORTNAME)) { + interfaceType = CONFIGURE; + } + + // only NodeType has getInterfaces "hasattr(ntype,interfaces)" + // while RelationshipType does not + if (ntype instanceof NodeType) { + if (((NodeType) ntype).getInterfaces() != null + && ((NodeType) ntype).getInterfaces().values().contains(interfaceType)) { + LinkedHashMap nii = (LinkedHashMap) + ((NodeType) ntype).getInterfaces().get(interfaceType); + interfaceType = (String) nii.get("type"); + } + 
} + if (inodeType != null) { + if (nodeTemplate != null && nodeTemplate.getCustomDef() != null + && nodeTemplate.getCustomDef().containsKey(interfaceType)) { + defs = (LinkedHashMap) + nodeTemplate.getCustomDef().get(interfaceType); + } else { + defs = (LinkedHashMap) TOSCA_DEF.get(interfaceType); + } + } + + if (ivalue != null) { + if (ivalue instanceof LinkedHashMap) { + for (Map.Entry me : ((LinkedHashMap) ivalue).entrySet()) { + if (me.getKey().equals(IMPLEMENTATION)) { + implementation = me.getValue(); + } else if (me.getKey().equals(INPUTS)) { + inputs = (LinkedHashMap) me.getValue(); + } else if (me.getKey().equals(DESCRIPTION)) { + description = (String) me.getValue(); + } else { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE123", String.format( + "UnknownFieldError: \"interfaces\" of template \"%s\" contain unknown field \"%s\"", + nodeTemplate.getName(), me.getKey()))); + } + } + } + } + } + + public ArrayList getLifecycleOps() { + if (defs != null) { + if (type.equals(LIFECYCLE)) { + return ops(); + } + } + return null; + } + + public ArrayList getInterfaceOps() { + if (defs != null) { + ArrayList ops = ops(); + ArrayList idrw = new ArrayList<>(); + for (int i = 0; i < InterfacesDef.INTERFACE_DEF_RESERVED_WORDS.length; i++) { + idrw.add(InterfacesDef.INTERFACE_DEF_RESERVED_WORDS[i]); + } + ops.removeAll(idrw); + return ops; + } + return null; + } + + public ArrayList getConfigureOps() { + if (defs != null) { + if (type.equals(CONFIGURE)) { + return ops(); + } + } + return null; + } + + private ArrayList ops() { + return new ArrayList(defs.keySet()); + } + + // getters/setters + + public LinkedHashMap getInputs() { + return inputs; + } + + public void setInput(String name, Object value) { + inputs.put(name, value); + } + + public Object getImplementation() { + return implementation; + } + + public void setImplementation(Object implementation) { + this.implementation = implementation; + } + + public String 
getDescription() { + return description; + } + + public void setDescription(String description) { + this.description = description; + } + + public String getOperationName() { + return operationName; + } + + public void setOperationName(String operationName) { + this.operationName = operationName; + } } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/Metadata.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/Metadata.java index dd914d4..f3de49e 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/Metadata.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/Metadata.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -26,36 +26,37 @@ import java.util.Map; import java.util.stream.Collectors; public class Metadata { - - private final Map metadataMap; - public Metadata(Map metadataMap) { + private final Map metadataMap; + + public Metadata(Map metadataMap) { this.metadataMap = metadataMap != null ? metadataMap : new HashMap<>(); } - public String getValue(String key) { - - Object obj = this.metadataMap.get(key); - if (obj != null){ - return String.valueOf(obj); - } - return null; - } - - /** - * Get all properties of a Metadata object.
- * This object represents the "metadata" section of some entity. - * @return all properties of this Metadata, as a key-value. - */ - public Map getAllProperties() { - return metadataMap.entrySet().stream().map(e-> new AbstractMap.SimpleEntry(e.getKey(), String.valueOf(e.getValue()))).collect(Collectors.toMap(Map.Entry::getKey,Map.Entry::getValue)); - } - - @Override - public String toString() { - return "Metadata{" + - "metadataMap=" + metadataMap + - '}'; - } + public String getValue(String key) { + + Object obj = this.metadataMap.get(key); + if (obj != null) { + return String.valueOf(obj); + } + return null; + } + + /** + * Get all properties of a Metadata object.
+ * This object represents the "metadata" section of some entity. + * + * @return all properties of this Metadata, as a key-value. + */ + public Map getAllProperties() { + return metadataMap.entrySet().stream().map(e -> new AbstractMap.SimpleEntry(e.getKey(), String.valueOf(e.getValue()))).collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + } + + @Override + public String toString() { + return "Metadata{" + + "metadataMap=" + metadataMap + + '}'; + } } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/NodeType.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/NodeType.java index 918c629..c251be9 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/NodeType.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/NodeType.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -28,305 +28,300 @@ import java.util.LinkedHashMap; import java.util.Map; public class NodeType extends StatefulEntityType { - // TOSCA built-in node type - - private static final String DERIVED_FROM = "derived_from"; - private static final String METADATA = "metadata"; - private static final String PROPERTIES = "properties"; - private static final String VERSION = "version"; - private static final String DESCRIPTION = "description"; - private static final String ATTRIBUTES = "attributes"; - private static final String REQUIREMENTS = "requirements"; - private static final String CAPABILITIES = "capabilities"; - private static final String INTERFACES = "interfaces"; - private static final String ARTIFACTS = "artifacts"; - - private static final String SECTIONS[] = { - DERIVED_FROM, METADATA, PROPERTIES, VERSION, DESCRIPTION, ATTRIBUTES, REQUIREMENTS, CAPABILITIES, INTERFACES, ARTIFACTS - }; - - private String ntype; - public LinkedHashMap customDef; - - public NodeType(String nttype,LinkedHashMap ntcustomDef) { - super(nttype,NODE_PREFIX, ntcustomDef); + // TOSCA built-in node type + + private static final String DERIVED_FROM = "derived_from"; + private static final String METADATA = "metadata"; + private static final String PROPERTIES = "properties"; + private static final String VERSION = "version"; + private static final String DESCRIPTION = "description"; + private static final String ATTRIBUTES = "attributes"; + private static final String REQUIREMENTS = "requirements"; + private static final String CAPABILITIES = "capabilities"; + private static final String INTERFACES = "interfaces"; + private static final String ARTIFACTS = "artifacts"; + + private static final String SECTIONS[] = { + DERIVED_FROM, METADATA, PROPERTIES, VERSION, DESCRIPTION, ATTRIBUTES, REQUIREMENTS, CAPABILITIES, INTERFACES, ARTIFACTS + }; + + private String ntype; + public LinkedHashMap customDef; + + public NodeType(String nttype, LinkedHashMap ntcustomDef) { + super(nttype, 
NODE_PREFIX, ntcustomDef); ntype = nttype; customDef = ntcustomDef; _validateKeys(); - } + } - public Object getParentType() { + public Object getParentType() { // Return a node this node is derived from - if(defs == null) { - return null; - } - String pnode = derivedFrom(defs); - if(pnode != null && !pnode.isEmpty()) { - return new NodeType(pnode,customDef); - } - return null; - } - - @SuppressWarnings("unchecked") - public LinkedHashMap getRelationship() { + if (defs == null) { + return null; + } + String pnode = derivedFrom(defs); + if (pnode != null && !pnode.isEmpty()) { + return new NodeType(pnode, customDef); + } + return null; + } + + @SuppressWarnings("unchecked") + public LinkedHashMap getRelationship() { // Return a dictionary of relationships to other node types // This method returns a dictionary of named relationships that nodes // of the current node type (self) can have to other nodes (of specific // types) in a TOSCA template. - LinkedHashMap relationship = new LinkedHashMap<>(); - ArrayList> requires; - Object treq = getAllRequirements(); - if(treq != null) { + LinkedHashMap relationship = new LinkedHashMap<>(); + ArrayList> requires; + Object treq = getAllRequirements(); + if (treq != null) { // NOTE(sdmonov): Check if requires is a dict. // If it is a dict convert it to a list of dicts. // This is needed because currently the code below supports only // lists as requirements definition. The following check will // make sure if a map (dict) was provided it will be converted to // a list before proceeding to the parsing. 
- if(treq instanceof LinkedHashMap) { - requires = new ArrayList<>(); - for(Map.Entry me: ((LinkedHashMap)treq).entrySet()) { - LinkedHashMap tl = new LinkedHashMap<>(); - tl.put(me.getKey(),me.getValue()); - requires.add(tl); - } - } - else { - requires = (ArrayList>)treq; + if (treq instanceof LinkedHashMap) { + requires = new ArrayList<>(); + for (Map.Entry me : ((LinkedHashMap) treq).entrySet()) { + LinkedHashMap tl = new LinkedHashMap<>(); + tl.put(me.getKey(), me.getValue()); + requires.add(tl); + } + } else { + requires = (ArrayList>) treq; } - + String keyword = null; String nodeType = null; - for(LinkedHashMap require: requires) { - String relation = null; - for(Map.Entry re: require.entrySet()) { - String key = re.getKey(); - LinkedHashMap req = (LinkedHashMap)re.getValue(); - if(req.get("relationship") != null) { - Object trelation = req.get("relationship"); - // trelation is a string or a dict with "type" mapped to the string we want - if(trelation instanceof String) { - relation = (String)trelation; - } - else { - if(((LinkedHashMap)trelation).get("type") != null) { - relation = (String)((LinkedHashMap)trelation).get("type"); - } - } - nodeType = (String)req.get("node"); - //BUG meaningless?? LinkedHashMap value = req; - if(nodeType != null) { - keyword = "node"; - } - else { - String getRelation = null; + for (LinkedHashMap require : requires) { + String relation = null; + for (Map.Entry re : require.entrySet()) { + String key = re.getKey(); + LinkedHashMap req = (LinkedHashMap) re.getValue(); + if (req.get("relationship") != null) { + Object trelation = req.get("relationship"); + // trelation is a string or a dict with "type" mapped to the string we want + if (trelation instanceof String) { + relation = (String) trelation; + } else { + if (((LinkedHashMap) trelation).get("type") != null) { + relation = (String) ((LinkedHashMap) trelation).get("type"); + } + } + nodeType = (String) req.get("node"); + //BUG meaningless?? 
LinkedHashMap value = req; + if (nodeType != null) { + keyword = "node"; + } else { + String getRelation = null; // If nodeTypeByCap is a dict and has a type key // we need to lookup the node type using // the capability type - String captype = (String)req.get("capability"); - nodeType = _getNodeTypeByCap(captype); - if (nodeType != null){ - getRelation = _getRelation(key, nodeType); - } else { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE11", String.format( - "NodeTypeRequirementForCapabilityUnfulfilled: Node type: \"%s\" with requrement \"%s\" for node type with capability type \"%s\" is not found\\unfulfilled", this.ntype, key, captype))); - } - if (getRelation != null) { - relation = getRelation; - } - keyword = key; - } - } - } - if(relation == null || nodeType == null){ - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE11", String.format( - "NodeTypeForRelationUnfulfilled: Node type \"%s\" - relationship type \"%s\" is unfulfilled", this.ntype, relation))); - } else { - RelationshipType rtype = new RelationshipType(relation, keyword, customDef); - NodeType relatednode = new NodeType(nodeType, customDef); - relationship.put(rtype, relatednode); - } + String captype = (String) req.get("capability"); + nodeType = _getNodeTypeByCap(captype); + if (nodeType != null) { + getRelation = _getRelation(key, nodeType); + } else { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE11", String.format( + "NodeTypeRequirementForCapabilityUnfulfilled: Node type: \"%s\" with requrement \"%s\" for node type with capability type \"%s\" is not found\\unfulfilled", this.ntype, key, captype))); + } + if (getRelation != null) { + relation = getRelation; + } + keyword = key; + } + } + } + if (relation == null || nodeType == null) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE11", String.format( + "NodeTypeForRelationUnfulfilled: 
Node type \"%s\" - relationship type \"%s\" is unfulfilled", this.ntype, relation))); + } else { + RelationshipType rtype = new RelationshipType(relation, keyword, customDef); + NodeType relatednode = new NodeType(nodeType, customDef); + relationship.put(rtype, relatednode); + } } - } - return relationship; - - } - + } + return relationship; + + } + @SuppressWarnings("unchecked") - private String _getNodeTypeByCap(String cap) { + private String _getNodeTypeByCap(String cap) { // Find the node type that has the provided capability // This method will lookup all node types if they have the // provided capability. // Filter the node types ArrayList nodeTypes = new ArrayList<>(); - for(String nt: customDef.keySet()) { - if(nt.startsWith(NODE_PREFIX) || nt.startsWith("org.openecomp") && !nt.equals("tosca.nodes.Root")) { - nodeTypes.add(nt); - } + for (String nt : customDef.keySet()) { + if (nt.startsWith(NODE_PREFIX) || nt.startsWith("org.openecomp") && !nt.equals("tosca.nodes.Root")) { + nodeTypes.add(nt); + } } - for(String nt: nodeTypes) { - LinkedHashMap nodeDef = (LinkedHashMap)customDef.get(nt); - if(nodeDef instanceof LinkedHashMap && nodeDef.get("capabilities") != null) { - LinkedHashMap nodeCaps = (LinkedHashMap)nodeDef.get("capabilities"); - if(nodeCaps != null) { - for(Object val: nodeCaps.values()) { - if(val instanceof LinkedHashMap) { - String tp = (String)((LinkedHashMap)val).get("type"); - if(tp != null && tp.equals(cap)) { - return nt; - } - } - } - } + for (String nt : nodeTypes) { + LinkedHashMap nodeDef = (LinkedHashMap) customDef.get(nt); + if (nodeDef instanceof LinkedHashMap && nodeDef.get("capabilities") != null) { + LinkedHashMap nodeCaps = (LinkedHashMap) nodeDef.get("capabilities"); + if (nodeCaps != null) { + for (Object val : nodeCaps.values()) { + if (val instanceof LinkedHashMap) { + String tp = (String) ((LinkedHashMap) val).get("type"); + if (tp != null && tp.equals(cap)) { + return nt; + } + } + } + } } - } + } return null; - } - + } + 
@SuppressWarnings("unchecked") - private String _getRelation(String key,String ndtype) { - String relation = null; - NodeType ntype = new NodeType(ndtype, customDef); - LinkedHashMap caps = ntype.getCapabilities(); - if(caps != null && caps.get(key) != null) { - CapabilityTypeDef c = caps.get(key); - for(int i=0; i< RELATIONSHIP_TYPE.length; i++) { - String r = RELATIONSHIP_TYPE[i]; - if(r != null) { - relation = r; - break; - } - LinkedHashMap rtypedef = (LinkedHashMap)customDef.get(r); - for(Object o: rtypedef.values()) { - LinkedHashMap properties = (LinkedHashMap)o; - if(properties.get(c.getType()) != null) { - relation = r; - break; - } - } - if(relation != null) { - break; - } - else { - for(Object o: rtypedef.values()) { - LinkedHashMap properties = (LinkedHashMap)o; - if(properties.get(c.getParentType()) != null) { - relation = r; - break; - } - } - } - } - } - return relation; + private String _getRelation(String key, String ndtype) { + String relation = null; + NodeType ntype = new NodeType(ndtype, customDef); + LinkedHashMap caps = ntype.getCapabilities(); + if (caps != null && caps.get(key) != null) { + CapabilityTypeDef c = caps.get(key); + for (int i = 0; i < RELATIONSHIP_TYPE.length; i++) { + String r = RELATIONSHIP_TYPE[i]; + if (r != null) { + relation = r; + break; + } + LinkedHashMap rtypedef = (LinkedHashMap) customDef.get(r); + for (Object o : rtypedef.values()) { + LinkedHashMap properties = (LinkedHashMap) o; + if (properties.get(c.getType()) != null) { + relation = r; + break; + } + } + if (relation != null) { + break; + } else { + for (Object o : rtypedef.values()) { + LinkedHashMap properties = (LinkedHashMap) o; + if (properties.get(c.getParentType()) != null) { + relation = r; + break; + } + } + } + } + } + return relation; } @SuppressWarnings("unchecked") - public ArrayList getCapabilitiesObjects() { + public ArrayList getCapabilitiesObjects() { // Return a list of capability objects - ArrayList typecapabilities = new ArrayList<>(); - 
LinkedHashMap caps = (LinkedHashMap)getValue(CAPABILITIES, null, true); - if(caps != null) { + ArrayList typecapabilities = new ArrayList<>(); + LinkedHashMap caps = (LinkedHashMap) getValue(CAPABILITIES, null, true); + if (caps != null) { // 'cname' is symbolic name of the capability // 'cvalue' is a dict { 'type': } - for(Map.Entry me: caps.entrySet()) { - String cname = me.getKey(); - LinkedHashMap cvalue = (LinkedHashMap)me.getValue(); - String ctype = cvalue.get("type"); - CapabilityTypeDef cap = new CapabilityTypeDef(cname,ctype,type,customDef); - typecapabilities.add(cap); - } + for (Map.Entry me : caps.entrySet()) { + String cname = me.getKey(); + LinkedHashMap cvalue = (LinkedHashMap) me.getValue(); + String ctype = cvalue.get("type"); + CapabilityTypeDef cap = new CapabilityTypeDef(cname, ctype, type, customDef); + typecapabilities.add(cap); + } } return typecapabilities; - } - - public LinkedHashMap getCapabilities() { + } + + public LinkedHashMap getCapabilities() { // Return a dictionary of capability name-objects pairs - LinkedHashMap caps = new LinkedHashMap<>(); - for(CapabilityTypeDef ctd: getCapabilitiesObjects()) { - caps.put(ctd.getName(),ctd); - } - return caps; - } - - @SuppressWarnings("unchecked") - public ArrayList getRequirements() { - return (ArrayList)getValue(REQUIREMENTS,null,true); - } - - public ArrayList getAllRequirements() { - return getRequirements(); - } - - @SuppressWarnings("unchecked") - public LinkedHashMap getInterfaces() { - return (LinkedHashMap)getValue(INTERFACES,null,false); - } - - - @SuppressWarnings("unchecked") - public ArrayList getLifecycleInputs() - { + LinkedHashMap caps = new LinkedHashMap<>(); + for (CapabilityTypeDef ctd : getCapabilitiesObjects()) { + caps.put(ctd.getName(), ctd); + } + return caps; + } + + @SuppressWarnings("unchecked") + public ArrayList getRequirements() { + return (ArrayList) getValue(REQUIREMENTS, null, true); + } + + public ArrayList getAllRequirements() { + return getRequirements(); 
+ } + + @SuppressWarnings("unchecked") + public LinkedHashMap getInterfaces() { + return (LinkedHashMap) getValue(INTERFACES, null, false); + } + + + @SuppressWarnings("unchecked") + public ArrayList getLifecycleInputs() { // Return inputs to life cycle operations if found ArrayList inputs = new ArrayList<>(); - LinkedHashMap interfaces = getInterfaces(); - if(interfaces != null) { - for(Map.Entry me: interfaces.entrySet()) { - String iname = me.getKey(); - LinkedHashMap ivalue = (LinkedHashMap)me.getValue(); - if(iname.equals(InterfacesDef.LIFECYCLE)) { - for(Map.Entry ie: ivalue.entrySet()) { - if(ie.getKey().equals("input")) { - LinkedHashMap y = (LinkedHashMap)ie.getValue(); - for(String i: y.keySet()) { - inputs.add(i); - } - } - } - } - } + LinkedHashMap interfaces = getInterfaces(); + if (interfaces != null) { + for (Map.Entry me : interfaces.entrySet()) { + String iname = me.getKey(); + LinkedHashMap ivalue = (LinkedHashMap) me.getValue(); + if (iname.equals(InterfacesDef.LIFECYCLE)) { + for (Map.Entry ie : ivalue.entrySet()) { + if (ie.getKey().equals("input")) { + LinkedHashMap y = (LinkedHashMap) ie.getValue(); + for (String i : y.keySet()) { + inputs.add(i); + } + } + } + } + } } return inputs; - } - - public ArrayList getLifecycleOperations() { - // Return available life cycle operations if found - ArrayList ops = null; - LinkedHashMap interfaces = getInterfaces(); - if(interfaces != null) { - InterfacesDef i = new InterfacesDef(this,InterfacesDef.LIFECYCLE,null,null,null); - ops = i.getLifecycleOps(); - } - return ops; - } - - public CapabilityTypeDef getCapability(String name) { - //BUG?? the python code has to be wrong - // it refers to a bad attribute 'value'... 
- LinkedHashMap caps = getCapabilities(); - if(caps != null) { - return caps.get(name); - } - return null; + } + + public ArrayList getLifecycleOperations() { + // Return available life cycle operations if found + ArrayList ops = null; + LinkedHashMap interfaces = getInterfaces(); + if (interfaces != null) { + InterfacesDef i = new InterfacesDef(this, InterfacesDef.LIFECYCLE, null, null, null); + ops = i.getLifecycleOps(); + } + return ops; + } + + public CapabilityTypeDef getCapability(String name) { + //BUG?? the python code has to be wrong + // it refers to a bad attribute 'value'... + LinkedHashMap caps = getCapabilities(); + if (caps != null) { + return caps.get(name); + } + return null; /* def get_capability(self, name): caps = self.get_capabilities() if caps and name in caps.keys(): return caps[name].value */ - } + } public String getCapabilityType(String name) { - //BUG?? the python code has to be wrong - // it refers to a bad attribute 'value'... - CapabilityTypeDef captype = getCapability(name); - if(captype != null) { - return captype.getType(); - } - return null; + //BUG?? the python code has to be wrong + // it refers to a bad attribute 'value'... 
+ CapabilityTypeDef captype = getCapability(name); + if (captype != null) { + return captype.getType(); + } + return null; /* def get_capability_type(self, name): captype = self.get_capability(name) @@ -336,21 +331,21 @@ public class NodeType extends StatefulEntityType { } private void _validateKeys() { - if(defs != null) { - for(String key: defs.keySet()) { - boolean bFound = false; - for(int i=0; i< SECTIONS.length; i++) { - if(key.equals(SECTIONS[i])) { - bFound = true; - break; - } - } - if(!bFound) { + if (defs != null) { + for (String key : defs.keySet()) { + boolean bFound = false; + for (int i = 0; i < SECTIONS.length; i++) { + if (key.equals(SECTIONS[i])) { + bFound = true; + break; + } + } + if (!bFound) { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE124", String.format( - "UnknownFieldError: Nodetype \"%s\" has unknown field \"%s\"",ntype,key))); - } - } - } + "UnknownFieldError: Nodetype \"%s\" has unknown field \"%s\"", ntype, key))); + } + } + } } } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/PolicyType.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/PolicyType.java index e4d1dd6..b227a31 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/PolicyType.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/PolicyType.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -21,180 +21,178 @@ package org.onap.sdc.toscaparser.api.elements; import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.utils.TOSCAVersionProperty; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; import java.util.ArrayList; import java.util.LinkedHashMap; -import org.onap.sdc.toscaparser.api.utils.TOSCAVersionProperty; -import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; - public class PolicyType extends StatefulEntityType { - - private static final String DERIVED_FROM = "derived_from"; - private static final String METADATA = "metadata"; - private static final String PROPERTIES = "properties"; - private static final String VERSION = "version"; - private static final String DESCRIPTION = "description"; - private static final String TARGETS = "targets"; - private static final String TRIGGERS = "triggers"; - private static final String TYPE = "type"; - - private static final String SECTIONS[] = { - DERIVED_FROM, METADATA, PROPERTIES, VERSION, DESCRIPTION, TARGETS, TRIGGERS, TYPE - }; - - private LinkedHashMap customDef; - private String policyDescription; - private Object policyVersion; - private LinkedHashMap properties; - private LinkedHashMap parentPolicies; - private LinkedHashMap metaData; - private ArrayList targetsList; - - - public PolicyType(String _type, LinkedHashMap _customDef) { - super(_type,POLICY_PREFIX,_customDef); - - type = _type; - customDef = _customDef; - _validateKeys(); - + + private static final String DERIVED_FROM = "derived_from"; + private static final String METADATA = "metadata"; + private static final String PROPERTIES = "properties"; + private static final String VERSION = "version"; + private static final String DESCRIPTION = "description"; + private static final String TARGETS = "targets"; + private static final String TRIGGERS = "triggers"; + private static final String TYPE = "type"; + + private static final String[] SECTIONS = { + DERIVED_FROM, METADATA, 
PROPERTIES, VERSION, DESCRIPTION, TARGETS, TRIGGERS, TYPE + }; + + private LinkedHashMap customDef; + private String policyDescription; + private Object policyVersion; + private LinkedHashMap properties; + private LinkedHashMap parentPolicies; + private LinkedHashMap metaData; + private ArrayList targetsList; + + + public PolicyType(String type, LinkedHashMap customDef) { + super(type, POLICY_PREFIX, customDef); + + this.type = type; + this.customDef = customDef; + validateKeys(); + metaData = null; - if(defs != null && defs.get(METADATA) != null) { - metaData = (LinkedHashMap)defs.get(METADATA); - _validateMetadata(metaData); + if (defs != null && defs.get(METADATA) != null) { + metaData = (LinkedHashMap) defs.get(METADATA); + validateMetadata(metaData); } properties = null; - if(defs != null && defs.get(PROPERTIES) != null) { - properties = (LinkedHashMap)defs.get(PROPERTIES); + if (defs != null && defs.get(PROPERTIES) != null) { + properties = (LinkedHashMap) defs.get(PROPERTIES); } - parentPolicies = _getParentPolicies(); + parentPolicies = getParentPolicies(); policyVersion = null; - if(defs != null && defs.get(VERSION) != null) { + if (defs != null && defs.get(VERSION) != null) { policyVersion = (new TOSCAVersionProperty( - defs.get(VERSION))).getVersion(); + defs.get(VERSION).toString())).getVersion(); } policyDescription = null; - if(defs != null && defs.get(DESCRIPTION) != null) { - policyDescription = (String)defs.get(DESCRIPTION); + if (defs != null && defs.get(DESCRIPTION) != null) { + policyDescription = (String) defs.get(DESCRIPTION); } - + targetsList = null; - if(defs != null && defs.get(TARGETS) != null) { - targetsList = (ArrayList)defs.get(TARGETS); - _validateTargets(targetsList,customDef); + if (defs != null && defs.get(TARGETS) != null) { + targetsList = (ArrayList) defs.get(TARGETS); + validateTargets(targetsList, this.customDef); + } + + } + + private LinkedHashMap getParentPolicies() { + LinkedHashMap policies = new LinkedHashMap<>(); + 
String parentPolicy; + if (getParentType() != null) { + parentPolicy = getParentType().getType(); + } else { + parentPolicy = null; } - - } - - private LinkedHashMap _getParentPolicies() { - LinkedHashMap policies = new LinkedHashMap<>(); - String parentPolicy; - if(getParentType() != null) { - parentPolicy = getParentType().getType(); - } - else { - parentPolicy = null; - } - if(parentPolicy != null) { - while(parentPolicy != null && !parentPolicy.equals("tosca.policies.Root")) { - policies.put(parentPolicy, TOSCA_DEF.get(parentPolicy)); - parentPolicy = (String) - ((LinkedHashMap)policies.get(parentPolicy)).get("derived_from);"); - } - } - return policies; - } - - public String getType() { - return type; - } - - public PolicyType getParentType() { + if (parentPolicy != null) { + while (parentPolicy != null && !parentPolicy.equals("tosca.policies.Root")) { + policies.put(parentPolicy, TOSCA_DEF.get(parentPolicy)); + parentPolicy = (String) + ((LinkedHashMap) policies.get(parentPolicy)).get("derived_from);"); + } + } + return policies; + } + + public String getType() { + return type; + } + + public PolicyType getParentType() { // Return a policy statefulentity of this node is derived from - if(defs == null) { - return null; - } - String ppolicyEntity = derivedFrom(defs); - if(ppolicyEntity != null) { - return new PolicyType(ppolicyEntity,customDef); + if (defs == null) { + return null; + } + String policyEntity = derivedFrom(defs); + if (policyEntity != null) { + return new PolicyType(policyEntity, customDef); } return null; - } - - public Object getPolicy(String name) { + } + + public Object getPolicy(String name) { // Return the definition of a policy field by name - if(defs != null && defs.get(name) != null) { + if (defs != null && defs.get(name) != null) { return defs.get(name); } return null; - } + } - public ArrayList getTargets() { + public ArrayList getTargets() { // Return targets return targetsList; - } - - public String getDescription() { - return 
policyDescription; - } - - public Object getVersion() { - return policyVersion; - } - - private void _validateKeys() { - for(String key: defs.keySet()) { - boolean bFound = false; - for(String sect: SECTIONS) { - if(key.equals(sect)) { - bFound = true; - break; - } - } - if(!bFound) { + } + + public String getDescription() { + return policyDescription; + } + + public Object getVersion() { + return policyVersion; + } + + private void validateKeys() { + for (String key : defs.keySet()) { + boolean bFound = false; + for (String sect : SECTIONS) { + if (key.equals(sect)) { + bFound = true; + break; + } + } + if (!bFound) { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE125", String.format( - "UnknownFieldError: Policy \"%s\" contains unknown field \"%s\"", - type,key))); - } - } - } - - private void _validateTargets(ArrayList _targetsList, - LinkedHashMap _customDef) { - for(String nodetype: _targetsList) { - if(_customDef.get(nodetype) == null) { + "UnknownFieldError: Policy \"%s\" contains unknown field \"%s\"", + type, key))); + } + } + } + + private void validateTargets(ArrayList targetsList, + LinkedHashMap customDef) { + for (String nodetype : targetsList) { + if (customDef.get(nodetype) == null) { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE126", String.format( - "InvalidTypeError: \"%s\" defined in targets for policy \"%s\"", - nodetype,type))); - - } - } - } - - private void _validateMetadata(LinkedHashMap _metaData) { - String mtype = (String)_metaData.get("type"); - if(mtype != null && !mtype.equals("map") && !mtype.equals("tosca:map")) { + "InvalidTypeError: \"%s\" defined in targets for policy \"%s\"", + nodetype, type))); + + } + } + } + + private void validateMetadata(LinkedHashMap metaData) { + String mtype = (String) metaData.get("type"); + if (mtype != null && !mtype.equals("map") && !mtype.equals("tosca:map")) { ThreadLocalsHolder.getCollector().appendValidationIssue(new 
JToscaValidationIssue("JE127", String.format( - "InvalidTypeError: \"%s\" defined in policy for metadata", - mtype))); - } - for(String entrySchema: metaData.keySet()) { - Object estob = metaData.get(entrySchema); - if(estob instanceof LinkedHashMap) { - String est = (String) - ((LinkedHashMap)estob).get("type"); - if(!est.equals("string")) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE128", String.format( - "InvalidTypeError: \"%s\" defined in policy for metadata \"%s\"", - est,entrySchema))); - } - } - } - } + "InvalidTypeError: \"%s\" defined in policy for metadata", + mtype))); + } + for (String entrySchema : this.metaData.keySet()) { + Object estob = this.metaData.get(entrySchema); + if (estob instanceof LinkedHashMap) { + String est = (String) + ((LinkedHashMap) estob).get("type"); + if (!est.equals("string")) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE128", String.format( + "InvalidTypeError: \"%s\" defined in policy for metadata \"%s\"", + est, entrySchema))); + } + } + } + } } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/PortSpec.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/PortSpec.java index 65304dd..01fb9fc 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/PortSpec.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/PortSpec.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -38,16 +38,16 @@ public class PortSpec { private static final String SOURCE_RANGE = "source_range"; private static final String TARGET = "target"; private static final String TARGET_RANGE = "target_range"; - + private static final String PROPERTY_NAMES[] = { - PROTOCOL, SOURCE, SOURCE_RANGE, - TARGET, TARGET_RANGE + PROTOCOL, SOURCE, SOURCE_RANGE, + TARGET, TARGET_RANGE }; - + // todo(TBD) May want to make this a subclass of DataType // and change init method to set PortSpec's properties public PortSpec() { - + } // The following additional requirements MUST be tested: @@ -59,47 +59,44 @@ public class PortSpec { // 3) A valid PortSpec MUST have a value for the target property that is // within the numeric range specified by the property target_range // when target_range is specified. - public static void validateAdditionalReq(Object _properties, - String propName, - LinkedHashMap custom_def) { - + public static void validateAdditionalReq(Object _properties, + String propName, + LinkedHashMap custom_def) { + try { - LinkedHashMap properties = (LinkedHashMap)_properties; + LinkedHashMap properties = (LinkedHashMap) _properties; Object source = properties.get(PortSpec.SOURCE); Object sourceRange = properties.get(PortSpec.SOURCE_RANGE); Object target = properties.get(PortSpec.TARGET); Object targetRange = properties.get(PortSpec.TARGET_RANGE); // verify one of the specified values is set - if(source == null && sourceRange == null && - target == null && targetRange == null) { + if (source == null && sourceRange == null && + target == null && targetRange == null) { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE129", String.format( - "InvalidTypeAdditionalRequirementsError: Additional requirements for type \"%s\" not met", - TYPE_URI))); + "InvalidTypeAdditionalRequirementsError: Additional requirements for type \"%s\" not met", + TYPE_URI))); } // Validate source value is in specified range - if(source != null && sourceRange != 
null) { - ValidateUtils.validateValueInRange(source,sourceRange,SOURCE); - } - else { + if (source != null && sourceRange != null) { + ValidateUtils.validateValueInRange(source, sourceRange, SOURCE); + } else { DataEntity portdef = new DataEntity("PortDef", source, null, SOURCE); portdef.validate(); } // Validate target value is in specified range - if(target != null && targetRange != null) { - ValidateUtils.validateValueInRange(target,targetRange,SOURCE); - } - else { + if (target != null && targetRange != null) { + ValidateUtils.validateValueInRange(target, targetRange, SOURCE); + } else { DataEntity portdef = new DataEntity("PortDef", source, null, TARGET); portdef.validate(); } - } - catch(Exception e) { + } catch (Exception e) { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE130", String.format( - "ValueError: \"%s\" do not meet requirements for type \"%s\"", - _properties.toString(),SHORTNAME))); + "ValueError: \"%s\" do not meet requirements for type \"%s\"", + _properties.toString(), SHORTNAME))); } - } + } } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/PropertyDef.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/PropertyDef.java index 6e1fe61..484d17e 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/PropertyDef.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/PropertyDef.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -27,12 +27,12 @@ import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; public class PropertyDef { - - private static final String PROPERTY_KEYNAME_DEFAULT = "default"; - private static final String PROPERTY_KEYNAME_REQUIRED = "required"; - private static final String PROPERTY_KEYNAME_STATUS = "status"; - private static final String VALID_PROPERTY_KEYNAMES[] = { - PROPERTY_KEYNAME_DEFAULT, + + private static final String PROPERTY_KEYNAME_DEFAULT = "default"; + private static final String PROPERTY_KEYNAME_REQUIRED = "required"; + private static final String PROPERTY_KEYNAME_STATUS = "status"; + private static final String VALID_PROPERTY_KEYNAMES[] = { + PROPERTY_KEYNAME_DEFAULT, PROPERTY_KEYNAME_REQUIRED, PROPERTY_KEYNAME_STATUS}; @@ -41,122 +41,120 @@ public class PropertyDef { private static final String VALID_REQUIRED_VALUES[] = {"true", "false"}; private static final String PROPERTY_STATUS_SUPPORTED = "supported"; - private static final String PROPERTY_STATUS_EXPERIMENTAL = "experimental"; - private static final String VALID_STATUS_VALUES[] = { - PROPERTY_STATUS_SUPPORTED, PROPERTY_STATUS_EXPERIMENTAL}; - - private static final String PROPERTY_STATUS_DEFAULT = PROPERTY_STATUS_SUPPORTED; - - private String name; - private Object value; - private LinkedHashMap schema; - private String _status; - private boolean _required; - - public PropertyDef(String pdName, Object pdValue, - LinkedHashMap pdSchema) { - name = pdName; - value = pdValue; - schema = pdSchema; + private static final String PROPERTY_STATUS_EXPERIMENTAL = "experimental"; + private static final String VALID_STATUS_VALUES[] = { + PROPERTY_STATUS_SUPPORTED, PROPERTY_STATUS_EXPERIMENTAL}; + + private static final String PROPERTY_STATUS_DEFAULT = PROPERTY_STATUS_SUPPORTED; + + private String name; + private Object value; + private LinkedHashMap schema; + private String _status; + private boolean _required; + + public 
PropertyDef(String pdName, Object pdValue, + LinkedHashMap pdSchema) { + name = pdName; + value = pdValue; + schema = pdSchema; _status = PROPERTY_STATUS_DEFAULT; _required = PROPERTY_REQUIRED_DEFAULT; - if(schema != null) { - // Validate required 'type' property exists - if(schema.get("type") == null) { - //msg = (_('Schema definition of "%(pname)s" must have a "type" ' - // 'attribute.') % dict(pname=self.name)) - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE131", String.format( - "InvalidSchemaError: Schema definition of \"%s\" must have a \"type\" attribute",name))); - } - _loadRequiredAttrFromSchema(); - _loadStatusAttrFromSchema(); + if (schema != null) { + // Validate required 'type' property exists + if (schema.get("type") == null) { + //msg = (_('Schema definition of "%(pname)s" must have a "type" ' + // 'attribute.') % dict(pname=self.name)) + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE131", String.format( + "InvalidSchemaError: Schema definition of \"%s\" must have a \"type\" attribute", name))); + } + _loadRequiredAttrFromSchema(); + _loadStatusAttrFromSchema(); + } + } + + public Object getDefault() { + if (schema != null) { + for (Map.Entry me : schema.entrySet()) { + if (me.getKey().equals(PROPERTY_KEYNAME_DEFAULT)) { + return me.getValue(); + } + } } - } - - public Object getDefault() { - if(schema != null) { - for(Map.Entry me: schema.entrySet()) { - if(me.getKey().equals(PROPERTY_KEYNAME_DEFAULT)) { - return me.getValue(); - } - } - } - return null; - } - - public boolean isRequired() { - return _required; - } - - private void _loadRequiredAttrFromSchema() { + return null; + } + + public boolean isRequired() { + return _required; + } + + private void _loadRequiredAttrFromSchema() { // IF 'required' keyname exists verify it's a boolean, // if so override default - Object val = schema.get(PROPERTY_KEYNAME_REQUIRED); - if(val != null) { - if(val instanceof Boolean) { - 
_required = (boolean)val; - } - else { + Object val = schema.get(PROPERTY_KEYNAME_REQUIRED); + if (val != null) { + if (val instanceof Boolean) { + _required = (boolean) val; + } else { //valid_values = ', '.join(self.VALID_REQUIRED_VALUES) //attr = self.PROPERTY_KEYNAME_REQUIRED //TOSCAException.generate_inv_schema_property_error(self, // attr, // value, // valid_values) - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE132", String.format( - "Schema definition of \"%s\" has \"required\" attribute with an invalid value", - name))); - } - } - } - - public String getStatus() { - return _status; - } - - private void _loadStatusAttrFromSchema() { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE132", String.format( + "Schema definition of \"%s\" has \"required\" attribute with an invalid value", + name))); + } + } + } + + public String getStatus() { + return _status; + } + + private void _loadStatusAttrFromSchema() { // IF 'status' keyname exists verify it's a boolean, // if so override default - String sts = (String)schema.get(PROPERTY_KEYNAME_STATUS); - if(sts != null) { - boolean bFound = false; - for(String vsv: VALID_STATUS_VALUES) { - if(vsv.equals(sts)) { - bFound = true; - break; - } - } - if(bFound) { - _status = sts; - } - else { + String sts = (String) schema.get(PROPERTY_KEYNAME_STATUS); + if (sts != null) { + boolean bFound = false; + for (String vsv : VALID_STATUS_VALUES) { + if (vsv.equals(sts)) { + bFound = true; + break; + } + } + if (bFound) { + _status = sts; + } else { //valid_values = ', '.join(self.VALID_STATUS_VALUES) //attr = self.PROPERTY_KEYNAME_STATUS //TOSCAException.generate_inv_schema_property_error(self, // attr, // value, // valid_values) - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE006", String.format( - "Schema definition of \"%s\" has \"status\" attribute with an invalid value", - name))); - } - } - } - - public String 
getName() { - return name; - } - - public LinkedHashMap getSchema() { - return schema; - } - - public Object getPDValue() { - // there's getValue in EntityType... - return value; - } - + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE006", String.format( + "Schema definition of \"%s\" has \"status\" attribute with an invalid value", + name))); + } + } + } + + public String getName() { + return name; + } + + public LinkedHashMap getSchema() { + return schema; + } + + public Object getPDValue() { + // there's getValue in EntityType... + return value; + } + } /*python diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/RelationshipType.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/RelationshipType.java index 17f420d..4c39ec2 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/RelationshipType.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/RelationshipType.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -27,56 +27,56 @@ import java.util.LinkedHashMap; public class RelationshipType extends StatefulEntityType { - private static final String DERIVED_FROM = "derived_from"; - private static final String VALID_TARGET_TYPES = "valid_target_types"; - private static final String INTERFACES = "interfaces"; - private static final String ATTRIBUTES = "attributes"; - private static final String PROPERTIES = "properties"; - private static final String DESCRIPTION = "description"; - private static final String VERSION = "version"; - private static final String CREDENTIAL = "credential"; - - private static final String SECTIONS[] = { - DERIVED_FROM, VALID_TARGET_TYPES, INTERFACES, - ATTRIBUTES, PROPERTIES, DESCRIPTION, VERSION, CREDENTIAL}; - - private String capabilityName; - private LinkedHashMap customDef; - - public RelationshipType(String _type, String _capabilityName, LinkedHashMap _customDef) { - super(_type,RELATIONSHIP_PREFIX,_customDef); - capabilityName = _capabilityName; - customDef = _customDef; - } - - public RelationshipType getParentType() { + private static final String DERIVED_FROM = "derived_from"; + private static final String VALID_TARGET_TYPES = "valid_target_types"; + private static final String INTERFACES = "interfaces"; + private static final String ATTRIBUTES = "attributes"; + private static final String PROPERTIES = "properties"; + private static final String DESCRIPTION = "description"; + private static final String VERSION = "version"; + private static final String CREDENTIAL = "credential"; + + private static final String[] SECTIONS = { + DERIVED_FROM, VALID_TARGET_TYPES, INTERFACES, + ATTRIBUTES, PROPERTIES, DESCRIPTION, VERSION, CREDENTIAL}; + + private String capabilityName; + private LinkedHashMap customDef; + + public RelationshipType(String type, String capabilityName, LinkedHashMap customDef) { + super(type, RELATIONSHIP_PREFIX, customDef); + this.capabilityName = capabilityName; + this.customDef = customDef; + } + + public RelationshipType 
getParentType() { // Return a relationship this reletionship is derived from.''' String prel = derivedFrom(defs); - if(prel != null) { - return new RelationshipType(prel,null,customDef); + if (prel != null) { + return new RelationshipType(prel, null, customDef); } return null; - } - - public Object getValidTargetTypes() { - return entityValue(defs,"valid_target_types"); - } - - private void _validateKeys() { - for(String key: defs.keySet()) { - boolean bFound = false; - for(int i=0; i< SECTIONS.length; i++) { - if(key.equals(SECTIONS[i])) { - bFound = true; - break; - } - } - if(!bFound) { + } + + public Object getValidTargetTypes() { + return entityValue(defs, "valid_target_types"); + } + + private void validateKeys() { + for (String key : defs.keySet()) { + boolean bFound = false; + for (String section : SECTIONS) { + if (key.equals(section)) { + bFound = true; + break; + } + } + if (!bFound) { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE133", String.format( - "UnknownFieldError: Relationshiptype \"%s\" has unknown field \"%s\"",type,key))); - } + "UnknownFieldError: Relationshiptype \"%s\" has unknown field \"%s\"", type, key))); + } } - } + } } /*python diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnit.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnit.java index eeaa07c..1eaa8a0 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnit.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnit.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -20,147 +20,152 @@ package org.onap.sdc.toscaparser.api.elements; -import java.util.HashMap; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; import org.onap.sdc.toscaparser.api.utils.ValidateUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import java.util.HashMap; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + public abstract class ScalarUnit { - private static Logger log = LoggerFactory.getLogger(ScalarUnit.class.getName()); + private static Logger log = LoggerFactory.getLogger(ScalarUnit.class.getName()); + + private static final String SCALAR_UNIT_SIZE = "scalar-unit.size"; + private static final String SCALAR_UNIT_FREQUENCY = "scalar-unit.frequency"; + private static final String SCALAR_UNIT_TIME = "scalar-unit.time"; - private static final String SCALAR_UNIT_SIZE = "scalar-unit.size"; - private static final String SCALAR_UNIT_FREQUENCY = "scalar-unit.frequency"; - private static final String SCALAR_UNIT_TIME = "scalar-unit.time"; - - public static final String SCALAR_UNIT_TYPES[] = { + public static final String[] SCALAR_UNIT_TYPES = { SCALAR_UNIT_SIZE, SCALAR_UNIT_FREQUENCY, SCALAR_UNIT_TIME - }; - - private Object value; - protected HashMap SCALAR_UNIT_DICT; - protected String SCALAR_UNIT_DEFAULT; - - public ScalarUnit(Object _value) { - value = _value; - SCALAR_UNIT_DICT = new HashMap<>(); - SCALAR_UNIT_DEFAULT = ""; - } - - - private String _checkUnitInScalarStandardUnits(String inputUnit) { + }; + + private Object value; + private HashMap scalarUnitDict; + 
private String scalarUnitDefault; + + public ScalarUnit(Object value) { + this.value = value; + scalarUnitDict = new HashMap<>(); + scalarUnitDefault = ""; + } + + void putToScalarUnitDict(String key, Object value) { + scalarUnitDict.put(key, value); + } + + void setScalarUnitDefault(String scalarUnitDefault) { + this.scalarUnitDefault = scalarUnitDefault; + } + + private String checkUnitInScalarStandardUnits(String inputUnit) { // Check whether the input unit is following specified standard - + // If unit is not following specified standard, convert it to standard // unit after displaying a warning message. - - if(SCALAR_UNIT_DICT.get(inputUnit) != null) { - return inputUnit; - } - else { - for(String key: SCALAR_UNIT_DICT.keySet()) { - if(key.toUpperCase().equals(inputUnit.toUpperCase())) { - log.debug("ScalarUnit - _checkUnitInScalarStandardUnits - \n" + - "The unit {} does not follow scalar unit standards\n" + - "using {} instead", - inputUnit, key); - return key; - } - } - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE007", String.format( - "'The unit \"%s\" is not valid. Valid units are \n%s", - inputUnit,SCALAR_UNIT_DICT.keySet().toString()))); + + if (scalarUnitDict.get(inputUnit) != null) { + return inputUnit; + } else { + for (String key : scalarUnitDict.keySet()) { + if (key.toUpperCase().equals(inputUnit.toUpperCase())) { + log.debug("ScalarUnit - checkUnitInScalarStandardUnits - \n" + + "The unit {} does not follow scalar unit standards\n" + + "using {} instead", + inputUnit, key); + return key; + } + } + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE007", String.format( + "'The unit \"%s\" is not valid. 
Valid units are \n%s", + inputUnit, scalarUnitDict.keySet().toString()))); return inputUnit; - } - } - - public Object validateScalarUnit() { - Pattern pattern = Pattern.compile("([0-9.]+)\\s*(\\w+)"); - Matcher matcher = pattern.matcher(value.toString()); - if(matcher.find()) { - ValidateUtils.strToNum(matcher.group(1)); - String scalarUnit = _checkUnitInScalarStandardUnits(matcher.group(2)); - value = matcher.group(1) + " " + scalarUnit; - } - else { + } + } + + public Object validateScalarUnit() { + Pattern pattern = Pattern.compile("([0-9.]+)\\s*(\\w+)"); + Matcher matcher = pattern.matcher(value.toString()); + if (matcher.find()) { + ValidateUtils.strToNum(matcher.group(1)); + String scalarUnit = checkUnitInScalarStandardUnits(matcher.group(2)); + value = matcher.group(1) + " " + scalarUnit; + } else { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE134", String.format( - "ValueError: \"%s\" is not a valid scalar-unit",value.toString()))); - } - return value; - } - - public double getNumFromScalarUnit(String unit) { - if(unit != null) { - unit = _checkUnitInScalarStandardUnits(unit); - } - else { - unit = SCALAR_UNIT_DEFAULT; - } - Pattern pattern = Pattern.compile("([0-9.]+)\\s*(\\w+)"); - Matcher matcher = pattern.matcher(value.toString()); - if(matcher.find()) { - ValidateUtils.strToNum(matcher.group(1)); - String scalarUnit = _checkUnitInScalarStandardUnits(matcher.group(2)); - value = matcher.group(1) + " " + scalarUnit; - Object on1 = ValidateUtils.strToNum(matcher.group(1)) != null ? ValidateUtils.strToNum(matcher.group(1)) : 0; - Object on2 = SCALAR_UNIT_DICT.get(matcher.group(2)) != null ? SCALAR_UNIT_DICT.get(matcher.group(2)) : 0; - Object on3 = SCALAR_UNIT_DICT.get(unit) != null ? 
SCALAR_UNIT_DICT.get(unit) : 0; - - Double n1 = new Double(on1.toString()); - Double n2 = new Double(on2.toString()); - Double n3 = new Double(on3.toString()); - double converted = n1 * n2 / n3; - if(Math.abs(converted - Math.round(converted)) < 0.0000000000001 ) { - converted = Math.round(converted); - } - return converted; - } - return 0l; //??? - } - - protected static HashMap scalarunitMapping = _getScalarunitMappings(); - - private static HashMap _getScalarunitMappings() { - HashMap map = new HashMap<>(); - map.put(SCALAR_UNIT_FREQUENCY,"ScalarUnitFrequency"); - map.put(SCALAR_UNIT_SIZE, "ScalarUnitSize"); - map.put(SCALAR_UNIT_TIME, "ScalarUnit_Time"); - return map; - } - - public static ScalarUnit getScalarunitClass(String type,Object val) { - if(type.equals(SCALAR_UNIT_SIZE)) { - return new ScalarUnitSize(val); - } - else if(type.equals(SCALAR_UNIT_TIME)) { - return new ScalarUnitTime(val); - } - else if(type.equals(SCALAR_UNIT_FREQUENCY)) { - return new ScalarUnitFrequency(val); - } - return null; - } - - public static double getScalarunitValue(String type, Object value, String unit) { - if(type.equals(SCALAR_UNIT_SIZE)) { - return (new ScalarUnitSize(value)).getNumFromScalarUnit(unit); - } - if(type.equals(SCALAR_UNIT_TIME)) { - return (new ScalarUnitTime(value)).getNumFromScalarUnit(unit); - } - if(type.equals(SCALAR_UNIT_FREQUENCY)) { - return (new ScalarUnitFrequency(value)).getNumFromScalarUnit(unit); - } + "ValueError: \"%s\" is not a valid scalar-unit", value.toString()))); + } + return value; + } + + public double getNumFromScalarUnit(String unit) { + if (unit != null) { + unit = checkUnitInScalarStandardUnits(unit); + } else { + unit = scalarUnitDefault; + } + Pattern pattern = Pattern.compile("([0-9.]+)\\s*(\\w+)"); + Matcher matcher = pattern.matcher(value.toString()); + if (matcher.find()) { + final double minimalNum = 0.0000000000001; + + ValidateUtils.strToNum(matcher.group(1)); + String scalarUnit = 
checkUnitInScalarStandardUnits(matcher.group(2)); + value = matcher.group(1) + " " + scalarUnit; + Object on1 = ValidateUtils.strToNum(matcher.group(1)) != null ? ValidateUtils.strToNum(matcher.group(1)) : 0; + Object on2 = scalarUnitDict.get(matcher.group(2)) != null ? scalarUnitDict.get(matcher.group(2)) : 0; + Object on3 = scalarUnitDict.get(unit) != null ? scalarUnitDict.get(unit) : 0; + + Double n1 = new Double(on1.toString()); + Double n2 = new Double(on2.toString()); + Double n3 = new Double(on3.toString()); + double converted = n1 * n2 / n3; + + if (Math.abs(converted - Math.round(converted)) < minimalNum) { + converted = Math.round(converted); + } + return converted; + } + return 0.0; + } + + private static HashMap scalarUnitMapping = getScalarUnitMappings(); + + private static HashMap getScalarUnitMappings() { + HashMap map = new HashMap<>(); + map.put(SCALAR_UNIT_FREQUENCY, "ScalarUnitFrequency"); + map.put(SCALAR_UNIT_SIZE, "ScalarUnitSize"); + map.put(SCALAR_UNIT_TIME, "ScalarUnit_Time"); + return map; + } + + public static ScalarUnit getScalarunitClass(String type, Object val) { + if (type.equals(SCALAR_UNIT_SIZE)) { + return new ScalarUnitSize(val); + } else if (type.equals(SCALAR_UNIT_TIME)) { + return new ScalarUnitTime(val); + } else if (type.equals(SCALAR_UNIT_FREQUENCY)) { + return new ScalarUnitFrequency(val); + } + return null; + } + + public static double getScalarunitValue(String type, Object value, String unit) { + if (type.equals(SCALAR_UNIT_SIZE)) { + return (new ScalarUnitSize(value)).getNumFromScalarUnit(unit); + } + if (type.equals(SCALAR_UNIT_TIME)) { + return (new ScalarUnitTime(value)).getNumFromScalarUnit(unit); + } + if (type.equals(SCALAR_UNIT_FREQUENCY)) { + return (new ScalarUnitFrequency(value)).getNumFromScalarUnit(unit); + } ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE135", String.format( - "TypeError: \"%s\" is not a valid scalar-unit type",type))); + "TypeError: \"%s\" is not a 
valid scalar-unit type", type))); return 0.0; - } - + } + } /*python @@ -190,10 +195,10 @@ class ScalarUnit(object): If unit is not following specified standard, convert it to standard unit after displaying a warning message. """ - if input_unit in self.SCALAR_UNIT_DICT.keys(): + if input_unit in self.scalarUnitDict.keys(): return input_unit else: - for key in self.SCALAR_UNIT_DICT.keys(): + for key in self.scalarUnitDict.keys(): if key.upper() == input_unit.upper(): log.warning(_('The unit "%(unit)s" does not follow ' 'scalar unit standards; using "%(key)s" ' @@ -203,7 +208,7 @@ class ScalarUnit(object): msg = (_('The unit "%(unit)s" is not valid. Valid units are ' '"%(valid_units)s".') % {'unit': input_unit, - 'valid_units': sorted(self.SCALAR_UNIT_DICT.keys())}) + 'valid_units': sorted(self.scalarUnitDict.keys())}) ValidationIssueCollector.appendException(ValueError(msg)) def validate_scalar_unit(self): @@ -224,14 +229,14 @@ class ScalarUnit(object): if unit: unit = self._check_unit_in_scalar_standard_units(unit) else: - unit = self.SCALAR_UNIT_DEFAULT + unit = self.scalarUnitDefault self.validate_scalar_unit() regex = re.compile('([0-9.]+)\s*(\w+)') result = regex.match(str(self.value)).groups() converted = (float(validateutils.str_to_num(result[0])) - * self.SCALAR_UNIT_DICT[result[1]] - / self.SCALAR_UNIT_DICT[unit]) + * self.scalarUnitDict[result[1]] + / self.scalarUnitDict[unit]) if converted - int(converted) < 0.0000000000001: converted = int(converted) return converted @@ -239,8 +244,8 @@ class ScalarUnit(object): class ScalarUnit_Size(ScalarUnit): - SCALAR_UNIT_DEFAULT = 'B' - SCALAR_UNIT_DICT = {'B': 1, 'kB': 1000, 'KiB': 1024, 'MB': 1000000, + scalarUnitDefault = 'B' + scalarUnitDict = {'B': 1, 'kB': 1000, 'KiB': 1024, 'MB': 1000000, 'MiB': 1048576, 'GB': 1000000000, 'GiB': 1073741824, 'TB': 1000000000000, 'TiB': 1099511627776} @@ -248,15 +253,15 @@ class ScalarUnit_Size(ScalarUnit): class ScalarUnit_Time(ScalarUnit): - SCALAR_UNIT_DEFAULT = 'ms' - 
SCALAR_UNIT_DICT = {'d': 86400, 'h': 3600, 'm': 60, 's': 1, + scalarUnitDefault = 'ms' + scalarUnitDict = {'d': 86400, 'h': 3600, 'm': 60, 's': 1, 'ms': 0.001, 'us': 0.000001, 'ns': 0.000000001} class ScalarUnit_Frequency(ScalarUnit): - SCALAR_UNIT_DEFAULT = 'GHz' - SCALAR_UNIT_DICT = {'Hz': 1, 'kHz': 1000, + scalarUnitDefault = 'GHz' + scalarUnitDict = {'Hz': 1, 'kHz': 1000, 'MHz': 1000000, 'GHz': 1000000000} diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitFrequency.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitFrequency.java index 59664ca..ed10da9 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitFrequency.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitFrequency.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -22,13 +22,18 @@ package org.onap.sdc.toscaparser.api.elements; public class ScalarUnitFrequency extends ScalarUnit { - public ScalarUnitFrequency(Object value) { - super(value); - SCALAR_UNIT_DEFAULT = "GHz"; - SCALAR_UNIT_DICT.put("Hz",1L); - SCALAR_UNIT_DICT.put("kHz",1000L); - SCALAR_UNIT_DICT.put("MHz",1000000L); - SCALAR_UNIT_DICT.put("GHz",1000000000L); - } + private static final Long HZ = 1L; + private static final Long KHZ = 1000L; + private static final Long MHZ = 1000000L; + private static final Long GHZ = 1000000000L; + + public ScalarUnitFrequency(Object value) { + super(value); + setScalarUnitDefault("GHz"); + putToScalarUnitDict("Hz", HZ); + putToScalarUnitDict("kHz", KHZ); + putToScalarUnitDict("MHz", MHZ); + putToScalarUnitDict("GHz", GHZ); + } } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitSize.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitSize.java index d29d8a2..78687a1 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitSize.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitSize.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -20,20 +20,24 @@ package org.onap.sdc.toscaparser.api.elements; +import org.onap.sdc.toscaparser.api.elements.enums.FileSize; + public class ScalarUnitSize extends ScalarUnit { - public ScalarUnitSize(Object value) { - super(value); - - SCALAR_UNIT_DEFAULT = "B"; - SCALAR_UNIT_DICT.put("B",1L); - SCALAR_UNIT_DICT.put("kB",1000L); - SCALAR_UNIT_DICT.put("kiB",1024L); - SCALAR_UNIT_DICT.put("MB",1000000L); - SCALAR_UNIT_DICT.put("MiB",1048576L); - SCALAR_UNIT_DICT.put("GB",1000000000L); - SCALAR_UNIT_DICT.put("GiB",1073741824L); - SCALAR_UNIT_DICT.put("TB",1000000000000L); - SCALAR_UNIT_DICT.put("TiB",1099511627776L); - } + + + public ScalarUnitSize(Object value) { + super(value); + + setScalarUnitDefault("B"); + putToScalarUnitDict("B", FileSize.B); + putToScalarUnitDict("kB", FileSize.KB); + putToScalarUnitDict("MB", FileSize.MB); + putToScalarUnitDict("GB", FileSize.GB); + putToScalarUnitDict("TB", FileSize.TB); + putToScalarUnitDict("kiB", FileSize.KIB); + putToScalarUnitDict("MiB", FileSize.MIB); + putToScalarUnitDict("GiB", FileSize.GIB); + putToScalarUnitDict("TiB", FileSize.TIB); + } } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitTime.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitTime.java index 45848af..8d2c13e 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitTime.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitTime.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -22,16 +22,16 @@ package org.onap.sdc.toscaparser.api.elements; public class ScalarUnitTime extends ScalarUnit { - public ScalarUnitTime(Object value) { - super(value); - SCALAR_UNIT_DEFAULT = "ms"; - SCALAR_UNIT_DICT.put("d",86400L); - SCALAR_UNIT_DICT.put("h",3600L); - SCALAR_UNIT_DICT.put("m",60L); - SCALAR_UNIT_DICT.put("s",1L); - SCALAR_UNIT_DICT.put("ms",0.001); - SCALAR_UNIT_DICT.put("us",0.000001); - SCALAR_UNIT_DICT.put("ns",0.000000001); - } + public ScalarUnitTime(Object value) { + super(value); + setScalarUnitDefault("ms"); + putToScalarUnitDict("d", 86400L); + putToScalarUnitDict("h", 3600L); + putToScalarUnitDict("m", 60L); + putToScalarUnitDict("s", 1L); + putToScalarUnitDict("ms", 0.001); + putToScalarUnitDict("us", 0.000001); + putToScalarUnitDict("ns", 0.000000001); + } } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/StatefulEntityType.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/StatefulEntityType.java index ef9159f..b710dda 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/StatefulEntityType.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/StatefulEntityType.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -20,140 +20,136 @@ package org.onap.sdc.toscaparser.api.elements; +import org.onap.sdc.toscaparser.api.UnsupportedType; import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; import java.util.ArrayList; import java.util.LinkedHashMap; import java.util.Map; -import org.onap.sdc.toscaparser.api.UnsupportedType; -import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; - public class StatefulEntityType extends EntityType { // Class representing TOSCA states - public static final String interfacesNodeLifecycleOperations[] = { - "create", "configure", "start", "stop", "delete"}; + public static final String[] INTERFACE_NODE_LIFECYCLE_OPERATIONS = { + "create", "configure", "start", "stop", "delete"}; - public static final String interfacesRelationshipConfigureOperations[] = { - "post_configure_source", "post_configure_target", "add_target", "remove_target"}; + public static final String[] INTERFACE_RELATIONSHIP_CONFIGURE_OPERATIONS = { + "post_configure_source", "post_configure_target", "add_target", "remove_target"}; public StatefulEntityType() { - // void constructor for subclasses that don't want super + // void constructor for subclasses that don't want super } - - @SuppressWarnings("unchecked") - public StatefulEntityType(String entityType, String prefix, LinkedHashMap customDef) { + + @SuppressWarnings("unchecked") + public StatefulEntityType(String entityType, String prefix, LinkedHashMap customDef) { String entireEntityType = entityType; - if(UnsupportedType.validateType(entireEntityType)) { + if (UnsupportedType.validateType(entireEntityType)) { defs = null; - } - else { - if(entityType.startsWith(TOSCA + ":")) { - entityType = entityType.substring(TOSCA.length()+1); + } else { + if (entityType.startsWith(TOSCA + ":")) { + entityType = entityType.substring(TOSCA.length() + 1); entireEntityType = prefix + entityType; } - if(!entityType.startsWith(TOSCA)) { + if 
(!entityType.startsWith(TOSCA)) { entireEntityType = prefix + entityType; } - if(TOSCA_DEF.get(entireEntityType) != null) { - defs = (LinkedHashMap )TOSCA_DEF.get(entireEntityType); + if (TOSCA_DEF.get(entireEntityType) != null) { + defs = (LinkedHashMap) TOSCA_DEF.get(entireEntityType); entityType = entireEntityType; - } - else if(customDef != null && customDef.get(entityType) != null) { - defs = (LinkedHashMap )customDef.get(entityType); - } - else{ + } else if (customDef != null && customDef.get(entityType) != null) { + defs = (LinkedHashMap) customDef.get(entityType); + } else { defs = null; - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE136", String.format( - "InvalidTypeError: \"%s\" is not a valid type",entityType))); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE136", String.format( + "InvalidTypeError: \"%s\" is not a valid type", entityType))); } } type = entityType; - } - - @SuppressWarnings("unchecked") - public ArrayList getPropertiesDefObjects() { - // Return a list of property definition objects - ArrayList properties = new ArrayList(); - LinkedHashMap props = (LinkedHashMap)getDefinition(PROPERTIES); - if(props != null) { - for(Map.Entry me: props.entrySet()) { - String pdname = me.getKey(); - Object to = me.getValue(); - if(to == null || !(to instanceof LinkedHashMap)) { - String s = to == null ? 
"null" : to.getClass().getSimpleName(); - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE137", String.format( - "Unexpected type error: property \"%s\" has type \"%s\" (expected dict)",pdname,s))); - continue; - } - LinkedHashMap pdschema = (LinkedHashMap)to; - properties.add(new PropertyDef(pdname,null,pdschema)); - } - } - return properties; - } - - public LinkedHashMap getPropertiesDef() { - LinkedHashMap pds = new LinkedHashMap(); - for(PropertyDef pd: getPropertiesDefObjects()) { - pds.put(pd.getName(),pd); - } - return pds; - } - - public PropertyDef getPropertyDefValue(String name) { - // Return the property definition associated with a given name - PropertyDef pd = null; - LinkedHashMap propsDef = getPropertiesDef(); - if(propsDef != null) { - pd = propsDef.get(name); - } - return pd; - } - - public ArrayList getAttributesDefObjects() { - // Return a list of attribute definition objects - @SuppressWarnings("unchecked") - LinkedHashMap attrs = (LinkedHashMap)getValue(ATTRIBUTES,null,true); - ArrayList ads = new ArrayList<>(); - if(attrs != null) { - for(Map.Entry me: attrs.entrySet()) { - String attr = me.getKey(); - @SuppressWarnings("unchecked") - LinkedHashMap adschema = (LinkedHashMap)me.getValue(); - ads.add(new AttributeDef(attr,null,adschema)); - } - } - return ads; - } - - public LinkedHashMap getAttributesDef() { - // Return a dictionary of attribute definition name-object pairs - - LinkedHashMap ads = new LinkedHashMap<>(); - for(AttributeDef ado: getAttributesDefObjects()) { - ads.put(((AttributeDef)ado).getName(),ado); - } - return ads; - } - - public AttributeDef getAttributeDefValue(String name) { - // Return the attribute definition associated with a given name - AttributeDef ad = null; - LinkedHashMap attrsDef = getAttributesDef(); - if(attrsDef != null) { - ad = attrsDef.get(name); - } - return ad; - } - - public String getType() { - return type; - } - } + } + + @SuppressWarnings("unchecked") + public 
ArrayList getPropertiesDefObjects() { + // Return a list of property definition objects + ArrayList properties = new ArrayList(); + LinkedHashMap props = (LinkedHashMap) getDefinition(PROPERTIES); + if (props != null) { + for (Map.Entry me : props.entrySet()) { + String pdname = me.getKey(); + Object to = me.getValue(); + if (to == null || !(to instanceof LinkedHashMap)) { + String s = to == null ? "null" : to.getClass().getSimpleName(); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE137", String.format( + "Unexpected type error: property \"%s\" has type \"%s\" (expected dict)", pdname, s))); + continue; + } + LinkedHashMap pdschema = (LinkedHashMap) to; + properties.add(new PropertyDef(pdname, null, pdschema)); + } + } + return properties; + } + + public LinkedHashMap getPropertiesDef() { + LinkedHashMap pds = new LinkedHashMap(); + for (PropertyDef pd : getPropertiesDefObjects()) { + pds.put(pd.getName(), pd); + } + return pds; + } + + public PropertyDef getPropertyDefValue(String name) { + // Return the property definition associated with a given name + PropertyDef pd = null; + LinkedHashMap propsDef = getPropertiesDef(); + if (propsDef != null) { + pd = propsDef.get(name); + } + return pd; + } + + public ArrayList getAttributesDefObjects() { + // Return a list of attribute definition objects + @SuppressWarnings("unchecked") + LinkedHashMap attrs = (LinkedHashMap) getValue(ATTRIBUTES, null, true); + ArrayList ads = new ArrayList<>(); + if (attrs != null) { + for (Map.Entry me : attrs.entrySet()) { + String attr = me.getKey(); + @SuppressWarnings("unchecked") + LinkedHashMap adschema = (LinkedHashMap) me.getValue(); + ads.add(new AttributeDef(attr, null, adschema)); + } + } + return ads; + } + + public LinkedHashMap getAttributesDef() { + // Return a dictionary of attribute definition name-object pairs + + LinkedHashMap ads = new LinkedHashMap<>(); + for (AttributeDef ado : getAttributesDefObjects()) { + 
ads.put(((AttributeDef) ado).getName(), ado); + } + return ads; + } + + public AttributeDef getAttributeDefValue(String name) { + // Return the attribute definition associated with a given name + AttributeDef ad = null; + LinkedHashMap attrsDef = getAttributesDef(); + if (attrsDef != null) { + ad = attrsDef.get(name); + } + return ad; + } + + public String getType() { + return type; + } +} /*python diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/TypeValidation.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/TypeValidation.java index 9321064..18dd5ca 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/TypeValidation.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/TypeValidation.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -30,25 +30,25 @@ import org.onap.sdc.toscaparser.api.extensions.ExtTools; public class TypeValidation { - private static final String DEFINITION_VERSION = "tosca_definitions_version"; - private static final String DESCRIPTION = "description"; - private static final String IMPORTS = "imports"; - private static final String DSL_DEFINITIONS = "dsl_definitions"; - private static final String NODE_TYPES = "node_types"; - private static final String REPOSITORIES = "repositories"; - private static final String DATA_TYPES = "data_types"; - private static final String ARTIFACT_TYPES = "artifact_types"; - private static final String GROUP_TYPES = "group_types"; - private static final String RELATIONSHIP_TYPES = "relationship_types"; - private static final String CAPABILITY_TYPES = "capability_types"; - private static final String INTERFACE_TYPES = "interface_types"; - private static final String POLICY_TYPES = "policy_types"; - private static final String TOPOLOGY_TEMPLATE = "topology_template"; - //Pavel - private static final String METADATA = "metadata"; - - private String ALLOWED_TYPE_SECTIONS[] = { - DEFINITION_VERSION, DESCRIPTION, IMPORTS, + private static final String DEFINITION_VERSION = "tosca_definitions_version"; + private static final String DESCRIPTION = "description"; + private static final String IMPORTS = "imports"; + private static final String DSL_DEFINITIONS = "dsl_definitions"; + private static final String NODE_TYPES = "node_types"; + private static final String REPOSITORIES = "repositories"; + private static final String DATA_TYPES = "data_types"; + private static final String ARTIFACT_TYPES = "artifact_types"; + private static final String GROUP_TYPES = "group_types"; + private static final String RELATIONSHIP_TYPES = "relationship_types"; + private static final String CAPABILITY_TYPES = "capability_types"; + private static final String INTERFACE_TYPES = "interface_types"; + private static final String POLICY_TYPES = "policy_types"; + private 
static final String TOPOLOGY_TEMPLATE = "topology_template"; + //Pavel + private static final String METADATA = "metadata"; + + private String ALLOWED_TYPE_SECTIONS[] = { + DEFINITION_VERSION, DESCRIPTION, IMPORTS, DSL_DEFINITIONS, NODE_TYPES, REPOSITORIES, DATA_TYPES, ARTIFACT_TYPES, GROUP_TYPES, RELATIONSHIP_TYPES, CAPABILITY_TYPES, @@ -57,65 +57,65 @@ public class TypeValidation { }; private static ArrayList VALID_TEMPLATE_VERSIONS = _getVTV(); - + private static ArrayList _getVTV() { - ArrayList vtv = new ArrayList<>(); - vtv.add("tosca_simple_yaml_1_0"); - vtv.add("tosca_simple_yaml_1_1"); - ExtTools exttools = new ExtTools(); + ArrayList vtv = new ArrayList<>(); + vtv.add("tosca_simple_yaml_1_0"); + vtv.add("tosca_simple_yaml_1_1"); + ExtTools exttools = new ExtTools(); vtv.addAll(exttools.getVersions()); return vtv; } - + //private LinkedHashMap customTypes; private Object importDef; //private String version; - - public TypeValidation(LinkedHashMap _customTypes, - Object _importDef) { + + public TypeValidation(LinkedHashMap _customTypes, + Object _importDef) { importDef = _importDef; _validateTypeKeys(_customTypes); } - - private void _validateTypeKeys(LinkedHashMap customTypes) { - - String sVersion = (String)customTypes.get(DEFINITION_VERSION); - if(sVersion != null) { - _validateTypeVersion(sVersion); + + private void _validateTypeKeys(LinkedHashMap customTypes) { + + String sVersion = (String) customTypes.get(DEFINITION_VERSION); + if (sVersion != null) { + _validateTypeVersion(sVersion); //version = sVersion; } - for(String name: customTypes.keySet()) { - boolean bFound = false; - for(String ats: ALLOWED_TYPE_SECTIONS) { - if(name.equals(ats)) { - bFound = true; - break; - } - } - if(!bFound) { + for (String name : customTypes.keySet()) { + boolean bFound = false; + for (String ats : ALLOWED_TYPE_SECTIONS) { + if (name.equals(ats)) { + bFound = true; + break; + } + } + if (!bFound) { ThreadLocalsHolder.getCollector().appendValidationIssue(new 
JToscaValidationIssue("JE138", String.format( - "UnknownFieldError: Template \"%s\" contains unknown field \"%s\"", - importDef.toString(),name))); - } + "UnknownFieldError: Template \"%s\" contains unknown field \"%s\"", + importDef.toString(), name))); + } } } - + private void _validateTypeVersion(String sVersion) { - boolean bFound = false; - String allowed = ""; - for(String atv: VALID_TEMPLATE_VERSIONS) { - allowed += "\"" + atv + "\" "; - if(sVersion.equals(atv)) { - bFound = true; - break; - } - } - if(!bFound) { + boolean bFound = false; + String allowed = ""; + for (String atv : VALID_TEMPLATE_VERSIONS) { + allowed += "\"" + atv + "\" "; + if (sVersion.equals(atv)) { + bFound = true; + break; + } + } + if (!bFound) { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE139", String.format( - "InvalidTemplateVersion: version \"%s\" in \"%s\" is not supported\n" + - "Allowed versions: [%s]", - sVersion,importDef.toString(),allowed))); - } + "InvalidTemplateVersion: version \"%s\" in \"%s\" is not supported\n" + + "Allowed versions: [%s]", + sVersion, importDef.toString(), allowed))); + } } } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Constraint.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Constraint.java index 82f6718..dd77659 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Constraint.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Constraint.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -20,175 +20,221 @@ package org.onap.sdc.toscaparser.api.elements.constraints; -import java.util.ArrayList; -import java.util.LinkedHashMap; - import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; import org.onap.sdc.toscaparser.api.elements.ScalarUnit; import org.onap.sdc.toscaparser.api.functions.Function; import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.List; + public abstract class Constraint { - - // Parent class for constraints for a Property or Input - - protected static final String EQUAL = "equal"; - protected static final String GREATER_THAN = "greater_than"; - protected static final String GREATER_OR_EQUAL = "greater_or_equal"; - protected static final String LESS_THAN = "less_than"; - protected static final String LESS_OR_EQUAL = "less_or_equal"; - protected static final String IN_RANGE = "in_range"; - protected static final String VALID_VALUES = "valid_values"; - protected static final String LENGTH = "length"; - protected static final String MIN_LENGTH = "min_length"; - protected static final String MAX_LENGTH = "max_length"; - protected static final String PATTERN = "pattern"; - - protected static final String CONSTRAINTS[] = { - EQUAL, GREATER_THAN,GREATER_OR_EQUAL, LESS_THAN, LESS_OR_EQUAL, - IN_RANGE, VALID_VALUES, LENGTH, MIN_LENGTH, MAX_LENGTH, PATTERN}; - - @SuppressWarnings("unchecked") - public static Constraint factory(String constraintClass,String propname,String proptype,Object constraint) { - - // a factory for the different Constraint classes - // replaces Python's __new__() usage - - if(!(constraint instanceof LinkedHashMap) || - ((LinkedHashMap)constraint).size() != 1) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE101", - "InvalidSchemaError: Invalid constraint schema " + constraint.toString())); - } - - if(constraintClass.equals(EQUAL)) { - return new Equal(propname,proptype,constraint); 
- } - else if(constraintClass.equals(GREATER_THAN)) { - return new GreaterThan(propname,proptype,constraint); - } - else if(constraintClass.equals(GREATER_OR_EQUAL)) { - return new GreaterOrEqual(propname,proptype,constraint); - } - else if(constraintClass.equals(LESS_THAN)) { - return new LessThan(propname,proptype,constraint); - } - else if(constraintClass.equals(LESS_OR_EQUAL)) { - return new LessOrEqual(propname,proptype,constraint); - } - else if(constraintClass.equals(IN_RANGE)) { - return new InRange(propname,proptype,constraint); - } - else if(constraintClass.equals(VALID_VALUES)) { - return new ValidValues(propname,proptype,constraint); - } - else if(constraintClass.equals(LENGTH)) { - return new Length(propname,proptype,constraint); - } - else if(constraintClass.equals(MIN_LENGTH)) { - return new MinLength(propname,proptype,constraint); - } - else if(constraintClass.equals(MAX_LENGTH)) { - return new MaxLength(propname,proptype,constraint); - } - else if(constraintClass.equals(PATTERN)) { - return new Pattern(propname,proptype,constraint); + + // Parent class for constraints for a Property or Input + + protected static final String EQUAL = "equal"; + protected static final String GREATER_THAN = "greater_than"; + protected static final String GREATER_OR_EQUAL = "greater_or_equal"; + protected static final String LESS_THAN = "less_than"; + protected static final String LESS_OR_EQUAL = "less_or_equal"; + protected static final String IN_RANGE = "in_range"; + protected static final String VALID_VALUES = "valid_values"; + protected static final String LENGTH = "length"; + protected static final String MIN_LENGTH = "min_length"; + protected static final String MAX_LENGTH = "max_length"; + protected static final String PATTERN = "pattern"; + + protected static final String[] CONSTRAINTS = { + EQUAL, GREATER_THAN, GREATER_OR_EQUAL, LESS_THAN, LESS_OR_EQUAL, + IN_RANGE, VALID_VALUES, LENGTH, MIN_LENGTH, MAX_LENGTH, PATTERN}; + + @SuppressWarnings("unchecked") + 
public static Constraint factory(String constraintClass, String propname, String proptype, Object constraint) { + + // a factory for the different Constraint classes + // replaces Python's __new__() usage + + if (!(constraint instanceof LinkedHashMap) + || ((LinkedHashMap) constraint).size() != 1) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE101", + "InvalidSchemaError: Invalid constraint schema " + constraint.toString())); } - else { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE102", String.format( - "InvalidSchemaError: Invalid property \"%s\"",constraintClass))); - return null; + + switch (constraintClass) { + case EQUAL: + return new Equal(propname, proptype, constraint); + case GREATER_THAN: + return new GreaterThan(propname, proptype, constraint); + case GREATER_OR_EQUAL: + return new GreaterOrEqual(propname, proptype, constraint); + case LESS_THAN: + return new LessThan(propname, proptype, constraint); + case LESS_OR_EQUAL: + return new LessOrEqual(propname, proptype, constraint); + case IN_RANGE: + return new InRange(propname, proptype, constraint); + case VALID_VALUES: + return new ValidValues(propname, proptype, constraint); + case LENGTH: + return new Length(propname, proptype, constraint); + case MIN_LENGTH: + return new MinLength(propname, proptype, constraint); + case MAX_LENGTH: + return new MaxLength(propname, proptype, constraint); + case PATTERN: + return new Pattern(propname, proptype, constraint); + default: + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE102", String.format( + "InvalidSchemaError: Invalid property \"%s\"", constraintClass))); + return null; } - } - - protected String constraintKey = "TBD"; - protected ArrayList validTypes = new ArrayList<>(); - protected ArrayList validPropTypes = new ArrayList<>(); - - protected String propertyName; - protected String propertyType; - protected Object constraintValue; - 
protected Object constraintValueMsg; - protected Object valueMsg; - - @SuppressWarnings("unchecked") - public Constraint(String propname,String proptype,Object constraint) { - - _setValues(); - + } + + private String constraintKey = "TBD"; + protected ArrayList validTypes = new ArrayList<>(); + protected ArrayList validPropTypes = new ArrayList<>(); + + protected String propertyName; + private String propertyType; + protected Object constraintValue; + protected Object constraintValueMsg; + protected Object valueMsg; + + @SuppressWarnings("unchecked") + public Constraint(String propname, String proptype, Object constraint) { + + setValues(); + propertyName = propname; propertyType = proptype; - constraintValue = ((LinkedHashMap)constraint).get(constraintKey); + constraintValue = ((LinkedHashMap) constraint).get(constraintKey); constraintValueMsg = constraintValue; boolean bFound = false; - for(String s: ScalarUnit.SCALAR_UNIT_TYPES) { - if(s.equals(propertyType)) { - bFound = true; - break; - } + for (String s : ScalarUnit.SCALAR_UNIT_TYPES) { + if (s.equals(propertyType)) { + bFound = true; + break; + } } - if(bFound) { + if (bFound) { constraintValue = _getScalarUnitConstraintValue(); } // check if constraint is valid for property type bFound = false; - for(String s: validPropTypes) { - if(s.equals(propertyType)) { - bFound = true; - break; - } + for (String s : validPropTypes) { + if (s.equals(propertyType)) { + bFound = true; + break; + } } - if(!bFound) { + if (!bFound) { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE103", String.format( - "InvalidSchemaError: Property \"%s\" is not valid for data type \"%s\"", - constraintKey,propertyType))); + "InvalidSchemaError: Property \"%s\" is not valid for data type \"%s\"", + constraintKey, propertyType))); } - } - - @SuppressWarnings("unchecked") - private Object _getScalarUnitConstraintValue() { - // code differs from Python because of class creation - if(constraintValue 
instanceof ArrayList) { - ArrayList ret = new ArrayList<>(); - for(Object v: (ArrayList)constraintValue) { - ScalarUnit su = ScalarUnit.getScalarunitClass(propertyType,v); - ret.add(su.getNumFromScalarUnit(null)); - } - return ret; - } - else { - ScalarUnit su = ScalarUnit.getScalarunitClass(propertyType,constraintValue); - return su.getNumFromScalarUnit(null); - } - } - - public void validate(Object value) { - if (Function.isFunction(value)){ - //skipping constraints check for functions - return; - } - - valueMsg = value; + } + + public ArrayList getValidTypes() { + return validTypes; + } + + public void addValidTypes(List validTypes) { + this.validTypes.addAll(validTypes); + } + + public ArrayList getValidPropTypes() { + return validPropTypes; + } + + public String getPropertyType() { + return propertyType; + } + + public Object getConstraintValue() { + return constraintValue; + } + + public Object getConstraintValueMsg() { + return constraintValueMsg; + } + + public Object getValueMsg() { + return valueMsg; + } + + public void setConstraintKey(String constraintKey) { + this.constraintKey = constraintKey; + } + + public void setValidTypes(ArrayList validTypes) { + this.validTypes = validTypes; + } + + public void setValidPropTypes(ArrayList validPropTypes) { + this.validPropTypes = validPropTypes; + } + + public void setPropertyType(String propertyType) { + this.propertyType = propertyType; + } + + public void setConstraintValue(Object constraintValue) { + this.constraintValue = constraintValue; + } + + public void setConstraintValueMsg(Object constraintValueMsg) { + this.constraintValueMsg = constraintValueMsg; + } + + public void setValueMsg(Object valueMsg) { + this.valueMsg = valueMsg; + } + + @SuppressWarnings("unchecked") + private Object _getScalarUnitConstraintValue() { + // code differs from Python because of class creation + if (constraintValue instanceof ArrayList) { + ArrayList ret = new ArrayList<>(); + for (Object v : (ArrayList) constraintValue) { 
+ ScalarUnit su = ScalarUnit.getScalarunitClass(propertyType, v); + ret.add(su.getNumFromScalarUnit(null)); + } + return ret; + } else { + ScalarUnit su = ScalarUnit.getScalarunitClass(propertyType, constraintValue); + return su.getNumFromScalarUnit(null); + } + } + + public void validate(Object value) { + if (Function.isFunction(value)) { + //skipping constraints check for functions + return; + } + + valueMsg = value; boolean bFound = false; - for(String s: ScalarUnit.SCALAR_UNIT_TYPES) { - if(s.equals(propertyType)) { - bFound = true; - break; - } + for (String s : ScalarUnit.SCALAR_UNIT_TYPES) { + if (s.equals(propertyType)) { + bFound = true; + break; + } } - if(bFound) { - value = ScalarUnit.getScalarunitValue(propertyType,value,null); + if (bFound) { + value = ScalarUnit.getScalarunitValue(propertyType, value, null); } - if(!_isValid(value)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE008", "ValidationError: " + _errMsg(value))); + if (!isValid(value)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE008", "ValidationError: " + errMsg(value))); } - } + } + + protected abstract boolean isValid(Object value); + + protected abstract void setValues(); - protected abstract boolean _isValid(Object value); - - protected abstract void _setValues(); + protected abstract String errMsg(Object value); - protected abstract String _errMsg(Object value); - } /*python diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Equal.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Equal.java index 16e379a..f480099 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Equal.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Equal.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -20,36 +20,32 @@ package org.onap.sdc.toscaparser.api.elements.constraints; +import java.util.Arrays; + public class Equal extends Constraint { - protected void _setValues() { - - constraintKey = EQUAL; - - for(String s: Schema.PROPERTY_TYPES) { - validPropTypes.add(s); - } - - } - - public Equal(String name,String type,Object c) { - super(name,type,c); - - } - - protected boolean _isValid(Object val) { - // equality of objects is tricky so we're comparing - // the toString() representation - if(val.toString().equals(constraintValue.toString())) { - return true; - } - return false; - } - - protected String _errMsg(Object value) { - return String.format("The value \"%s\" of property \"%s\" is not equal to \"%s\"", - valueMsg,propertyName,constraintValueMsg); - } + protected void setValues() { + + setConstraintKey(EQUAL); + validPropTypes.addAll(Arrays.asList(Schema.PROPERTY_TYPES)); + + } + + public Equal(String name, String type, Object c) { + super(name, type, c); + + } + + protected boolean isValid(Object val) { + // equality of objects is tricky so we're comparing + // the toString() representation + return val.toString().equals(constraintValue.toString()); + } + + protected String errMsg(Object value) { + return String.format("The value \"%s\" of property \"%s\" is not equal to \"%s\"", + valueMsg, propertyName, constraintValueMsg); + } } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/GreaterOrEqual.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/GreaterOrEqual.java index 4d6b1cf..0cb8f36 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/GreaterOrEqual.java +++ 
b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/GreaterOrEqual.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -21,73 +21,69 @@ package org.onap.sdc.toscaparser.api.elements.constraints; import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; - -import java.util.Date; - import org.onap.sdc.toscaparser.api.functions.Function; import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; +import java.util.Arrays; +import java.util.Date; + public class GreaterOrEqual extends Constraint { - // Constraint class for "greater_or_equal" - - // Constrains a property or parameter to a value greater than or equal - // to ('>=') the value declared. 
- - protected void _setValues() { - - constraintKey = GREATER_OR_EQUAL; - - validTypes.add("Integer"); - validTypes.add("Double"); - validTypes.add("Float"); - // timestamps are loaded as Date objects - validTypes.add("Date"); - //validTypes.add("datetime.date"); - //validTypes.add("datetime.time"); - //validTypes.add("datetime.datetime"); - - validPropTypes.add(Schema.INTEGER); - validPropTypes.add(Schema.FLOAT); - validPropTypes.add(Schema.TIMESTAMP); - validPropTypes.add(Schema.SCALAR_UNIT_SIZE); - validPropTypes.add(Schema.SCALAR_UNIT_FREQUENCY); - validPropTypes.add(Schema.SCALAR_UNIT_TIME); - - } - - public GreaterOrEqual(String name,String type,Object c) { - super(name,type,c); - - if(!validTypes.contains(constraintValue.getClass().getSimpleName())) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE104", "InvalidSchemaError: The property \"greater_or_equal\" expects comparable values")); - } - } - - - - @Override - protected boolean _isValid(Object value) { - if(Function.isFunction(value)) { - return true; - } - - // timestamps - if(value instanceof Date) { - if(constraintValue instanceof Date) { - return !((Date)value).before((Date)constraintValue); - } - return false; - } - // all others - Double n1 = new Double(value.toString()); - Double n2 = new Double(constraintValue.toString()); - return n1 >= n2; - } - - protected String _errMsg(Object value) { - return String.format("The value \"%s\" of property \"%s\" must be greater or equal to \"%s\"", - valueMsg,propertyName,constraintValueMsg); - } + // Constraint class for "greater_or_equal" + + // Constrains a property or parameter to a value greater than or equal + // to ('>=') the value declared. 
+ + protected void setValues() { + + setConstraintKey(GREATER_OR_EQUAL); + + // timestamps are loaded as Date objects + addValidTypes(Arrays.asList("Integer", "Double", "Float", "Date")); + //validTypes.add("datetime.date"); + //validTypes.add("datetime.time"); + //validTypes.add("datetime.datetime"); + + validPropTypes.add(Schema.INTEGER); + validPropTypes.add(Schema.FLOAT); + validPropTypes.add(Schema.TIMESTAMP); + validPropTypes.add(Schema.SCALAR_UNIT_SIZE); + validPropTypes.add(Schema.SCALAR_UNIT_FREQUENCY); + validPropTypes.add(Schema.SCALAR_UNIT_TIME); + + } + + public GreaterOrEqual(String name, String type, Object c) { + super(name, type, c); + + if (!validTypes.contains(constraintValue.getClass().getSimpleName())) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE104", "InvalidSchemaError: The property \"greater_or_equal\" expects comparable values")); + } + } + + + @Override + protected boolean isValid(Object value) { + if (Function.isFunction(value)) { + return true; + } + + // timestamps + if (value instanceof Date) { + if (constraintValue instanceof Date) { + return !((Date) value).before((Date) constraintValue); + } + return false; + } + // all others + Double n1 = new Double(value.toString()); + Double n2 = new Double(constraintValue.toString()); + return n1 >= n2; + } + + protected String errMsg(Object value) { + return String.format("The value \"%s\" of property \"%s\" must be greater or equal to \"%s\"", + valueMsg, propertyName, constraintValueMsg); + } } /*python diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/GreaterThan.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/GreaterThan.java index c716821..b501907 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/GreaterThan.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/GreaterThan.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the 
"License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -21,65 +21,62 @@ package org.onap.sdc.toscaparser.api.elements.constraints; import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; +import java.util.Arrays; import java.util.Date; -import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; - public class GreaterThan extends Constraint { - @Override - protected void _setValues() { - - constraintKey = GREATER_THAN; - - validTypes.add("Integer"); - validTypes.add("Double"); - validTypes.add("Float"); - // timestamps are loaded as Date objects - validTypes.add("Date"); - //validTypes.add("datetime.date"); - //validTypes.add("datetime.time"); - //validTypes.add("datetime.datetime"); - - - validPropTypes.add(Schema.INTEGER); - validPropTypes.add(Schema.FLOAT); - validPropTypes.add(Schema.TIMESTAMP); - validPropTypes.add(Schema.SCALAR_UNIT_SIZE); - validPropTypes.add(Schema.SCALAR_UNIT_FREQUENCY); - validPropTypes.add(Schema.SCALAR_UNIT_TIME); - - } - - public GreaterThan(String name,String type,Object c) { - super(name,type,c); - - if(!validTypes.contains(constraintValue.getClass().getSimpleName())) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE105", "InvalidSchemaError: The property \"greater_than\" expects comparable values")); - } - } - - @Override - protected boolean _isValid(Object value) { - - // timestamps - if(value instanceof Date) { - if(constraintValue instanceof Date) { - return ((Date)value).after((Date)constraintValue); - } - return false; - } - - Double n1 = new Double(value.toString()); - Double 
n2 = new Double(constraintValue.toString()); - return n1 > n2; - } - - protected String _errMsg(Object value) { - return String.format("The value \"%s\" of property \"%s\" must be greater than \"%s\"", - valueMsg,propertyName,constraintValueMsg); - } + @Override + protected void setValues() { + + setConstraintKey(GREATER_THAN); + + // timestamps are loaded as Date objects + addValidTypes(Arrays.asList("Integer", "Double", "Float", "Date")); + //validTypes.add("datetime.date"); + //validTypes.add("datetime.time"); + //validTypes.add("datetime.datetime"); + + + validPropTypes.add(Schema.INTEGER); + validPropTypes.add(Schema.FLOAT); + validPropTypes.add(Schema.TIMESTAMP); + validPropTypes.add(Schema.SCALAR_UNIT_SIZE); + validPropTypes.add(Schema.SCALAR_UNIT_FREQUENCY); + validPropTypes.add(Schema.SCALAR_UNIT_TIME); + + } + + public GreaterThan(String name, String type, Object c) { + super(name, type, c); + + if (!validTypes.contains(constraintValue.getClass().getSimpleName())) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE105", "InvalidSchemaError: The property \"greater_than\" expects comparable values")); + } + } + + @Override + protected boolean isValid(Object value) { + + // timestamps + if (value instanceof Date) { + if (constraintValue instanceof Date) { + return ((Date) value).after((Date) constraintValue); + } + return false; + } + + Double n1 = new Double(value.toString()); + Double n2 = new Double(constraintValue.toString()); + return n1 > n2; + } + + protected String errMsg(Object value) { + return String.format("The value \"%s\" of property \"%s\" must be greater than \"%s\"", + valueMsg, propertyName, constraintValueMsg); + } } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/InRange.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/InRange.java index 32719fa..4edf021 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/InRange.java +++ 
b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/InRange.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -23,6 +23,7 @@ package org.onap.sdc.toscaparser.api.elements.constraints; import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; +import java.util.Arrays; import java.util.Date; import java.util.ArrayList; @@ -34,95 +35,89 @@ public class InRange extends Constraint { //the two values declared. private static final String UNBOUNDED = "UNBOUNDED"; - - private Object min,max; - - protected void _setValues() { - - constraintKey = IN_RANGE; - - validTypes.add("Integer"); - validTypes.add("Double"); - validTypes.add("Float"); - validTypes.add("String"); - // timestamps are loaded as Date objects - validTypes.add("Date"); - //validTypes.add("datetime.date"); - //validTypes.add("datetime.time"); - //validTypes.add("datetime.datetime"); - - validPropTypes.add(Schema.INTEGER); - validPropTypes.add(Schema.FLOAT); - validPropTypes.add(Schema.TIMESTAMP); - validPropTypes.add(Schema.SCALAR_UNIT_SIZE); - validPropTypes.add(Schema.SCALAR_UNIT_FREQUENCY); - validPropTypes.add(Schema.SCALAR_UNIT_TIME); - validPropTypes.add(Schema.RANGE); - - } - - @SuppressWarnings("unchecked") - public InRange(String name,String type,Object c) { - super(name,type,c); - - if(!(constraintValue instanceof ArrayList) || ((ArrayList)constraintValue).size() != 2) { + + private Object min, max; + + protected void setValues() { + + setConstraintKey(IN_RANGE); + + // timestamps are loaded as Date 
objects + addValidTypes(Arrays.asList("Integer", "Double", "Float", "String", "Date")); + //validTypes.add("datetime.date"); + //validTypes.add("datetime.time"); + //validTypes.add("datetime.datetime"); + + validPropTypes.add(Schema.INTEGER); + validPropTypes.add(Schema.FLOAT); + validPropTypes.add(Schema.TIMESTAMP); + validPropTypes.add(Schema.SCALAR_UNIT_SIZE); + validPropTypes.add(Schema.SCALAR_UNIT_FREQUENCY); + validPropTypes.add(Schema.SCALAR_UNIT_TIME); + validPropTypes.add(Schema.RANGE); + + } + + @SuppressWarnings("unchecked") + public InRange(String name, String type, Object c) { + super(name, type, c); + + if (!(constraintValue instanceof ArrayList) || ((ArrayList) constraintValue).size() != 2) { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE106", "InvalidSchemaError: The property \"in_range\" expects a list")); - - } - ArrayList alcv = (ArrayList)constraintValue; + } + + ArrayList alcv = (ArrayList) constraintValue; String msg = "The property \"in_range\" expects comparable values"; - for(Object vo: alcv) { - if(!validTypes.contains(vo.getClass().getSimpleName())) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE107", "InvalidSchemaError: " + msg)); - } + for (Object vo : alcv) { + if (!validTypes.contains(vo.getClass().getSimpleName())) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE107", "InvalidSchemaError: " + msg)); + } // The only string we allow for range is the special value 'UNBOUNDED' - if((vo instanceof String) && !((String)vo).equals(UNBOUNDED)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE108", "InvalidSchemaError: " + msg)); + if ((vo instanceof String) && !((String) vo).equals(UNBOUNDED)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE108", "InvalidSchemaError: " + msg)); } } min = alcv.get(0); max = alcv.get(1); - - } - - @Override - 
protected boolean _isValid(Object value) { - - // timestamps - if(value instanceof Date) { - if(min instanceof Date && max instanceof Date) { - return !((Date)value).before((Date)min) && - !((Date)value).after((Date)max); - } - return false; - } - - Double dvalue = new Double(value.toString()); - if(!(min instanceof String)) { - if(dvalue < new Double(min.toString())) { - return false; + + } + + @Override + protected boolean isValid(Object value) { + + // timestamps + if (value instanceof Date) { + if (min instanceof Date && max instanceof Date) { + return !((Date) value).before((Date) min) + && !((Date) value).after((Date) max); } - } - else if(!((String)min).equals(UNBOUNDED)) { return false; } - if(!(max instanceof String)) { - if(dvalue > new Double(max.toString())) { + + Double dvalue = new Double(value.toString()); + if (!(min instanceof String)) { + if (dvalue < new Double(min.toString())) { return false; } + } else if (!((String) min).equals(UNBOUNDED)) { + return false; } - else if(!((String)max).equals(UNBOUNDED)) { + if (!(max instanceof String)) { + if (dvalue > new Double(max.toString())) { + return false; + } + } else if (!((String) max).equals(UNBOUNDED)) { return false; } return true; - } + } - @Override - protected String _errMsg(Object value) { + @Override + protected String errMsg(Object value) { return String.format("The value \"%s\" of property \"%s\" is out of range \"(min:%s, max:%s)\"", - valueMsg,propertyName,min.toString(),max.toString()); - } + valueMsg, propertyName, min.toString(), max.toString()); + } } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Length.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Length.java index 1abdcfd..7988cb8 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Length.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Length.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the 
"License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -23,44 +23,45 @@ package org.onap.sdc.toscaparser.api.elements.constraints; import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; +import java.util.Collections; + public class Length extends Constraint { - // Constraint class for "length" - - // Constrains the property or parameter to a value of a given length. + // Constraint class for "length" - @Override - protected void _setValues() { + // Constrains the property or parameter to a value of a given length. - constraintKey = LENGTH; + @Override + protected void setValues() { - validTypes.add("Integer"); - - validPropTypes.add(Schema.STRING); - - } - - public Length(String name,String type,Object c) { - super(name,type,c); - - if(!validTypes.contains(constraintValue.getClass().getSimpleName())) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE109", "InvalidSchemaError: The property \"length\" expects an integer")); - } - } - - @Override - protected boolean _isValid(Object value) { - if(value instanceof String && constraintValue instanceof Integer && - ((String)value).length() == (Integer)constraintValue) { - return true; - } - return false; - } + setConstraintKey(LENGTH); + addValidTypes(Collections.singletonList("Integer")); + + validPropTypes.add(Schema.STRING); + + } + + public Length(String name, String type, Object c) { + super(name, type, c); + + if (!validTypes.contains(constraintValue.getClass().getSimpleName())) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new 
JToscaValidationIssue("JE109", "InvalidSchemaError: The property \"length\" expects an integer")); + } + } + + @Override + protected boolean isValid(Object value) { + if (value instanceof String && constraintValue instanceof Integer && + ((String) value).length() == (Integer) constraintValue) { + return true; + } + return false; + } - @Override - protected String _errMsg(Object value) { - return String.format("Length of value \"%s\" of property \"%s\" must be equal to \"%s\"", - value.toString(),propertyName,constraintValue.toString()); - } + @Override + protected String errMsg(Object value) { + return String.format("Length of value \"%s\" of property \"%s\" must be equal to \"%s\"", + value.toString(), propertyName, constraintValue.toString()); + } } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/LessOrEqual.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/LessOrEqual.java index 9f1cd65..37a4afc 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/LessOrEqual.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/LessOrEqual.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -21,68 +21,65 @@ package org.onap.sdc.toscaparser.api.elements.constraints; import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; +import java.util.Arrays; import java.util.Date; -import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; - public class LessOrEqual extends Constraint { - // Constraint class for "less_or_equal" - - // Constrains a property or parameter to a value less than or equal - // to ('<=') the value declared. - - protected void _setValues() { - - constraintKey = LESS_OR_EQUAL; - - validTypes.add("Integer"); - validTypes.add("Double"); - validTypes.add("Float"); - // timestamps are loaded as Date objects - validTypes.add("Date"); - //validTypes.add("datetime.date"); - //validTypes.add("datetime.time"); - //validTypes.add("datetime.datetime"); - - validPropTypes.add(Schema.INTEGER); - validPropTypes.add(Schema.FLOAT); - validPropTypes.add(Schema.TIMESTAMP); - validPropTypes.add(Schema.SCALAR_UNIT_SIZE); - validPropTypes.add(Schema.SCALAR_UNIT_FREQUENCY); - validPropTypes.add(Schema.SCALAR_UNIT_TIME); - - } - - public LessOrEqual(String name,String type,Object c) { - super(name,type,c); - - if(!validTypes.contains(constraintValue.getClass().getSimpleName())) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE110", "InvalidSchemaError: The property \"less_or_equal\" expects comparable values")); - } - } - - @Override - protected boolean _isValid(Object value) { - - // timestamps - if(value instanceof Date) { - if(constraintValue instanceof Date) { - return !((Date)value).after((Date)constraintValue); - } - return false; - } - - Double n1 = new Double(value.toString()); - Double n2 = new Double(constraintValue.toString()); - return n1 <= n2; - } - - @Override - protected String _errMsg(Object value) { - return String.format("The value \"%s\" of property \"%s\" must be less or equal to \"%s\"", - 
valueMsg,propertyName,constraintValueMsg); - } + // Constraint class for "less_or_equal" + + // Constrains a property or parameter to a value less than or equal + // to ('<=') the value declared. + + protected void setValues() { + + setConstraintKey(LESS_OR_EQUAL); + + // timestamps are loaded as Date objects + addValidTypes(Arrays.asList("Integer", "Double", "Float", "Date")); + //validTypes.add("datetime.date"); + //validTypes.add("datetime.time"); + //validTypes.add("datetime.datetime"); + + validPropTypes.add(Schema.INTEGER); + validPropTypes.add(Schema.FLOAT); + validPropTypes.add(Schema.TIMESTAMP); + validPropTypes.add(Schema.SCALAR_UNIT_SIZE); + validPropTypes.add(Schema.SCALAR_UNIT_FREQUENCY); + validPropTypes.add(Schema.SCALAR_UNIT_TIME); + + } + + public LessOrEqual(String name, String type, Object c) { + super(name, type, c); + + if (!validTypes.contains(constraintValue.getClass().getSimpleName())) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE110", "InvalidSchemaError: The property \"less_or_equal\" expects comparable values")); + } + } + + @Override + protected boolean isValid(Object value) { + + // timestamps + if (value instanceof Date) { + if (constraintValue instanceof Date) { + return !((Date) value).after((Date) constraintValue); + } + return false; + } + + Double n1 = new Double(value.toString()); + Double n2 = new Double(constraintValue.toString()); + return n1 <= n2; + } + + @Override + protected String errMsg(Object value) { + return String.format("The value \"%s\" of property \"%s\" must be less or equal to \"%s\"", + valueMsg, propertyName, constraintValueMsg); + } } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/LessThan.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/LessThan.java index b893fea..952861d 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/LessThan.java +++ 
b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/LessThan.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -23,63 +23,60 @@ package org.onap.sdc.toscaparser.api.elements.constraints; import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; +import java.util.Arrays; import java.util.Date; public class LessThan extends Constraint { - @Override - protected void _setValues() { - - constraintKey = LESS_THAN; - - validTypes.add("Integer"); - validTypes.add("Double"); - validTypes.add("Float"); - // timestamps are loaded as Date objects - validTypes.add("Date"); - //validTypes.add("datetime.date"); - //validTypes.add("datetime.time"); - //validTypes.add("datetime.datetime"); - - - validPropTypes.add(Schema.INTEGER); - validPropTypes.add(Schema.FLOAT); - validPropTypes.add(Schema.TIMESTAMP); - validPropTypes.add(Schema.SCALAR_UNIT_SIZE); - validPropTypes.add(Schema.SCALAR_UNIT_FREQUENCY); - validPropTypes.add(Schema.SCALAR_UNIT_TIME); - - } - - public LessThan(String name,String type,Object c) { - super(name,type,c); - - if(!validTypes.contains(constraintValue.getClass().getSimpleName())) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE111", "InvalidSchemaError: The property \"less_than\" expects comparable values")); - } - } - - @Override - protected boolean _isValid(Object value) { - - // timestamps - if(value instanceof Date) { - if(constraintValue instanceof Date) { - return ((Date)value).before((Date)constraintValue); - } - 
return false; - } - - Double n1 = new Double(value.toString()); - Double n2 = new Double(constraintValue.toString()); - return n1 < n2; - } - - @Override - protected String _errMsg(Object value) { - return String.format("The value \"%s\" of property \"%s\" must be less than \"%s\"", - valueMsg,propertyName,constraintValueMsg); - } + @Override + protected void setValues() { + + setConstraintKey(LESS_THAN); + // timestamps are loaded as Date objects + addValidTypes(Arrays.asList("Integer", "Double", "Float", "Date")); + //validTypes.add("datetime.date"); + //validTypes.add("datetime.time"); + //validTypes.add("datetime.datetime"); + + + validPropTypes.add(Schema.INTEGER); + validPropTypes.add(Schema.FLOAT); + validPropTypes.add(Schema.TIMESTAMP); + validPropTypes.add(Schema.SCALAR_UNIT_SIZE); + validPropTypes.add(Schema.SCALAR_UNIT_FREQUENCY); + validPropTypes.add(Schema.SCALAR_UNIT_TIME); + + } + + public LessThan(String name, String type, Object c) { + super(name, type, c); + + if (!validTypes.contains(constraintValue.getClass().getSimpleName())) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE111", "InvalidSchemaError: The property \"less_than\" expects comparable values")); + } + } + + @Override + protected boolean isValid(Object value) { + + // timestamps + if (value instanceof Date) { + if (constraintValue instanceof Date) { + return ((Date) value).before((Date) constraintValue); + } + return false; + } + + Double n1 = new Double(value.toString()); + Double n2 = new Double(constraintValue.toString()); + return n1 < n2; + } + + @Override + protected String errMsg(Object value) { + return String.format("The value \"%s\" of property \"%s\" must be less than \"%s\"", + valueMsg, propertyName, constraintValueMsg); + } } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/MaxLength.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/MaxLength.java index 2cb20eb..9068b65 100644 --- 
a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/MaxLength.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/MaxLength.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -21,55 +21,54 @@ package org.onap.sdc.toscaparser.api.elements.constraints; import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; +import java.util.Collections; import java.util.LinkedHashMap; -import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; - public class MaxLength extends Constraint { - // Constraint class for "min_length" - - // Constrains the property or parameter to a value of a maximum length. 
- - @Override - protected void _setValues() { - - constraintKey = MAX_LENGTH; - - validTypes.add("Integer"); - - validPropTypes.add(Schema.STRING); - validPropTypes.add(Schema.MAP); - - } - - public MaxLength(String name,String type,Object c) { - super(name,type,c); - - if(!validTypes.contains(constraintValue.getClass().getSimpleName())) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE112", "InvalidSchemaError: The property \"max_length\" expects an integer")); - } - } - - @SuppressWarnings("unchecked") - @Override - protected boolean _isValid(Object value) { - if(value instanceof String && constraintValue instanceof Integer && - ((String)value).length() <= (Integer)constraintValue) { - return true; - } - else if(value instanceof LinkedHashMap && constraintValue instanceof Integer && - ((LinkedHashMap)value).size() <= (Integer)constraintValue) { - return true; - } - return false; - } - - @Override - protected String _errMsg(Object value) { - return String.format("Length of value \"%s\" of property \"%s\" must be no greater than \"%s\"", - value.toString(),propertyName,constraintValue.toString()); - } + // Constraint class for "min_length" + + // Constrains the property or parameter to a value of a maximum length. 
+ + @Override + protected void setValues() { + + setConstraintKey(MAX_LENGTH); + + addValidTypes(Collections.singletonList("Integer")); + + + validPropTypes.add(Schema.STRING); + validPropTypes.add(Schema.MAP); + + } + + public MaxLength(String name, String type, Object c) { + super(name, type, c); + + if (!validTypes.contains(constraintValue.getClass().getSimpleName())) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE112", "InvalidSchemaError: The property \"max_length\" expects an integer")); + } + } + + @SuppressWarnings("unchecked") + @Override + protected boolean isValid(Object value) { + if (value instanceof String && constraintValue instanceof Integer + && ((String) value).length() <= (Integer) constraintValue) { + return true; + } else { + return value instanceof LinkedHashMap && constraintValue instanceof Integer + && ((LinkedHashMap) value).size() <= (Integer) constraintValue; + } + } + + @Override + protected String errMsg(Object value) { + return String.format("Length of value \"%s\" of property \"%s\" must be no greater than \"%s\"", + value.toString(), propertyName, constraintValue.toString()); + } } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/MinLength.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/MinLength.java index e7d0a9d..eb1d870 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/MinLength.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/MinLength.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -21,55 +21,53 @@ package org.onap.sdc.toscaparser.api.elements.constraints; import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; +import java.util.Collections; import java.util.LinkedHashMap; -import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; - public class MinLength extends Constraint { - // Constraint class for "min_length" - - // Constrains the property or parameter to a value of a minimum length. - - @Override - protected void _setValues() { - - constraintKey = MIN_LENGTH; - - validTypes.add("Integer"); - - validPropTypes.add(Schema.STRING); - validPropTypes.add(Schema.MAP); - - } - - public MinLength(String name,String type,Object c) { - super(name,type,c); - - if(!validTypes.contains(constraintValue.getClass().getSimpleName())) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE113", "InvalidSchemaError: The property \"min_length\" expects an integer")); - } - } - - @SuppressWarnings("unchecked") - @Override - protected boolean _isValid(Object value) { - if(value instanceof String && constraintValue instanceof Integer && - ((String)value).length() >= (Integer)constraintValue) { - return true; - } - else if(value instanceof LinkedHashMap && constraintValue instanceof Integer && - ((LinkedHashMap)value).size() >= (Integer)constraintValue) { - return true; - } - return false; - } - - @Override - protected String _errMsg(Object value) { - return String.format("Length of value \"%s\" of property \"%s\" must be at least \"%s\"", - value.toString(),propertyName,constraintValue.toString()); - } + // Constraint class for 
"min_length" + + // Constrains the property or parameter to a value of a minimum length. + + @Override + protected void setValues() { + + setConstraintKey(MIN_LENGTH); + + addValidTypes(Collections.singletonList("Integer")); + + validPropTypes.add(Schema.STRING); + validPropTypes.add(Schema.MAP); + + } + + public MinLength(String name, String type, Object c) { + super(name, type, c); + + if (!validTypes.contains(constraintValue.getClass().getSimpleName())) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE113", "InvalidSchemaError: The property \"min_length\" expects an integer")); + } + } + + @SuppressWarnings("unchecked") + @Override + protected boolean isValid(Object value) { + if (value instanceof String && constraintValue instanceof Integer + && ((String) value).length() >= (Integer) constraintValue) { + return true; + } else { + return value instanceof LinkedHashMap && constraintValue instanceof Integer + && ((LinkedHashMap) value).size() >= (Integer) constraintValue; + } + } + + @Override + protected String errMsg(Object value) { + return String.format("Length of value \"%s\" of property \"%s\" must be at least \"%s\"", + value.toString(), propertyName, constraintValue.toString()); + } } @@ -77,16 +75,16 @@ public class MinLength extends Constraint { class MinLength(Constraint): """Constraint class for "min_length" - + Constrains the property or parameter to a value to a minimum length. 
""" - + constraint_key = Constraint.MIN_LENGTH - + valid_types = (int, ) - + valid_prop_types = (Schema.STRING, Schema.MAP) - + def __init__(self, property_name, property_type, constraint): super(MinLength, self).__init__(property_name, property_type, constraint) @@ -94,14 +92,14 @@ class MinLength(Constraint): ValidationIsshueCollector.appendException( InvalidSchemaError(message=_('The property "min_length" ' 'expects an integer.'))) - + def _is_valid(self, value): if ((isinstance(value, str) or isinstance(value, dict)) and len(value) >= self.constraint_value): return True - + return False - + def _err_msg(self, value): return (_('Length of value "%(pvalue)s" of property "%(pname)s" ' 'must be at least "%(cvalue)s".') % diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Pattern.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Pattern.java index f1b374e..913e922 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Pattern.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Pattern.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -21,63 +21,62 @@ package org.onap.sdc.toscaparser.api.elements.constraints; import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; +import java.util.Collections; import java.util.regex.Matcher; import java.util.regex.PatternSyntaxException; -import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; - public class Pattern extends Constraint { - @Override - protected void _setValues() { - - constraintKey = PATTERN; - - validTypes.add("String"); - - validPropTypes.add(Schema.STRING); - - } - - - public Pattern(String name,String type,Object c) { - super(name,type,c); - - if(!validTypes.contains(constraintValue.getClass().getSimpleName())) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE114", "InvalidSchemaError: The property \"pattern\" expects a string")); - } - } - - @Override - protected boolean _isValid(Object value) { - try { - if(!(value instanceof String)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE115", String.format("ValueError: Input value \"%s\" to \"pattern\" property \"%s\" must be a string", - value.toString(),propertyName))); - return false; - } - String strp = constraintValue.toString(); - String strm = value.toString(); - java.util.regex.Pattern pattern = java.util.regex.Pattern.compile(strp); - Matcher matcher = pattern.matcher(strm); - if(matcher.find() && matcher.end() == strm.length()) { - return true; - } - return false; - } - catch(PatternSyntaxException pse) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE116", String.format("ValueError: Invalid regex \"%s\" in \"pattern\" property \"%s\"", - constraintValue.toString(),propertyName))); - return false; - } - } - - @Override - protected String _errMsg(Object value) { - return String.format("The value \"%s\" of property \"%s\" does not match the pattern \"%s\"", - 
value.toString(),propertyName,constraintValue.toString()); - } + @Override + protected void setValues() { + + setConstraintKey(PATTERN); + + addValidTypes(Collections.singletonList("String")); + + validPropTypes.add(Schema.STRING); + + } + + + public Pattern(String name, String type, Object c) { + super(name, type, c); + + if (!validTypes.contains(constraintValue.getClass().getSimpleName())) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE114", "InvalidSchemaError: The property \"pattern\" expects a string")); + } + } + + @Override + protected boolean isValid(Object value) { + try { + if (!(value instanceof String)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE115", String.format("ValueError: Input value \"%s\" to \"pattern\" property \"%s\" must be a string", + value.toString(), propertyName))); + return false; + } + String strp = constraintValue.toString(); + String strm = value.toString(); + java.util.regex.Pattern pattern = java.util.regex.Pattern.compile(strp); + Matcher matcher = pattern.matcher(strm); + if (matcher.find() && matcher.end() == strm.length()) { + return true; + } + return false; + } catch (PatternSyntaxException pse) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE116", String.format("ValueError: Invalid regex \"%s\" in \"pattern\" property \"%s\"", + constraintValue.toString(), propertyName))); + return false; + } + } + + @Override + protected String errMsg(Object value) { + return String.format("The value \"%s\" of property \"%s\" does not match the pattern \"%s\"", + value.toString(), propertyName, constraintValue.toString()); + } } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Schema.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Schema.java index 06a9cd0..15ec597 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Schema.java +++ 
b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Schema.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -20,109 +20,110 @@ package org.onap.sdc.toscaparser.api.elements.constraints; +import com.google.common.collect.ImmutableMap; import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.elements.enums.FileSize; import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; import java.util.ArrayList; import java.util.Arrays; -import java.util.HashMap; import java.util.LinkedHashMap; import java.util.Map; public class Schema { - private static final String TYPE = "type"; - private static final String REQUIRED = "required"; - private static final String DESCRIPTION = "description"; - private static final String DEFAULT = "default"; - private static final String CONSTRAINTS = "constraints"; - private static final String STATUS = "status"; - private static final String ENTRYSCHEMA = "entry_schema"; - private static final String KEYS[] = { - TYPE, REQUIRED, DESCRIPTION,DEFAULT, CONSTRAINTS, ENTRYSCHEMA, STATUS}; - - public static final String INTEGER = "integer"; - public static final String STRING = "string"; - public static final String BOOLEAN = "boolean"; - public static final String FLOAT = "float"; - public static final String RANGE = "range"; - public static final String NUMBER = "number"; - public static final String TIMESTAMP = "timestamp"; - public static final String LIST = "list"; - public static final String MAP = "map"; - public static final String SCALAR_UNIT_SIZE = 
"scalar-unit.size"; - public static final String SCALAR_UNIT_FREQUENCY = "scalar-unit.frequency"; - public static final String SCALAR_UNIT_TIME = "scalar-unit.time"; - public static final String VERSION = "version"; - public static final String PORTDEF = "PortDef"; - public static final String PORTSPEC = "PortSpec"; //??? PortSpec.SHORTNAME - public static final String JSON = "json"; - - public static final String PROPERTY_TYPES[] = { - INTEGER, STRING, BOOLEAN, FLOAT, RANGE,NUMBER, TIMESTAMP, LIST, MAP, - SCALAR_UNIT_SIZE, SCALAR_UNIT_FREQUENCY, SCALAR_UNIT_TIME, - VERSION, PORTDEF, PORTSPEC, JSON}; - - public static final String SIMPLE_PROPERTY_TYPES[] = { - INTEGER, STRING, BOOLEAN, FLOAT, RANGE,NUMBER, TIMESTAMP, - SCALAR_UNIT_SIZE, SCALAR_UNIT_FREQUENCY, SCALAR_UNIT_TIME, - VERSION}; - - @SuppressWarnings("unused") - private static final String SCALAR_UNIT_SIZE_DEFAULT = "B"; - - private static Map SCALAR_UNIT_SIZE_DICT = new HashMap<>(); - static { - SCALAR_UNIT_SIZE_DICT.put("B", 1L); - SCALAR_UNIT_SIZE_DICT.put("KB", 1000L); - SCALAR_UNIT_SIZE_DICT.put("KIB", 1024L); - SCALAR_UNIT_SIZE_DICT.put("MB", 1000000L); - SCALAR_UNIT_SIZE_DICT.put("MIB", 1048576L); - SCALAR_UNIT_SIZE_DICT.put("GB", 1000000000L); - SCALAR_UNIT_SIZE_DICT.put("GIB", 1073741824L); - SCALAR_UNIT_SIZE_DICT.put("TB", 1000000000000L); - SCALAR_UNIT_SIZE_DICT.put("TIB", 1099511627776L); - } - - private String name; - private LinkedHashMap schema; - private int _len; - private ArrayList constraintsList; - - - public Schema(String _name,LinkedHashMap _schemaDict) { - name = _name; - - if(!(_schemaDict instanceof LinkedHashMap)) { + private static final String TYPE = "type"; + private static final String REQUIRED = "required"; + private static final String DESCRIPTION = "description"; + private static final String DEFAULT = "default"; + private static final String CONSTRAINTS = "constraints"; + private static final String STATUS = "status"; + private static final String ENTRYSCHEMA = 
"entry_schema"; + private static final String[] KEYS = { + TYPE, REQUIRED, DESCRIPTION, DEFAULT, CONSTRAINTS, ENTRYSCHEMA, STATUS}; + + public static final String INTEGER = "integer"; + public static final String STRING = "string"; + public static final String BOOLEAN = "boolean"; + public static final String FLOAT = "float"; + public static final String RANGE = "range"; + public static final String NUMBER = "number"; + public static final String TIMESTAMP = "timestamp"; + public static final String LIST = "list"; + public static final String MAP = "map"; + public static final String SCALAR_UNIT_SIZE = "scalar-unit.size"; + public static final String SCALAR_UNIT_FREQUENCY = "scalar-unit.frequency"; + public static final String SCALAR_UNIT_TIME = "scalar-unit.time"; + public static final String VERSION = "version"; + public static final String PORTDEF = "PortDef"; + public static final String PORTSPEC = "PortSpec"; //??? PortSpec.SHORTNAME + public static final String JSON = "json"; + + public static final String[] PROPERTY_TYPES = { + INTEGER, STRING, BOOLEAN, FLOAT, RANGE, NUMBER, TIMESTAMP, LIST, MAP, + SCALAR_UNIT_SIZE, SCALAR_UNIT_FREQUENCY, SCALAR_UNIT_TIME, + VERSION, PORTDEF, PORTSPEC, JSON}; + + public static final String[] SIMPLE_PROPERTY_TYPES = { + INTEGER, STRING, BOOLEAN, FLOAT, RANGE, NUMBER, TIMESTAMP, + SCALAR_UNIT_SIZE, SCALAR_UNIT_FREQUENCY, SCALAR_UNIT_TIME, + VERSION}; + + @SuppressWarnings("unused") + private static final String SCALAR_UNIT_SIZE_DEFAULT = "B"; + + private static Map scalarUnitSizeDict = ImmutableMap.builder() + .put("B", FileSize.B) + .put("KB", FileSize.KB) + .put("MB", FileSize.MB) + .put("GB", FileSize.GB) + .put("TB", FileSize.TB) + .put("KIB", FileSize.KIB) + .put("MIB", FileSize.MIB) + .put("GIB", FileSize.GIB) + .put("TIB", FileSize.TIB) + .build(); + + + private String name; + private LinkedHashMap schema; + private int len; + private ArrayList constraintsList; + + + public Schema(String name, LinkedHashMap schemaDict) 
{ + this.name = name; + + if (!(schemaDict instanceof LinkedHashMap)) { //msg = (_('Schema definition of "%(pname)s" must be a dict.') // % dict(pname=name)) ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE117", String.format( - "InvalidSchemaError: Schema definition of \"%s\" must be a dict",name))); + "InvalidSchemaError: Schema definition of \"%s\" must be a dict", this.name))); } - if(_schemaDict.get("type") == null) { + if (schemaDict.get("type") == null) { //msg = (_('Schema definition of "%(pname)s" must have a "type" ' // 'attribute.') % dict(pname=name)) ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE118", String.format( - "InvalidSchemaError: Schema definition of \"%s\" must have a \"type\" attribute",name))); + "InvalidSchemaError: Schema definition of \"%s\" must have a \"type\" attribute", this.name))); } - - schema = _schemaDict; - _len = 0; //??? None + + schema = schemaDict; + len = 0; //??? None constraintsList = new ArrayList<>(); - } + } public String getType() { - return (String)schema.get(TYPE); + return (String) schema.get(TYPE); } public boolean isRequired() { - return (boolean)schema.getOrDefault(REQUIRED, true); + return (boolean) schema.getOrDefault(REQUIRED, true); } public String getDescription() { - return (String)schema.getOrDefault(DESCRIPTION,""); + return (String) schema.getOrDefault(DESCRIPTION, ""); } public Object getDefault() { @@ -130,53 +131,52 @@ public class Schema { } public String getStatus() { - return (String)schema.getOrDefault(STATUS,""); + return (String) schema.getOrDefault(STATUS, ""); } public static boolean isRequestedTypeSimple(String type) { - return Arrays.stream(SIMPLE_PROPERTY_TYPES).anyMatch(t->t.equals(type)); - } + return Arrays.asList(SIMPLE_PROPERTY_TYPES).contains(type); + } @SuppressWarnings("unchecked") - public ArrayList getConstraints() { - if(constraintsList.size() == 0) { - Object cob = schema.get(CONSTRAINTS); - if(cob 
instanceof ArrayList) { - ArrayList constraintSchemata = (ArrayList)cob; - for(Object ob: constraintSchemata) { - if(ob instanceof LinkedHashMap) { - for(String cClass: ((LinkedHashMap)ob).keySet()) { - Constraint c = Constraint.factory(cClass,name,getType(),ob); - if(c != null) { - constraintsList.add(c); - } - else { - // error - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE119", String.format( - "UnknownFieldError: Constraint type \"%s\" for property \"%s\" is not supported", - cClass,name))); - } - break; - } - } - } - } + public ArrayList getConstraints() { + if (constraintsList.size() == 0) { + Object cob = schema.get(CONSTRAINTS); + if (cob instanceof ArrayList) { + ArrayList constraintSchemata = (ArrayList) cob; + for (Object ob : constraintSchemata) { + if (ob instanceof LinkedHashMap) { + for (String cClass : ((LinkedHashMap) ob).keySet()) { + Constraint c = Constraint.factory(cClass, name, getType(), ob); + if (c != null) { + constraintsList.add(c); + } else { + // error + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE119", String.format( + "UnknownFieldError: Constraint type \"%s\" for property \"%s\" is not supported", + cClass, name))); + } + break; + } + } + } + } } return constraintsList; } @SuppressWarnings("unchecked") - public LinkedHashMap getEntrySchema() { - return (LinkedHashMap)schema.get(ENTRYSCHEMA); + public LinkedHashMap getEntrySchema() { + return (LinkedHashMap) schema.get(ENTRYSCHEMA); } - + // Python intrinsic methods... 
// substitute for __getitem__ (aka self[key]) public Object getItem(String key) { - return schema.get(key); + return schema.get(key); } - + /* def __iter__(self): for k in self.KEYS: @@ -187,23 +187,24 @@ public class Schema { else: yield k */ - + // substitute for __len__ (aka self.len()) public int getLen() { - int len = 0; - for(String k: KEYS) { - if(schema.get(k) != null) { - len++; - } - _len = len; - } - return _len; + int len = 0; + for (String k : KEYS) { + if (schema.get(k) != null) { + len++; + } + this.len = len; + } + return this.len; } + // getter - public LinkedHashMap getSchema() { - return schema; + public LinkedHashMap getSchema() { + return schema; } - + } /*python @@ -231,7 +232,7 @@ PROPERTY_TYPES = ( ) SCALAR_UNIT_SIZE_DEFAULT = 'B' -SCALAR_UNIT_SIZE_DICT = {'B': 1, 'KB': 1000, 'KIB': 1024, 'MB': 1000000, +scalarUnitSizeDict = {'B': 1, 'KB': 1000, 'KIB': 1024, 'MB': 1000000, 'MIB': 1048576, 'GB': 1000000000, 'GIB': 1073741824, 'TB': 1000000000000, 'TIB': 1099511627776} @@ -251,7 +252,7 @@ def __init__(self, name, schema_dict): ValidationIssueCollector.appendException(InvalidSchemaError(message=msg)) self.schema = schema_dict - self._len = None + self.len = None self.constraints_list = [] @property @@ -302,7 +303,7 @@ def __iter__(self): yield k def __len__(self): - if self._len is None: - self._len = len(list(iter(self))) - return self._len + if self.len is None: + self.len = len(list(iter(self))) + return self.len */ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/ValidValues.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/ValidValues.java index d09caae..c3a192d 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/ValidValues.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/ValidValues.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -21,49 +21,44 @@ package org.onap.sdc.toscaparser.api.elements.constraints; import java.util.ArrayList; +import java.util.Collections; public class ValidValues extends Constraint { - protected void _setValues() { + protected void setValues() { + setConstraintKey(VALID_VALUES); + Collections.addAll(validPropTypes, Schema.PROPERTY_TYPES); + } + - constraintKey = VALID_VALUES; - - for(String s: Schema.PROPERTY_TYPES) { - validPropTypes.add(s); - } - - } - - - public ValidValues(String name,String type,Object c) { - super(name,type,c); - - } + public ValidValues(String name, String type, Object c) { + super(name, type, c); + } @SuppressWarnings("unchecked") - protected boolean _isValid(Object val) { - if(!(constraintValue instanceof ArrayList)) { - return false; - } - if(val instanceof ArrayList) { - boolean bAll = true; - for(Object v: (ArrayList)val) { - if(!((ArrayList)constraintValue).contains(v)) { - bAll = false; - break; - }; - } - return bAll; - } - return ((ArrayList)constraintValue).contains(val); + protected boolean isValid(Object val) { + if (!(constraintValue instanceof ArrayList)) { + return false; + } + if (val instanceof ArrayList) { + boolean bAll = true; + for (Object v : (ArrayList) val) { + if (!((ArrayList) constraintValue).contains(v)) { + bAll = false; + break; + } + } + return bAll; + } + return ((ArrayList) constraintValue).contains(val); } - protected String _errMsg(Object value) { - return String.format("The value \"%s\" of property \"%s\" is not valid. 
Expected a value from \"%s\"", - value.toString(),propertyName,constraintValue.toString()); + protected String errMsg(Object value) { + return String.format("The value \"%s\" of property \"%s\" is not valid. Expected a value from \"%s\"", + value.toString(), propertyName, constraintValue.toString()); } - + } /*python diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/enums/FileSize.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/enums/FileSize.java new file mode 100644 index 0000000..b07f7fa --- /dev/null +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/enums/FileSize.java @@ -0,0 +1,32 @@ +/* +============LICENSE_START======================================================= + SDC + ================================================================================ + Copyright (C) 2019 Nokia. All rights reserved. + ================================================================================ + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+ ============LICENSE_END========================================================= +*/ +package org.onap.sdc.toscaparser.api.elements.enums; + +public class FileSize { + public static final long B = 1L; + public static final long KB = 1000L; + public static final long MB = 1000000L; + public static final long GB = 1000000000L; + public static final long TB = 1000000000000L; + public static final long KIB = 1000L; + public static final long MIB = 1048576L; + public static final long GIB = 1073741824L; + public static final long TIB = 1099511627776L; +} diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/enums/ToscaElementNames.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/enums/ToscaElementNames.java index 715123b..ac0d837 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/enums/ToscaElementNames.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/elements/enums/ToscaElementNames.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -21,20 +21,20 @@ package org.onap.sdc.toscaparser.api.elements.enums; public enum ToscaElementNames { - - TYPE ("type"), - PROPERTIES ("properties"), - ANNOTATIONS ("annotations"), - SOURCE_TYPE ("source_type"); - - private String name; - - ToscaElementNames(String name){ - this.name = name; - } - public String getName() { - return name; - } - + TYPE("type"), + PROPERTIES("properties"), + ANNOTATIONS("annotations"), + SOURCE_TYPE("source_type"); + + private String name; + + ToscaElementNames(String name) { + this.name = name; + } + + public String getName() { + return name; + } + } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/extensions/ExtTools.java b/src/main/java/org/onap/sdc/toscaparser/api/extensions/ExtTools.java index 8e0915e..5fbfca0 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/extensions/ExtTools.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/extensions/ExtTools.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -39,100 +39,92 @@ import java.util.regex.Pattern; public class ExtTools { - private static Logger log = LoggerFactory.getLogger(ExtTools.class.getName()); - - private static LinkedHashMap EXTENSION_INFO = new LinkedHashMap<>(); - - public ExtTools() { - - EXTENSION_INFO = _loadExtensions(); - } - - private LinkedHashMap _loadExtensions() { - - LinkedHashMap extensions = new LinkedHashMap<>(); - - Reflections reflections = new Reflections("extensions", new ResourcesScanner()); - Set resourcePaths = reflections.getResources(Pattern.compile(".*\\.py$")); - - for(String resourcePath : resourcePaths) { - try (InputStream is = ExtTools.class.getClassLoader().getResourceAsStream(resourcePath); - InputStreamReader isr = new InputStreamReader(is, Charset.forName("UTF-8")); - BufferedReader br = new BufferedReader(isr);){ - String version = null; - ArrayList sections = null; - String defsFile = null; - String line; - - Pattern pattern = Pattern.compile("^([^#]\\S+)\\s*=\\s*(\\S.*)$"); - while ((line = br.readLine()) != null) { - line = line.replace("'", "\""); - Matcher matcher = pattern.matcher(line.toString()); - if (matcher.find()) { - if (matcher.group(1).equals("VERSION")) { - version = matcher.group(2); - if (version.startsWith("'") || version.startsWith("\"")) { - version = version.substring(1, version.length() - 1); - } - } - else if (matcher.group(1).equals("DEFS_FILE")) { - String fn = matcher.group(2); - if (fn.startsWith("'") || fn.startsWith("\"")) { - fn = fn.substring(1, fn.length() - 1); - } - defsFile = resourcePath.replaceFirst("\\w*.py$", fn); - } - else if (matcher.group(1).equals("SECTIONS")) { - sections = new ArrayList<>(); - Pattern secpat = Pattern.compile("\"([^\"]+)\""); - Matcher secmat = secpat.matcher(matcher.group(2)); - while (secmat.find()) { - sections.add(secmat.group(1)); - } - } - } - } - - if (version != null && defsFile != null) { - LinkedHashMap ext = new LinkedHashMap<>(); - ext.put("defs_file", defsFile); - if (sections != null) { 
- ext.put("sections", sections); - } - extensions.put(version, ext); - } - else { - // error - } - } - catch (Exception e) { - log.error("ExtTools - _loadExtensions - {}", e); - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue - ("JE281", "Failed to load extensions" + e.getMessage())); - // ... - } - } - return extensions; - } - - public ArrayList getVersions() { - return new ArrayList(EXTENSION_INFO.keySet()); - } - - public LinkedHashMap> getSections() { - LinkedHashMap> sections = new LinkedHashMap<>(); - for(String version: EXTENSION_INFO.keySet()) { - LinkedHashMap eiv = (LinkedHashMap)EXTENSION_INFO.get(version); - sections.put(version,(ArrayList)eiv.get("sections")); + private static Logger log = LoggerFactory.getLogger(ExtTools.class.getName()); + + private static LinkedHashMap extensionInfo = new LinkedHashMap<>(); + + public ExtTools() { + extensionInfo = loadExtensions(); + } + + private LinkedHashMap loadExtensions() { + + LinkedHashMap extensions = new LinkedHashMap<>(); + + Reflections reflections = new Reflections("extensions", new ResourcesScanner()); + Set resourcePaths = reflections.getResources(Pattern.compile(".*\\.py$")); + + for (String resourcePath : resourcePaths) { + try (InputStream is = ExtTools.class.getClassLoader().getResourceAsStream(resourcePath); + InputStreamReader isr = new InputStreamReader(is, Charset.forName("UTF-8")); + BufferedReader br = new BufferedReader(isr);) { + String version = null; + ArrayList sections = null; + String defsFile = null; + String line; + + Pattern pattern = Pattern.compile("^([^#]\\S+)\\s*=\\s*(\\S.*)$"); + while ((line = br.readLine()) != null) { + line = line.replace("'", "\""); + Matcher matcher = pattern.matcher(line); + if (matcher.find()) { + if (matcher.group(1).equals("VERSION")) { + version = matcher.group(2); + if (version.startsWith("'") || version.startsWith("\"")) { + version = version.substring(1, version.length() - 1); + } + } else if 
(matcher.group(1).equals("DEFS_FILE")) { + String fn = matcher.group(2); + if (fn.startsWith("'") || fn.startsWith("\"")) { + fn = fn.substring(1, fn.length() - 1); + } + defsFile = resourcePath.replaceFirst("\\w*.py$", fn); + } else if (matcher.group(1).equals("SECTIONS")) { + sections = new ArrayList<>(); + Pattern secpat = Pattern.compile("\"([^\"]+)\""); + Matcher secmat = secpat.matcher(matcher.group(2)); + while (secmat.find()) { + sections.add(secmat.group(1)); + } + } + } + } + + if (version != null && defsFile != null) { + LinkedHashMap ext = new LinkedHashMap<>(); + ext.put("defs_file", defsFile); + if (sections != null) { + ext.put("sections", sections); + } + extensions.put(version, ext); + } + } catch (Exception e) { + log.error("ExtTools - loadExtensions - {}", e); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue( + "JE281", "Failed to load extensions" + e.getMessage())); + } + } + return extensions; + } + + public ArrayList getVersions() { + return new ArrayList(extensionInfo.keySet()); + } + + public LinkedHashMap> getSections() { + LinkedHashMap> sections = new LinkedHashMap<>(); + for (String version : extensionInfo.keySet()) { + LinkedHashMap eiv = (LinkedHashMap) extensionInfo.get(version); + sections.put(version, (ArrayList) eiv.get("sections")); } return sections; - } + } + + public String getDefsFile(String version) { + LinkedHashMap eiv = (LinkedHashMap) extensionInfo.get(version); + return (String) eiv.get("defs_file"); + } - public String getDefsFile(String version) { - LinkedHashMap eiv = (LinkedHashMap)EXTENSION_INFO.get(version); - return (String)eiv.get("defs_file"); - } - } /*python @@ -147,7 +139,7 @@ REQUIRED_ATTRIBUTES = ['VERSION', 'DEFS_FILE'] class ExtTools(object): def __init__(self): - self.EXTENSION_INFO = self._load_extensions() + self.extensionInfo = self._load_extensions() def _load_extensions(self): '''Dynamically load all the extensions .''' @@ -193,17 +185,17 @@ class ExtTools(object): 
return extensions def get_versions(self): - return self.EXTENSION_INFO.keys() + return self.extensionInfo.keys() def get_sections(self): sections = {} - for version in self.EXTENSION_INFO.keys(): - sections[version] = self.EXTENSION_INFO[version]['sections'] + for version in self.extensionInfo.keys(): + sections[version] = self.extensionInfo[version]['sections'] return sections def get_defs_file(self, version): - versiondata = self.EXTENSION_INFO.get(version) + versiondata = self.extensionInfo.get(version) if versiondata: return versiondata.get('defs_file') diff --git a/src/main/java/org/onap/sdc/toscaparser/api/functions/Concat.java b/src/main/java/org/onap/sdc/toscaparser/api/functions/Concat.java index d47fd57..4ebeba9 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/functions/Concat.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/functions/Concat.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -44,24 +44,24 @@ public class Concat extends Function { // ':' , // get_attribute: [ server, port ] ] - - public Concat(TopologyTemplate ttpl, Object context, String name, ArrayList args) { - super(ttpl,context,name,args); - } - - @Override - public Object result() { - return this; - } - - @Override - void validate() { - if(args.size() < 1) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE145", - "ValueError: Invalid arguments for function \"concat\". 
" + - "Expected at least one argument")); - } - } + + public Concat(TopologyTemplate ttpl, Object context, String name, ArrayList args) { + super(ttpl, context, name, args); + } + + @Override + public Object result() { + return this; + } + + @Override + void validate() { + if (args.size() < 1) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE145", + "ValueError: Invalid arguments for function \"concat\". " + + "Expected at least one argument")); + } + } } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/functions/Function.java b/src/main/java/org/onap/sdc/toscaparser/api/functions/Function.java index 2b4759f..711a7ca 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/functions/Function.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/functions/Function.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -20,95 +20,99 @@ package org.onap.sdc.toscaparser.api.functions; -import java.util.*; import org.onap.sdc.toscaparser.api.TopologyTemplate; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.Map; + public abstract class Function { - protected static final String GET_PROPERTY = "get_property"; - protected static final String GET_ATTRIBUTE = "get_attribute"; - protected static final String GET_INPUT = "get_input"; - protected static final String GET_OPERATION_OUTPUT = "get_operation_output"; - protected static final String CONCAT = "concat"; - protected static final String TOKEN = "token"; - - protected static final String SELF = "SELF"; - protected static final String HOST = "HOST"; - protected static final String TARGET = "TARGET"; - protected static final String SOURCE = "SOURCE"; - - protected static final String HOSTED_ON = "tosca.relationships.HostedOn"; - - protected static HashMap functionMappings = _getFunctionMappings(); - - private static HashMap _getFunctionMappings() { - HashMap map = new HashMap<>(); - map.put(GET_PROPERTY,"GetProperty"); - map.put(GET_INPUT, "GetInput"); - map.put(GET_ATTRIBUTE, "GetAttribute"); - map.put(GET_OPERATION_OUTPUT, "GetOperationOutput"); - map.put(CONCAT, "Concat"); - map.put(TOKEN, "Token"); - return map; - } - - protected TopologyTemplate toscaTpl; - protected Object context; - protected String name; - protected ArrayList args; - - - public Function(TopologyTemplate _toscaTpl,Object _context,String _name,ArrayList _args) { + protected static final String GET_PROPERTY = "get_property"; + protected static final String GET_ATTRIBUTE = "get_attribute"; + protected static final String GET_INPUT = "get_input"; + protected static final String GET_OPERATION_OUTPUT = "get_operation_output"; + protected static final String CONCAT = "concat"; + protected static final String TOKEN = "token"; + + protected static final String SELF = "SELF"; + protected static final String HOST 
= "HOST"; + protected static final String TARGET = "TARGET"; + protected static final String SOURCE = "SOURCE"; + + protected static final String HOSTED_ON = "tosca.relationships.HostedOn"; + + protected static HashMap functionMappings = _getFunctionMappings(); + + private static HashMap _getFunctionMappings() { + HashMap map = new HashMap<>(); + map.put(GET_PROPERTY, "GetProperty"); + map.put(GET_INPUT, "GetInput"); + map.put(GET_ATTRIBUTE, "GetAttribute"); + map.put(GET_OPERATION_OUTPUT, "GetOperationOutput"); + map.put(CONCAT, "Concat"); + map.put(TOKEN, "Token"); + return map; + } + + protected TopologyTemplate toscaTpl; + protected Object context; + protected String name; + protected ArrayList args; + + + public Function(TopologyTemplate _toscaTpl, Object _context, String _name, ArrayList _args) { toscaTpl = _toscaTpl; context = _context; name = _name; args = _args; validate(); - - } - - abstract Object result(); - - abstract void validate(); - - @SuppressWarnings("unchecked") - public static boolean isFunction(Object funcObj) { - // Returns True if the provided function is a Tosca intrinsic function. - // - //Examples: - // - //* "{ get_property: { SELF, port } }" - //* "{ get_input: db_name }" - //* Function instance - - //:param function: Function as string or a Function instance. - //:return: True if function is a Tosca intrinsic function, otherwise False. - // - - if(funcObj instanceof LinkedHashMap) { - LinkedHashMap function = (LinkedHashMap)funcObj; - if(function.size() == 1) { - String funcName = (new ArrayList(function.keySet())).get(0); - return functionMappings.keySet().contains(funcName); - } - } - return (funcObj instanceof Function); - } - - @SuppressWarnings("unchecked") - public static Object getFunction(TopologyTemplate ttpl,Object context,Object rawFunctionObj, boolean resolveGetInput) { - // Gets a Function instance representing the provided template function. 
- - // If the format provided raw_function format is not relevant for template - // functions or if the function name doesn't exist in function mapping the - // method returns the provided raw_function. - // - // :param tosca_tpl: The tosca template. - // :param node_template: The node template the function is specified for. - // :param raw_function: The raw function as dict. - // :return: Template function as Function instance or the raw_function if - // parsing was unsuccessful. + + } + + abstract Object result(); + + abstract void validate(); + + @SuppressWarnings("unchecked") + public static boolean isFunction(Object funcObj) { + // Returns True if the provided function is a Tosca intrinsic function. + // + //Examples: + // + //* "{ get_property: { SELF, port } }" + //* "{ get_input: db_name }" + //* Function instance + + //:param function: Function as string or a Function instance. + //:return: True if function is a Tosca intrinsic function, otherwise False. + // + + if (funcObj instanceof LinkedHashMap) { + LinkedHashMap function = (LinkedHashMap) funcObj; + if (function.size() == 1) { + String funcName = (new ArrayList(function.keySet())).get(0); + return functionMappings.keySet().contains(funcName); + } + } + return (funcObj instanceof Function); + } + + @SuppressWarnings("unchecked") + public static Object getFunction(TopologyTemplate ttpl, Object context, Object rawFunctionObj, boolean resolveGetInput) { + // Gets a Function instance representing the provided template function. + + // If the format provided raw_function format is not relevant for template + // functions or if the function name doesn't exist in function mapping the + // method returns the provided raw_function. + // + // :param tosca_tpl: The tosca template. + // :param node_template: The node template the function is specified for. + // :param raw_function: The raw function as dict. + // :return: Template function as Function instance or the raw_function if + // parsing was unsuccessful. 
// iterate over leaves of the properties's tree and convert function leaves to function object, @@ -116,85 +120,85 @@ public abstract class Function { // assuming that leaf value of function is always map type contains 1 item (e.g. my_leaf: {get_input: xxx}). if (rawFunctionObj instanceof LinkedHashMap) { // In map type case - LinkedHashMap rawFunction = ((LinkedHashMap) rawFunctionObj); - if(rawFunction.size() == 1 && - !(rawFunction.values().iterator().next() instanceof LinkedHashMap)) { // End point - return getFunctionForObjectItem(ttpl, context, rawFunction, resolveGetInput); - } else { - return getFunctionForMap(ttpl, context, rawFunction, resolveGetInput); - } - } else if (rawFunctionObj instanceof ArrayList) { // In list type case - return getFunctionForList(ttpl, context, (ArrayList) rawFunctionObj, resolveGetInput); - } - - return rawFunctionObj; - } - - private static Object getFunctionForList(TopologyTemplate ttpl, Object context, ArrayList rawFunctionObj, boolean resolveGetInput) { - // iterate over list properties in recursion, convert leaves to function, - // and collect them in the same hierarchy as the original list. 
- ArrayList rawFunctionObjList = new ArrayList<>(); - for (Object rawFunctionObjItem: rawFunctionObj) { + LinkedHashMap rawFunction = ((LinkedHashMap) rawFunctionObj); + if (rawFunction.size() == 1 && + !(rawFunction.values().iterator().next() instanceof LinkedHashMap)) { // End point + return getFunctionForObjectItem(ttpl, context, rawFunction, resolveGetInput); + } else { + return getFunctionForMap(ttpl, context, rawFunction, resolveGetInput); + } + } else if (rawFunctionObj instanceof ArrayList) { // In list type case + return getFunctionForList(ttpl, context, (ArrayList) rawFunctionObj, resolveGetInput); + } + + return rawFunctionObj; + } + + private static Object getFunctionForList(TopologyTemplate ttpl, Object context, ArrayList rawFunctionObj, boolean resolveGetInput) { + // iterate over list properties in recursion, convert leaves to function, + // and collect them in the same hierarchy as the original list. + ArrayList rawFunctionObjList = new ArrayList<>(); + for (Object rawFunctionObjItem : rawFunctionObj) { rawFunctionObjList.add(getFunction(ttpl, context, rawFunctionObjItem, resolveGetInput)); } - return rawFunctionObjList; - } - - private static Object getFunctionForMap(TopologyTemplate ttpl, Object context, LinkedHashMap rawFunction, boolean resolveGetInput) { - // iterate over map nested properties in recursion, convert leaves to function, - // and collect them in the same hierarchy as the original map. 
- LinkedHashMap rawFunctionObjMap = new LinkedHashMap(); - for (Object rawFunctionObjItem: rawFunction.entrySet()) { - Object itemValue = getFunction(ttpl, context, ((Map.Entry)rawFunctionObjItem).getValue(), resolveGetInput); - rawFunctionObjMap.put(((Map.Entry)rawFunctionObjItem).getKey(), itemValue); + return rawFunctionObjList; + } + + private static Object getFunctionForMap(TopologyTemplate ttpl, Object context, LinkedHashMap rawFunction, boolean resolveGetInput) { + // iterate over map nested properties in recursion, convert leaves to function, + // and collect them in the same hierarchy as the original map. + LinkedHashMap rawFunctionObjMap = new LinkedHashMap(); + for (Object rawFunctionObjItem : rawFunction.entrySet()) { + Object itemValue = getFunction(ttpl, context, ((Map.Entry) rawFunctionObjItem).getValue(), resolveGetInput); + rawFunctionObjMap.put(((Map.Entry) rawFunctionObjItem).getKey(), itemValue); + } + return rawFunctionObjMap; + } + + private static Object getFunctionForObjectItem(TopologyTemplate ttpl, Object context, Object rawFunctionObjItem, boolean resolveGetInput) { + if (isFunction(rawFunctionObjItem)) { + LinkedHashMap rawFunction = (LinkedHashMap) rawFunctionObjItem; + String funcName = (new ArrayList(rawFunction.keySet())).get(0); + if (functionMappings.keySet().contains(funcName)) { + String funcType = functionMappings.get(funcName); + Object oargs = (new ArrayList(rawFunction.values())).get(0); + ArrayList funcArgs; + if (oargs instanceof ArrayList) { + funcArgs = (ArrayList) oargs; + } else { + funcArgs = new ArrayList<>(); + funcArgs.add(oargs); + } + + switch (funcType) { + case "GetInput": + if (resolveGetInput) { + GetInput input = new GetInput(ttpl, context, funcName, funcArgs); + return input.result(); + } + return new GetInput(ttpl, context, funcName, funcArgs); + case "GetAttribute": + return new GetAttribute(ttpl, context, funcName, funcArgs); + case "GetProperty": + return new GetProperty(ttpl, context, funcName, 
funcArgs); + case "GetOperationOutput": + return new GetOperationOutput(ttpl, context, funcName, funcArgs); + case "Concat": + return new Concat(ttpl, context, funcName, funcArgs); + case "Token": + return new Token(ttpl, context, funcName, funcArgs); + } + } } - return rawFunctionObjMap; - } - - private static Object getFunctionForObjectItem(TopologyTemplate ttpl, Object context, Object rawFunctionObjItem, boolean resolveGetInput) { - if(isFunction(rawFunctionObjItem)) { - LinkedHashMap rawFunction = (LinkedHashMap) rawFunctionObjItem; - String funcName = (new ArrayList(rawFunction.keySet())).get(0); - if (functionMappings.keySet().contains(funcName)) { - String funcType = functionMappings.get(funcName); - Object oargs = (new ArrayList(rawFunction.values())).get(0); - ArrayList funcArgs; - if (oargs instanceof ArrayList) { - funcArgs = (ArrayList) oargs; - } else { - funcArgs = new ArrayList<>(); - funcArgs.add(oargs); - } - - switch (funcType) { - case "GetInput": - if (resolveGetInput) { - GetInput input = new GetInput(ttpl, context, funcName, funcArgs); - return input.result(); - } - return new GetInput(ttpl, context, funcName, funcArgs); - case "GetAttribute": - return new GetAttribute(ttpl, context, funcName, funcArgs); - case "GetProperty": - return new GetProperty(ttpl, context, funcName, funcArgs); - case "GetOperationOutput": - return new GetOperationOutput(ttpl, context, funcName, funcArgs); - case "Concat": - return new Concat(ttpl, context, funcName, funcArgs); - case "Token": - return new Token(ttpl, context, funcName, funcArgs); - } - } - } - - return rawFunctionObjItem; - } - - @Override - public String toString() { - String argsStr = args.size() > 1 ? args.toString() : args.get(0).toString(); - return name + ":" + argsStr; - } + + return rawFunctionObjItem; + } + + @Override + public String toString() { + String argsStr = args.size() > 1 ? 
args.toString() : args.get(0).toString(); + return name + ":" + argsStr; + } } /*python diff --git a/src/main/java/org/onap/sdc/toscaparser/api/functions/GetAttribute.java b/src/main/java/org/onap/sdc/toscaparser/api/functions/GetAttribute.java index aa85eb2..564d410 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/functions/GetAttribute.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/functions/GetAttribute.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -39,272 +39,272 @@ import org.onap.sdc.toscaparser.api.elements.StatefulEntityType; import org.onap.sdc.toscaparser.api.elements.constraints.Schema; public class GetAttribute extends Function { - // Get an attribute value of an entity defined in the service template - - // Node template attributes values are set in runtime and therefore its the - // responsibility of the Tosca engine to implement the evaluation of - // get_attribute functions. - - // Arguments: - - // * Node template name | HOST. - // * Attribute name. - - // If the HOST keyword is passed as the node template name argument the - // function will search each node template along the HostedOn relationship - // chain until a node which contains the attribute is found. 
- - // Examples: - - // * { get_attribute: [ server, private_address ] } - // * { get_attribute: [ HOST, private_address ] } - // * { get_attribute: [ HOST, private_address, 0 ] } - // * { get_attribute: [ HOST, private_address, 0, some_prop] } - - public GetAttribute(TopologyTemplate ttpl, Object context, String name, ArrayList args) { - super(ttpl, context, name, args); - } - - @Override - void validate() { - if (args.size() < 2) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE146", - "ValueError: Illegal arguments for function \"get_attribute\". Expected arguments: \"node-template-name\", \"req-or-cap\" (optional), \"property name.\"")); - return; - } else if (args.size() == 2) { - _findNodeTemplateContainingAttribute(); - } else { - NodeTemplate nodeTpl = _findNodeTemplate((String) args.get(0)); - if (nodeTpl == null) { - return; - } - int index = 2; - AttributeDef attr = nodeTpl.getTypeDefinition().getAttributeDefValue((String) args.get(1)); - if (attr != null) { - // found - } else { - index = 3; - // then check the req or caps - if (!(args.get(1) instanceof String) || !(args.get(2) instanceof String)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE146", "ValueError: Illegal arguments for function \"get_attribute\". Expected a String argument")); - } - - attr = _findReqOrCapAttribute(args.get(1).toString(), args.get(2).toString()); - if (attr == null) { - return; - } - } - - - String valueType = (String) attr.getSchema().get("type"); - if (args.size() > index) { - for (Object elem : args.subList(index, args.size())) { - if (valueType.equals("list")) { - if (!(elem instanceof Integer)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE147", String.format( - "ValueError: Illegal arguments for function \"get_attribute\" \"%s\". 
Expected positive integer argument", - elem.toString()))); - } - Object ob = attr.getSchema().get("entry_schema"); - valueType = (String) - ((LinkedHashMap) ob).get("type"); - } else if (valueType.equals("map")) { - Object ob = attr.getSchema().get("entry_schema"); - valueType = (String) - ((LinkedHashMap) ob).get("type"); - } else { - boolean bFound = false; - for (String p : Schema.PROPERTY_TYPES) { - if (p.equals(valueType)) { - bFound = true; - break; - } - } - if (bFound) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE148", String.format( - "ValueError: 'Illegal arguments for function \"get_attribute\". Unexpected attribute/index value \"%s\"", - elem))); - return; - } else { // It is a complex type - DataType dataType = new DataType(valueType, null); - LinkedHashMap props = - dataType.getAllProperties(); - PropertyDef prop = props.get((String) elem); - if (prop != null) { - valueType = (String) prop.getSchema().get("type"); - } else { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE149", String.format( - "KeyError: Illegal arguments for function \"get_attribute\". Attribute name \"%s\" not found in \"%\"", - elem, valueType))); - } - } - } - } - } - } - } - - @Override - public Object result() { - return this; - } - - private NodeTemplate getReferencedNodeTemplate() { - // Gets the NodeTemplate instance the get_attribute function refers to - - // If HOST keyword was used as the node template argument, the node - // template which contains the attribute along the HostedOn relationship - // chain will be returned. 
- - return _findNodeTemplateContainingAttribute(); - - } - - // Attributes can be explicitly created as part of the type definition - // or a property name can be implicitly used as an attribute name - private NodeTemplate _findNodeTemplateContainingAttribute() { - NodeTemplate nodeTpl = _findNodeTemplate((String) args.get(0)); - if (nodeTpl != null && - !_attributeExistsInType(nodeTpl.getTypeDefinition()) && - !nodeTpl.getProperties().keySet().contains(getAttributeName())) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE150", String.format( - "KeyError: Attribute \"%s\" was not found in node template \"%s\"", - getAttributeName(), nodeTpl.getName()))); - } - return nodeTpl; - } - - private boolean _attributeExistsInType(StatefulEntityType typeDefinition) { - LinkedHashMap attrsDef = typeDefinition.getAttributesDef(); - return attrsDef.get(getAttributeName()) != null; - } - - private NodeTemplate _findHostContainingAttribute(String nodeTemplateName) { - NodeTemplate nodeTemplate = _findNodeTemplate(nodeTemplateName); - if (nodeTemplate != null) { - LinkedHashMap hostedOnRel = - (LinkedHashMap) EntityType.TOSCA_DEF.get(HOSTED_ON); - for (RequirementAssignment r : nodeTemplate.getRequirements().getAll()) { - String targetName = r.getNodeTemplateName(); - NodeTemplate targetNode = _findNodeTemplate(targetName); - NodeType targetType = (NodeType) targetNode.getTypeDefinition(); - for (CapabilityTypeDef capability : targetType.getCapabilitiesObjects()) { + // Get an attribute value of an entity defined in the service template + + // Node template attributes values are set in runtime and therefore its the + // responsibility of the Tosca engine to implement the evaluation of + // get_attribute functions. + + // Arguments: + + // * Node template name | HOST. + // * Attribute name. 
+ + // If the HOST keyword is passed as the node template name argument the + // function will search each node template along the HostedOn relationship + // chain until a node which contains the attribute is found. + + // Examples: + + // * { get_attribute: [ server, private_address ] } + // * { get_attribute: [ HOST, private_address ] } + // * { get_attribute: [ HOST, private_address, 0 ] } + // * { get_attribute: [ HOST, private_address, 0, some_prop] } + + public GetAttribute(TopologyTemplate ttpl, Object context, String name, ArrayList args) { + super(ttpl, context, name, args); + } + + @Override + void validate() { + if (args.size() < 2) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE146", + "ValueError: Illegal arguments for function \"get_attribute\". Expected arguments: \"node-template-name\", \"req-or-cap\" (optional), \"property name.\"")); + return; + } else if (args.size() == 2) { + _findNodeTemplateContainingAttribute(); + } else { + NodeTemplate nodeTpl = _findNodeTemplate((String) args.get(0)); + if (nodeTpl == null) { + return; + } + int index = 2; + AttributeDef attr = nodeTpl.getTypeDefinition().getAttributeDefValue((String) args.get(1)); + if (attr != null) { + // found + } else { + index = 3; + // then check the req or caps + if (!(args.get(1) instanceof String) || !(args.get(2) instanceof String)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE146", "ValueError: Illegal arguments for function \"get_attribute\". 
Expected a String argument")); + } + + attr = _findReqOrCapAttribute(args.get(1).toString(), args.get(2).toString()); + if (attr == null) { + return; + } + } + + + String valueType = (String) attr.getSchema().get("type"); + if (args.size() > index) { + for (Object elem : args.subList(index, args.size())) { + if (valueType.equals("list")) { + if (!(elem instanceof Integer)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE147", String.format( + "ValueError: Illegal arguments for function \"get_attribute\" \"%s\". Expected positive integer argument", + elem.toString()))); + } + Object ob = attr.getSchema().get("entry_schema"); + valueType = (String) + ((LinkedHashMap) ob).get("type"); + } else if (valueType.equals("map")) { + Object ob = attr.getSchema().get("entry_schema"); + valueType = (String) + ((LinkedHashMap) ob).get("type"); + } else { + boolean bFound = false; + for (String p : Schema.PROPERTY_TYPES) { + if (p.equals(valueType)) { + bFound = true; + break; + } + } + if (bFound) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE148", String.format( + "ValueError: 'Illegal arguments for function \"get_attribute\". Unexpected attribute/index value \"%s\"", + elem))); + return; + } else { // It is a complex type + DataType dataType = new DataType(valueType, null); + LinkedHashMap props = + dataType.getAllProperties(); + PropertyDef prop = props.get((String) elem); + if (prop != null) { + valueType = (String) prop.getSchema().get("type"); + } else { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE149", String.format( + "KeyError: Illegal arguments for function \"get_attribute\". 
Attribute name \"%s\" not found in \"%\"", + elem, valueType))); + } + } + } + } + } + } + } + + @Override + public Object result() { + return this; + } + + private NodeTemplate getReferencedNodeTemplate() { + // Gets the NodeTemplate instance the get_attribute function refers to + + // If HOST keyword was used as the node template argument, the node + // template which contains the attribute along the HostedOn relationship + // chain will be returned. + + return _findNodeTemplateContainingAttribute(); + + } + + // Attributes can be explicitly created as part of the type definition + // or a property name can be implicitly used as an attribute name + private NodeTemplate _findNodeTemplateContainingAttribute() { + NodeTemplate nodeTpl = _findNodeTemplate((String) args.get(0)); + if (nodeTpl != null && + !_attributeExistsInType(nodeTpl.getTypeDefinition()) && + !nodeTpl.getProperties().keySet().contains(getAttributeName())) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE150", String.format( + "KeyError: Attribute \"%s\" was not found in node template \"%s\"", + getAttributeName(), nodeTpl.getName()))); + } + return nodeTpl; + } + + private boolean _attributeExistsInType(StatefulEntityType typeDefinition) { + LinkedHashMap attrsDef = typeDefinition.getAttributesDef(); + return attrsDef.get(getAttributeName()) != null; + } + + private NodeTemplate _findHostContainingAttribute(String nodeTemplateName) { + NodeTemplate nodeTemplate = _findNodeTemplate(nodeTemplateName); + if (nodeTemplate != null) { + LinkedHashMap hostedOnRel = + (LinkedHashMap) EntityType.TOSCA_DEF.get(HOSTED_ON); + for (RequirementAssignment r : nodeTemplate.getRequirements().getAll()) { + String targetName = r.getNodeTemplateName(); + NodeTemplate targetNode = _findNodeTemplate(targetName); + NodeType targetType = (NodeType) targetNode.getTypeDefinition(); + for (CapabilityTypeDef capability : targetType.getCapabilitiesObjects()) { // 
if(((ArrayList)hostedOnRel.get("valid_target_types")).contains(capability.getType())) { - if (capability.inheritsFrom((ArrayList) hostedOnRel.get("valid_target_types"))) { - if (_attributeExistsInType(targetType)) { - return targetNode; - } - return _findHostContainingAttribute(targetName); - } - } - } - } - return null; - } - - - private NodeTemplate _findNodeTemplate(String nodeTemplateName) { - if (nodeTemplateName.equals(HOST)) { - // Currently this is the only way to tell whether the function - // is used within the outputs section of the TOSCA template. - if (context instanceof ArrayList) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE151", - "ValueError: \"get_attribute: [ HOST, ... ]\" is not allowed in \"outputs\" section of the TOSCA template")); - return null; - } - NodeTemplate nodeTpl = _findHostContainingAttribute(SELF); - if (nodeTpl == null) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE152", String.format( - "ValueError: \"get_attribute: [ HOST, ... 
]\" was used in " + - "node template \"%s\" but \"%s\" was not found in " + - "the relationship chain", ((NodeTemplate) context).getName(), HOSTED_ON))); - return null; - } - return nodeTpl; - } - if (nodeTemplateName.equals(TARGET)) { - if (!(((EntityTemplate) context).getTypeDefinition() instanceof RelationshipType)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE153", - "KeyError: \"TARGET\" keyword can only be used in context " + - " to \"Relationships\" target node")); - return null; - } - return ((RelationshipTemplate) context).getTarget(); - } - if (nodeTemplateName.equals(SOURCE)) { - if (!(((EntityTemplate) context).getTypeDefinition() instanceof RelationshipType)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE154", - "KeyError: \"SOURCE\" keyword can only be used in context " + - " to \"Relationships\" source node")); - return null; - } - return ((RelationshipTemplate) context).getTarget(); - } - String name; - if (nodeTemplateName.equals(SELF) && !(context instanceof ArrayList)) { - name = ((NodeTemplate) context).getName(); - } else { - name = nodeTemplateName; - } - for (NodeTemplate nt : toscaTpl.getNodeTemplates()) { - if (nt.getName().equals(name)) { - return nt; - } - } - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE155", String.format( - "KeyError: Node template \"%s\" was not found", nodeTemplateName))); - return null; - } - - public AttributeDef _findReqOrCapAttribute(String reqOrCap, String attrName) { - - NodeTemplate nodeTpl = _findNodeTemplate((String) args.get(0)); - // Find attribute in node template's requirements - for (RequirementAssignment r : nodeTpl.getRequirements().getAll()) { - String nodeName = r.getNodeTemplateName(); - if (r.getName().equals(reqOrCap)) { - NodeTemplate nodeTemplate = _findNodeTemplate(nodeName); - return _getCapabilityAttribute(nodeTemplate, r.getName(), attrName); - } - } - // If 
requirement was not found, look in node template's capabilities - return _getCapabilityAttribute(nodeTpl, reqOrCap, attrName); - } - - private AttributeDef _getCapabilityAttribute(NodeTemplate nodeTemplate, - String capabilityName, - String attrName) { - // Gets a node template capability attribute - CapabilityAssignment cap = nodeTemplate.getCapabilities().getCapabilityByName(capabilityName); - - if (cap != null) { - AttributeDef attribute = null; - LinkedHashMap attrs = - cap.getDefinition().getAttributesDef(); - if (attrs != null && attrs.keySet().contains(attrName)) { - attribute = attrs.get(attrName); - } - if (attribute == null) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE156", String.format( - "KeyError: Attribute \"%s\" was not found in capability \"%s\" of node template \"%s\" referenced from node template \"%s\"", - attrName, capabilityName, nodeTemplate.getName(), ((NodeTemplate) context).getName()))); - } - return attribute; - } - String msg = String.format( - "Requirement/CapabilityAssignment \"%s\" referenced from node template \"%s\" was not found in node template \"%s\"", - capabilityName, ((NodeTemplate) context).getName(), nodeTemplate.getName()); - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE157", "KeyError: " + msg)); - return null; - } - - String getNodeTemplateName() { - return (String) args.get(0); - } - - String getAttributeName() { - return (String) args.get(1); - } + if (capability.inheritsFrom((ArrayList) hostedOnRel.get("valid_target_types"))) { + if (_attributeExistsInType(targetType)) { + return targetNode; + } + return _findHostContainingAttribute(targetName); + } + } + } + } + return null; + } + + + private NodeTemplate _findNodeTemplate(String nodeTemplateName) { + if (nodeTemplateName.equals(HOST)) { + // Currently this is the only way to tell whether the function + // is used within the outputs section of the TOSCA template. 
+ if (context instanceof ArrayList) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE151", + "ValueError: \"get_attribute: [ HOST, ... ]\" is not allowed in \"outputs\" section of the TOSCA template")); + return null; + } + NodeTemplate nodeTpl = _findHostContainingAttribute(SELF); + if (nodeTpl == null) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE152", String.format( + "ValueError: \"get_attribute: [ HOST, ... ]\" was used in " + + "node template \"%s\" but \"%s\" was not found in " + + "the relationship chain", ((NodeTemplate) context).getName(), HOSTED_ON))); + return null; + } + return nodeTpl; + } + if (nodeTemplateName.equals(TARGET)) { + if (!(((EntityTemplate) context).getTypeDefinition() instanceof RelationshipType)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE153", + "KeyError: \"TARGET\" keyword can only be used in context " + + " to \"Relationships\" target node")); + return null; + } + return ((RelationshipTemplate) context).getTarget(); + } + if (nodeTemplateName.equals(SOURCE)) { + if (!(((EntityTemplate) context).getTypeDefinition() instanceof RelationshipType)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE154", + "KeyError: \"SOURCE\" keyword can only be used in context " + + " to \"Relationships\" source node")); + return null; + } + return ((RelationshipTemplate) context).getTarget(); + } + String name; + if (nodeTemplateName.equals(SELF) && !(context instanceof ArrayList)) { + name = ((NodeTemplate) context).getName(); + } else { + name = nodeTemplateName; + } + for (NodeTemplate nt : toscaTpl.getNodeTemplates()) { + if (nt.getName().equals(name)) { + return nt; + } + } + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE155", String.format( + "KeyError: Node template \"%s\" was not found", nodeTemplateName))); + return null; + } + + public 
AttributeDef _findReqOrCapAttribute(String reqOrCap, String attrName) { + + NodeTemplate nodeTpl = _findNodeTemplate((String) args.get(0)); + // Find attribute in node template's requirements + for (RequirementAssignment r : nodeTpl.getRequirements().getAll()) { + String nodeName = r.getNodeTemplateName(); + if (r.getName().equals(reqOrCap)) { + NodeTemplate nodeTemplate = _findNodeTemplate(nodeName); + return _getCapabilityAttribute(nodeTemplate, r.getName(), attrName); + } + } + // If requirement was not found, look in node template's capabilities + return _getCapabilityAttribute(nodeTpl, reqOrCap, attrName); + } + + private AttributeDef _getCapabilityAttribute(NodeTemplate nodeTemplate, + String capabilityName, + String attrName) { + // Gets a node template capability attribute + CapabilityAssignment cap = nodeTemplate.getCapabilities().getCapabilityByName(capabilityName); + + if (cap != null) { + AttributeDef attribute = null; + LinkedHashMap attrs = + cap.getDefinition().getAttributesDef(); + if (attrs != null && attrs.keySet().contains(attrName)) { + attribute = attrs.get(attrName); + } + if (attribute == null) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE156", String.format( + "KeyError: Attribute \"%s\" was not found in capability \"%s\" of node template \"%s\" referenced from node template \"%s\"", + attrName, capabilityName, nodeTemplate.getName(), ((NodeTemplate) context).getName()))); + } + return attribute; + } + String msg = String.format( + "Requirement/CapabilityAssignment \"%s\" referenced from node template \"%s\" was not found in node template \"%s\"", + capabilityName, ((NodeTemplate) context).getName(), nodeTemplate.getName()); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE157", "KeyError: " + msg)); + return null; + } + + String getNodeTemplateName() { + return (String) args.get(0); + } + + String getAttributeName() { + return (String) args.get(1); + } } diff 
--git a/src/main/java/org/onap/sdc/toscaparser/api/functions/GetInput.java b/src/main/java/org/onap/sdc/toscaparser/api/functions/GetInput.java index 026113e..ee5be17 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/functions/GetInput.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/functions/GetInput.java @@ -5,9 +5,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -30,19 +30,19 @@ import java.util.LinkedHashMap; public class GetInput extends Function { - public static final String INDEX = "INDEX"; - public static final String INPUTS = "inputs"; - public static final String TYPE = "type"; - public static final String PROPERTIES = "properties"; - public static final String ENTRY_SCHEMA = "entry_schema"; + public static final String INDEX = "INDEX"; + public static final String INPUTS = "inputs"; + public static final String TYPE = "type"; + public static final String PROPERTIES = "properties"; + public static final String ENTRY_SCHEMA = "entry_schema"; - public GetInput(TopologyTemplate toscaTpl, Object context, String name, ArrayList _args) { - super(toscaTpl,context,name,_args); - - } + public GetInput(TopologyTemplate toscaTpl, Object context, String name, ArrayList _args) { + super(toscaTpl, context, name, _args); - @Override - void validate() { + } + + @Override + void validate() { // if(args.size() != 1) { // //PA - changed to WARNING from CRITICAL after talking to Renana, 22/05/2017 @@ -50,116 +50,114 @@ public class GetInput extends Function { // "ValueError: Expected one argument for function \"get_input\" but received \"%s\"", // 
args.toString())); // } - boolean bFound = false; - for(Input inp: toscaTpl.getInputs()) { - if(inp.getName().equals(args.get(0))) { - bFound = true; - break; - } - } - if(!bFound) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE158", String.format( - "UnknownInputError: Unknown input \"%s\"",args.get(0)))); - } - else if(args.size() > 2){ - LinkedHashMap inputs = (LinkedHashMap)toscaTpl.getTpl().get(INPUTS); - LinkedHashMap data = (LinkedHashMap)inputs.get(getInputName()); - String type ; - - for(int argumentNumber=1;argumentNumber schema = (LinkedHashMap)data.get(ENTRY_SCHEMA); - dataTypeName=(String)schema.get(TYPE); - }else{ - dataTypeName=type; - } - //check property name - LinkedHashMap dataType = (LinkedHashMap)toscaTpl.getCustomDefs().get(dataTypeName); - if(dataType != null) { - LinkedHashMap props = (LinkedHashMap) dataType.get(PROPERTIES); - data = (LinkedHashMap)props.get(args.get(argumentNumber).toString()); - if(data != null) { - bFound = true; - } - } - } - if(!bFound){ - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE282", String.format( - "UnknownDataType: Unknown data type \"%s\"",args.get(argumentNumber)))); - } - } - } - } - - public Object result() { - if(toscaTpl.getParsedParams() != null && - toscaTpl.getParsedParams().get(getInputName()) != null) { - LinkedHashMap ttinp = (LinkedHashMap)toscaTpl.getTpl().get(INPUTS); - LinkedHashMap ttinpinp = (LinkedHashMap)ttinp.get(getInputName()); - String type = (String)ttinpinp.get("type"); - - Object value = DataEntity.validateDatatype( - type, toscaTpl.getParsedParams().get(getInputName()),null,toscaTpl.getCustomDefs(),null); - //SDC resolving Get Input - if (value instanceof ArrayList){ - if(args.size() == 2 && args.get(1) instanceof Integer && ((ArrayList) value).size()> (Integer)args.get(1)){ - return ((ArrayList) value).get((Integer) args.get(1)); - } + boolean bFound = false; + for (Input inp : toscaTpl.getInputs()) { 
+ if (inp.getName().equals(args.get(0))) { + bFound = true; + break; + } + } + if (!bFound) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE158", String.format( + "UnknownInputError: Unknown input \"%s\"", args.get(0)))); + } else if (args.size() > 2) { + LinkedHashMap inputs = (LinkedHashMap) toscaTpl.getTpl().get(INPUTS); + LinkedHashMap data = (LinkedHashMap) inputs.get(getInputName()); + String type; + + for (int argumentNumber = 1; argumentNumber < args.size(); argumentNumber++) { + String dataTypeName = ""; + bFound = false; + if (INDEX.equals(args.get(argumentNumber).toString()) || (args.get(argumentNumber) instanceof Integer)) { + bFound = true; + } else { + type = (String) data.get(TYPE); + //get type name + if (type.equals("list") || type.equals("map")) { + LinkedHashMap schema = (LinkedHashMap) data.get(ENTRY_SCHEMA); + dataTypeName = (String) schema.get(TYPE); + } else { + dataTypeName = type; + } + //check property name + LinkedHashMap dataType = (LinkedHashMap) toscaTpl.getCustomDefs().get(dataTypeName); + if (dataType != null) { + LinkedHashMap props = (LinkedHashMap) dataType.get(PROPERTIES); + data = (LinkedHashMap) props.get(args.get(argumentNumber).toString()); + if (data != null) { + bFound = true; + } + } + } + if (!bFound) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE282", String.format( + "UnknownDataType: Unknown data type \"%s\"", args.get(argumentNumber)))); + } + } + } + } + + public Object result() { + if (toscaTpl.getParsedParams() != null && + toscaTpl.getParsedParams().get(getInputName()) != null) { + LinkedHashMap ttinp = (LinkedHashMap) toscaTpl.getTpl().get(INPUTS); + LinkedHashMap ttinpinp = (LinkedHashMap) ttinp.get(getInputName()); + String type = (String) ttinpinp.get("type"); + + Object value = DataEntity.validateDatatype( + type, toscaTpl.getParsedParams().get(getInputName()), null, toscaTpl.getCustomDefs(), null); + //SDC resolving Get Input 
+ if (value instanceof ArrayList) { + if (args.size() == 2 && args.get(1) instanceof Integer && ((ArrayList) value).size() > (Integer) args.get(1)) { + return ((ArrayList) value).get((Integer) args.get(1)); + } /* commented out for network cloud (SDNC) ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE273",String.format( "GetInputError: cannot resolve input name \"%s\", the expected structure is an argument with a name of input type list and a second argument with an index in the list", args.get(0)))); return null; */ - } - return value; - } - - Input inputDef = null; - for(Input inpDef: toscaTpl.getInputs()) { - if(getInputName().equals(inpDef.getName())) { - inputDef = inpDef; - break; - } - } - if(inputDef != null) { - if (args.size() == 2 && inputDef.getDefault() != null && inputDef.getDefault() instanceof ArrayList){ - if ( args.get(1) instanceof Integer - && ((ArrayList) inputDef.getDefault()).size()> ((Integer)args.get(1)).intValue()) { - return ((ArrayList) inputDef.getDefault()).get(((Integer)args.get(1)).intValue()); - } + } + return value; + } + + Input inputDef = null; + for (Input inpDef : toscaTpl.getInputs()) { + if (getInputName().equals(inpDef.getName())) { + inputDef = inpDef; + break; + } + } + if (inputDef != null) { + if (args.size() == 2 && inputDef.getDefault() != null && inputDef.getDefault() instanceof ArrayList) { + if (args.get(1) instanceof Integer + && ((ArrayList) inputDef.getDefault()).size() > ((Integer) args.get(1)).intValue()) { + return ((ArrayList) inputDef.getDefault()).get(((Integer) args.get(1)).intValue()); + } /* commented out for network cloud (SDNC) ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE274",(String.format( "GetInputError: cannot resolve input Def name \"%s\", the expected structure is an argument with a name of input type list and a second argument with an index in the list", args.get(0))))); return null; */ - } - return 
inputDef.getDefault(); - } - return null; - } - - public String getInputName() { - return (String)args.get(0); - } - - public LinkedHashMap getEntrySchema() { - LinkedHashMap inputs = (LinkedHashMap)toscaTpl.getTpl().get(INPUTS); - LinkedHashMap inputValue = (LinkedHashMap)inputs.get(getInputName()); - return (LinkedHashMap)inputValue.get(ENTRY_SCHEMA); - } - - public ArrayList getArguments(){ - return args; - } + } + return inputDef.getDefault(); + } + return null; + } + + public String getInputName() { + return (String) args.get(0); + } + + public LinkedHashMap getEntrySchema() { + LinkedHashMap inputs = (LinkedHashMap) toscaTpl.getTpl().get(INPUTS); + LinkedHashMap inputValue = (LinkedHashMap) inputs.get(getInputName()); + return (LinkedHashMap) inputValue.get(ENTRY_SCHEMA); + } + + public ArrayList getArguments() { + return args; + } } /*python diff --git a/src/main/java/org/onap/sdc/toscaparser/api/functions/GetOperationOutput.java b/src/main/java/org/onap/sdc/toscaparser/api/functions/GetOperationOutput.java index 2acc79a..06a28d6 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/functions/GetOperationOutput.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/functions/GetOperationOutput.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -35,132 +35,126 @@ import java.util.ArrayList; public class GetOperationOutput extends Function { - public GetOperationOutput(TopologyTemplate ttpl, Object context, String name, ArrayList args) { - super(ttpl,context,name,args); - } - - @Override - public void validate() { - if(args.size() == 4) { - _findNodeTemplate((String)args.get(0)); - String interfaceName = _findInterfaceName((String)args.get(1)); - _findOperationName(interfaceName,(String)args.get(2)); - } - else { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE159", - "ValueError: Illegal arguments for function \"get_operation_output\". " + - "Expected arguments: \"template_name\",\"interface_name\"," + - "\"operation_name\",\"output_variable_name\"")); - } - } - - private String _findInterfaceName(String _interfaceName) { - boolean bFound = false; - for(String sect: InterfacesDef.SECTIONS) { - if(sect.equals(_interfaceName)) { - bFound = true; - break; - } - } - if(bFound) { - return _interfaceName; - } - else { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE160", String.format( - "ValueError: invalid interface name \"%s\" in \"get_operation_output\"", - _interfaceName))); - return null; - } - } - - private String _findOperationName(String interfaceName,String operationName) { - - if(interfaceName.equals("Configure") || - interfaceName.equals("tosca.interfaces.node.relationship.Configure")) { - boolean bFound = false; - for(String sect: StatefulEntityType.interfacesRelationshipConfigureOperations) { - if(sect.equals(operationName)) { - bFound = true; - break; - } - } - if(bFound) { - return operationName; - } - else { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE161", String.format( - "ValueError: Invalid operation of Configure interface \"%s\" in \"get_operation_output\"", - operationName))); - return null; - } - } - if(interfaceName.equals("Standard") || - 
interfaceName.equals("tosca.interfaces.node.lifecycle.Standard")) { - boolean bFound = false; - for(String sect: StatefulEntityType.interfacesNodeLifecycleOperations) { - if(sect.equals(operationName)) { - bFound = true; - break; - } - } - if(bFound) { - return operationName; - } - else { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE162", String.format( - "ValueError: Invalid operation of Configure interface \"%s\" in \"get_operation_output\"", - operationName))); - return null; - } - } - else { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE163", String.format( - "ValueError: Invalid interface name \"%s\" in \"get_operation_output\"", - interfaceName))); - return null; - } - } - - private NodeTemplate _findNodeTemplate(String nodeTemplateName) { - if(nodeTemplateName.equals(TARGET)) { - if(!(((EntityTemplate)context).getTypeDefinition() instanceof RelationshipType)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE164", - "KeyError: \"TARGET\" keyword can only be used in context " + - " to \"Relationships\" target node")); - return null; - } - return ((RelationshipTemplate)context).getTarget(); - } - if(nodeTemplateName.equals(SOURCE)) { - if(!(((EntityTemplate)context).getTypeDefinition() instanceof RelationshipType)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE165", - "KeyError: \"SOURCE\" keyword can only be used in context " + - " to \"Relationships\" source node")); - return null; - } - return ((RelationshipTemplate)context).getTarget(); - } - String name; - if(nodeTemplateName.equals(SELF) && !(context instanceof ArrayList)) { - name = ((NodeTemplate)context).getName(); - } - else { - name = nodeTemplateName; - } - for(NodeTemplate nt: toscaTpl.getNodeTemplates()) { - if(nodeTemplateName.equals(name)) { - return nt; - } - } - ThreadLocalsHolder.getCollector().appendValidationIssue(new 
JToscaValidationIssue("JE166", String.format( - "KeyError: Node template \"%s\" was not found",nodeTemplateName))); - return null; + public GetOperationOutput(TopologyTemplate ttpl, Object context, String name, ArrayList args) { + super(ttpl, context, name, args); } - @Override - public Object result() { - return this; - } + @Override + public void validate() { + if (args.size() == 4) { + _findNodeTemplate((String) args.get(0)); + String interfaceName = _findInterfaceName((String) args.get(1)); + _findOperationName(interfaceName, (String) args.get(2)); + } else { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE159", + "ValueError: Illegal arguments for function \"get_operation_output\". " + + "Expected arguments: \"template_name\",\"interface_name\"," + + "\"operation_name\",\"output_variable_name\"")); + } + } + + private String _findInterfaceName(String _interfaceName) { + boolean bFound = false; + for (String sect : InterfacesDef.SECTIONS) { + if (sect.equals(_interfaceName)) { + bFound = true; + break; + } + } + if (bFound) { + return _interfaceName; + } else { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE160", String.format( + "ValueError: invalid interface name \"%s\" in \"get_operation_output\"", + _interfaceName))); + return null; + } + } + + private String _findOperationName(String interfaceName, String operationName) { + + if (interfaceName.equals("Configure") || + interfaceName.equals("tosca.interfaces.node.relationship.Configure")) { + boolean bFound = false; + for (String sect : StatefulEntityType.INTERFACE_RELATIONSHIP_CONFIGURE_OPERATIONS) { + if (sect.equals(operationName)) { + bFound = true; + break; + } + } + if (bFound) { + return operationName; + } else { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE161", String.format( + "ValueError: Invalid operation of Configure interface \"%s\" in \"get_operation_output\"", + 
operationName))); + return null; + } + } + if (interfaceName.equals("Standard") || + interfaceName.equals("tosca.interfaces.node.lifecycle.Standard")) { + boolean bFound = false; + for (String sect : StatefulEntityType.INTERFACE_NODE_LIFECYCLE_OPERATIONS) { + if (sect.equals(operationName)) { + bFound = true; + break; + } + } + if (bFound) { + return operationName; + } else { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE162", String.format( + "ValueError: Invalid operation of Configure interface \"%s\" in \"get_operation_output\"", + operationName))); + return null; + } + } else { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE163", String.format( + "ValueError: Invalid interface name \"%s\" in \"get_operation_output\"", + interfaceName))); + return null; + } + } + + private NodeTemplate _findNodeTemplate(String nodeTemplateName) { + if (nodeTemplateName.equals(TARGET)) { + if (!(((EntityTemplate) context).getTypeDefinition() instanceof RelationshipType)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE164", + "KeyError: \"TARGET\" keyword can only be used in context " + + " to \"Relationships\" target node")); + return null; + } + return ((RelationshipTemplate) context).getTarget(); + } + if (nodeTemplateName.equals(SOURCE)) { + if (!(((EntityTemplate) context).getTypeDefinition() instanceof RelationshipType)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE165", + "KeyError: \"SOURCE\" keyword can only be used in context " + + " to \"Relationships\" source node")); + return null; + } + return ((RelationshipTemplate) context).getTarget(); + } + String name; + if (nodeTemplateName.equals(SELF) && !(context instanceof ArrayList)) { + name = ((NodeTemplate) context).getName(); + } else { + name = nodeTemplateName; + } + for (NodeTemplate nt : toscaTpl.getNodeTemplates()) { + if 
(nodeTemplateName.equals(name)) { + return nt; + } + } + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE166", String.format( + "KeyError: Node template \"%s\" was not found", nodeTemplateName))); + return null; + } + + @Override + public Object result() { + return this; + } } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/functions/GetProperty.java b/src/main/java/org/onap/sdc/toscaparser/api/functions/GetProperty.java index 2da57ef..90e0a8e 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/functions/GetProperty.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/functions/GetProperty.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -20,14 +20,14 @@ package org.onap.sdc.toscaparser.api.functions; -import org.onap.sdc.toscaparser.api.*; -import org.onap.sdc.toscaparser.api.elements.CapabilityTypeDef; +import org.onap.sdc.toscaparser.api.CapabilityAssignment; +import org.onap.sdc.toscaparser.api.NodeTemplate; +import org.onap.sdc.toscaparser.api.Property; +import org.onap.sdc.toscaparser.api.RelationshipTemplate; +import org.onap.sdc.toscaparser.api.RequirementAssignment; +import org.onap.sdc.toscaparser.api.TopologyTemplate; import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; - -import java.util.ArrayList; -import java.util.LinkedHashMap; - -import org.onap.sdc.toscaparser.api.*; +import org.onap.sdc.toscaparser.api.elements.CapabilityTypeDef; import org.onap.sdc.toscaparser.api.elements.EntityType; import org.onap.sdc.toscaparser.api.elements.NodeType; import org.onap.sdc.toscaparser.api.elements.PropertyDef; @@ -35,340 +35,331 @@ import org.onap.sdc.toscaparser.api.elements.RelationshipType; import org.onap.sdc.toscaparser.api.elements.StatefulEntityType; import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; +import java.util.ArrayList; +import java.util.LinkedHashMap; + public class GetProperty extends Function { - // Get a property value of an entity defined in the same service template - - // Arguments: - - // * Node template name | SELF | HOST | SOURCE | TARGET. - // * Requirement or capability name (optional). - // * Property name. - - // If requirement or capability name is specified, the behavior is as follows: - // The req or cap name is first looked up in the specified node template's - // requirements. - // If found, it would search for a matching capability - // of an other node template and get its property as specified in function - // arguments. - // Otherwise, the req or cap name would be looked up in the specified - // node template's capabilities and if found, it would return the property of - // the capability as specified in function arguments. 
- - // Examples: - - // * { get_property: [ mysql_server, port ] } - // * { get_property: [ SELF, db_port ] } - // * { get_property: [ SELF, database_endpoint, port ] } - // * { get_property: [ SELF, database_endpoint, port, 1 ] } - - - public GetProperty(TopologyTemplate ttpl, Object context, String name, ArrayList args) { - super(ttpl,context,name,args); - } - - @Override - void validate() { - if(args.size() < 2) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE167", - "ValueError: Illegal arguments for function \"get_property\". Expected arguments: \"node-template-name\", \"req-or-cap\" (optional), \"property name.\"")); - return; - } - if(args.size() == 2) { - Property foundProp = _findProperty((String)args.get(1)); - if(foundProp == null) { - return; - } - Object prop = foundProp.getValue(); - if(prop instanceof Function) { - getFunction(toscaTpl,context, prop, toscaTpl.getResolveGetInput()); - } - } - else if(args.size() >= 3) { - // do not use _find_property to avoid raise KeyError - // if the prop is not found - // First check if there is property with this name - NodeTemplate nodeTpl = _findNodeTemplate((String)args.get(0)); - LinkedHashMap props; - if(nodeTpl != null) { - props = nodeTpl.getProperties(); - } - else { - props = new LinkedHashMap<>(); - } - int index = 2; - Object propertyValue; - if(props.get(args.get(1)) != null) { - propertyValue = ((Property)props.get(args.get(1))).getValue(); - } - else { - index = 3; - // then check the req or caps - propertyValue = _findReqOrCapProperty((String)args.get(1),(String)args.get(2)); - } - - if(args.size() > index) { - for(Object elem: args.subList(index,args.size()-1)) { - if(propertyValue instanceof ArrayList) { - int intElem = (int)elem; - propertyValue = _getIndexValue(propertyValue,intElem); - } - else { - propertyValue = _getAttributeValue(propertyValue,(String)elem); - } - } - } - } - } - - @SuppressWarnings("unchecked") - private Object 
_findReqOrCapProperty(String reqOrCap,String propertyName) { - NodeTemplate nodeTpl = _findNodeTemplate((String)args.get(0)); - if(nodeTpl == null) { - return null; + // Get a property value of an entity defined in the same service template + + // Arguments: + + // * Node template name | SELF | HOST | SOURCE | TARGET. + // * Requirement or capability name (optional). + // * Property name. + + // If requirement or capability name is specified, the behavior is as follows: + // The req or cap name is first looked up in the specified node template's + // requirements. + // If found, it would search for a matching capability + // of an other node template and get its property as specified in function + // arguments. + // Otherwise, the req or cap name would be looked up in the specified + // node template's capabilities and if found, it would return the property of + // the capability as specified in function arguments. + + // Examples: + + // * { get_property: [ mysql_server, port ] } + // * { get_property: [ SELF, db_port ] } + // * { get_property: [ SELF, database_endpoint, port ] } + // * { get_property: [ SELF, database_endpoint, port, 1 ] } + + + public GetProperty(TopologyTemplate ttpl, Object context, String name, ArrayList args) { + super(ttpl, context, name, args); + } + + @Override + void validate() { + if (args.size() < 2) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE167", + "ValueError: Illegal arguments for function \"get_property\". 
Expected arguments: \"node-template-name\", \"req-or-cap\" (optional), \"property name.\"")); + return; + } + if (args.size() == 2) { + Property foundProp = _findProperty((String) args.get(1)); + if (foundProp == null) { + return; + } + Object prop = foundProp.getValue(); + if (prop instanceof Function) { + getFunction(toscaTpl, context, prop, toscaTpl.getResolveGetInput()); + } + } else if (args.size() >= 3) { + // do not use _find_property to avoid raise KeyError + // if the prop is not found + // First check if there is property with this name + NodeTemplate nodeTpl = _findNodeTemplate((String) args.get(0)); + LinkedHashMap props; + if (nodeTpl != null) { + props = nodeTpl.getProperties(); + } else { + props = new LinkedHashMap<>(); + } + int index = 2; + Object propertyValue; + if (props.get(args.get(1)) != null) { + propertyValue = ((Property) props.get(args.get(1))).getValue(); + } else { + index = 3; + // then check the req or caps + propertyValue = _findReqOrCapProperty((String) args.get(1), (String) args.get(2)); + } + + if (args.size() > index) { + for (Object elem : args.subList(index, args.size() - 1)) { + if (propertyValue instanceof ArrayList) { + int intElem = (int) elem; + propertyValue = _getIndexValue(propertyValue, intElem); + } else { + propertyValue = _getAttributeValue(propertyValue, (String) elem); + } + } + } + } + } + + @SuppressWarnings("unchecked") + private Object _findReqOrCapProperty(String reqOrCap, String propertyName) { + NodeTemplate nodeTpl = _findNodeTemplate((String) args.get(0)); + if (nodeTpl == null) { + return null; + } + // look for property in node template's requirements + for (RequirementAssignment req : nodeTpl.getRequirements().getAll()) { + String nodeName = req.getNodeTemplateName(); + if (req.getName().equals(reqOrCap)) { + NodeTemplate nodeTemplate = _findNodeTemplate(nodeName); + return _getCapabilityProperty(nodeTemplate, req.getName(), propertyName, true); + } + } + // If requirement was not found, look in node 
template's capabilities + return _getCapabilityProperty(nodeTpl, reqOrCap, propertyName, true); + } + + private Object _getCapabilityProperty(NodeTemplate nodeTemplate, + String capabilityName, + String propertyName, + boolean throwErrors) { + + // Gets a node template capability property + Object property = null; + CapabilityAssignment cap = nodeTemplate.getCapabilities().getCapabilityByName(capabilityName); + if (cap != null) { + LinkedHashMap props = cap.getProperties(); + if (props != null && props.get(propertyName) != null) { + property = ((Property) props.get(propertyName)).getValue(); + } + if (property == null && throwErrors) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE168", String.format( + "KeyError: Property \"%s\" was not found in capability \"%s\" of node template \"%s\" referenced from node template \"%s\"", + propertyName, capabilityName, nodeTemplate.getName(), ((NodeTemplate) context).getName()))); + } + return property; + } + if (throwErrors) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE169", String.format( + "KeyError: Requirement/CapabilityAssignment \"%s\" referenced from node template \"%s\" was not found in node template \"%s\"", + capabilityName, ((NodeTemplate) context).getName(), nodeTemplate.getName()))); + } + + return null; + } + + private Property _findProperty(String propertyName) { + NodeTemplate nodeTpl = _findNodeTemplate((String) args.get(0)); + if (nodeTpl == null) { + return null; + } + LinkedHashMap props = nodeTpl.getProperties(); + Property found = props.get(propertyName); + if (found == null) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE170", String.format( + "KeyError: Property \"%s\" was not found in node template \"%s\"", + propertyName, nodeTpl.getName()))); + } + return found; + } + + private NodeTemplate _findNodeTemplate(String nodeTemplateName) { + if (nodeTemplateName.equals(SELF)) { + 
return (NodeTemplate) context; + } + // enable the HOST value in the function + if (nodeTemplateName.equals(HOST)) { + NodeTemplate node = _findHostContainingProperty(null); + if (node == null) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE171", String.format( + "KeyError: Property \"%s\" was not found in capability \"%s\" of node template \"%s\" referenced from node template \"%s\"", + (String) args.get(2), (String) args.get(1), ((NodeTemplate) context).getName()))); + return null; + } + return node; + } + if (nodeTemplateName.equals(TARGET)) { + if (!(((RelationshipTemplate) context).getTypeDefinition() instanceof RelationshipType)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE172", + "KeyError: \"TARGET\" keyword can only be used in context to \"Relationships\" target node")); + return null; + } + return ((RelationshipTemplate) context).getTarget(); + } + if (nodeTemplateName.equals(SOURCE)) { + if (!(((RelationshipTemplate) context).getTypeDefinition() instanceof RelationshipType)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE173", + "KeyError: \"SOURCE\" keyword can only be used in context to \"Relationships\" target node")); + return null; + } + return ((RelationshipTemplate) context).getSource(); + } + if (toscaTpl.getNodeTemplates() == null) { + return null; } - // look for property in node template's requirements - for(RequirementAssignment req: nodeTpl.getRequirements().getAll()) { - String nodeName = req.getNodeTemplateName(); - if(req.getName().equals(reqOrCap)) { - NodeTemplate nodeTemplate = _findNodeTemplate(nodeName); - return _getCapabilityProperty(nodeTemplate,req.getName(),propertyName,true); - } - } - // If requirement was not found, look in node template's capabilities - return _getCapabilityProperty(nodeTpl,reqOrCap,propertyName,true); - } - - private Object _getCapabilityProperty(NodeTemplate nodeTemplate, - 
String capabilityName, - String propertyName, - boolean throwErrors) { - - // Gets a node template capability property - Object property = null; - CapabilityAssignment cap = nodeTemplate.getCapabilities().getCapabilityByName(capabilityName); - if(cap != null) { - LinkedHashMap props = cap.getProperties(); - if(props != null && props.get(propertyName) != null) { - property = ((Property)props.get(propertyName)).getValue(); - } - if(property == null && throwErrors) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE168", String.format( - "KeyError: Property \"%s\" was not found in capability \"%s\" of node template \"%s\" referenced from node template \"%s\"", - propertyName,capabilityName,nodeTemplate.getName(),((NodeTemplate)context).getName()))); - } - return property; - } - if(throwErrors) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE169", String.format( - "KeyError: Requirement/CapabilityAssignment \"%s\" referenced from node template \"%s\" was not found in node template \"%s\"", - capabilityName,((NodeTemplate)context).getName(),nodeTemplate.getName()))); - } - - return null; - } - - private Property _findProperty(String propertyName) { - NodeTemplate nodeTpl = _findNodeTemplate((String)args.get(0)); - if(nodeTpl == null) { - return null; + for (NodeTemplate nodeTemplate : toscaTpl.getNodeTemplates()) { + if (nodeTemplate.getName().equals(nodeTemplateName)) { + return nodeTemplate; + } } - LinkedHashMap props = nodeTpl.getProperties(); - Property found = props.get(propertyName); - if(found == null) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE170", String.format( - "KeyError: Property \"%s\" was not found in node template \"%s\"", - propertyName,nodeTpl.getName()))); - } - return found; - } - - private NodeTemplate _findNodeTemplate(String nodeTemplateName) { - if(nodeTemplateName.equals(SELF)) { - return (NodeTemplate)context; - } - // 
enable the HOST value in the function - if(nodeTemplateName.equals(HOST)) { - NodeTemplate node = _findHostContainingProperty(null); - if(node == null) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE171", String.format( - "KeyError: Property \"%s\" was not found in capability \"%s\" of node template \"%s\" referenced from node template \"%s\"", - (String)args.get(2),(String)args.get(1),((NodeTemplate)context).getName()))); - return null; - } - return node; - } - if(nodeTemplateName.equals(TARGET)) { - if(!(((RelationshipTemplate)context).getTypeDefinition() instanceof RelationshipType)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE172", - "KeyError: \"TARGET\" keyword can only be used in context to \"Relationships\" target node")); - return null; - } - return ((RelationshipTemplate)context).getTarget(); - } - if(nodeTemplateName.equals(SOURCE)) { - if(!(((RelationshipTemplate)context).getTypeDefinition() instanceof RelationshipType)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE173", - "KeyError: \"SOURCE\" keyword can only be used in context to \"Relationships\" target node")); - return null; - } - return ((RelationshipTemplate)context).getSource(); - } - if(toscaTpl.getNodeTemplates() == null) { - return null; - } - for(NodeTemplate nodeTemplate: toscaTpl.getNodeTemplates()) { - if(nodeTemplate.getName().equals(nodeTemplateName)) { - return nodeTemplate; - } - } - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE174", String.format( - "KeyError: Node template \"%s\" was not found. 
Referenced from Node Template \"%s\"", - nodeTemplateName,((NodeTemplate)context).getName()))); - - return null; - } - - @SuppressWarnings("rawtypes") - private Object _getIndexValue(Object value,int index) { - if(value instanceof ArrayList) { - if(index < ((ArrayList)value).size()) { - return ((ArrayList)value).get(index); - } - else { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE175", String.format( - "KeyError: Property \"%s\" found in capability \"%s\" referenced from node template \"%s\" must have an element with index %d", - args.get(2),args.get(1),((NodeTemplate)context).getName(),index))); - - } - } - else { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE174", String.format( + "KeyError: Node template \"%s\" was not found. Referenced from Node Template \"%s\"", + nodeTemplateName, ((NodeTemplate) context).getName()))); + + return null; + } + + @SuppressWarnings("rawtypes") + private Object _getIndexValue(Object value, int index) { + if (value instanceof ArrayList) { + if (index < ((ArrayList) value).size()) { + return ((ArrayList) value).get(index); + } else { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE175", String.format( + "KeyError: Property \"%s\" found in capability \"%s\" referenced from node template \"%s\" must have an element with index %d", + args.get(2), args.get(1), ((NodeTemplate) context).getName(), index))); + + } + } else { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE176", String.format( - "KeyError: Property \"%s\" found in capability \"%s\" referenced from node template \"%s\" must be a list", - args.get(2),args.get(1),((NodeTemplate)context).getName()))); - } - return null; - } - - @SuppressWarnings("unchecked") - private Object _getAttributeValue(Object value,String attribute) { - if(value instanceof LinkedHashMap) { - Object ov = ((LinkedHashMap)value).get(attribute); - 
if(ov != null) { - return ov; - } - else { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE177", String.format( - "KeyError: Property \"%s\" found in capability \"%s\" referenced from node template \"%s\" must have an attribute named \"%s\"", - args.get(2),args.get(1),((NodeTemplate)context).getName(),attribute))); - } - } - else { + "KeyError: Property \"%s\" found in capability \"%s\" referenced from node template \"%s\" must be a list", + args.get(2), args.get(1), ((NodeTemplate) context).getName()))); + } + return null; + } + + @SuppressWarnings("unchecked") + private Object _getAttributeValue(Object value, String attribute) { + if (value instanceof LinkedHashMap) { + Object ov = ((LinkedHashMap) value).get(attribute); + if (ov != null) { + return ov; + } else { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE177", String.format( + "KeyError: Property \"%s\" found in capability \"%s\" referenced from node template \"%s\" must have an attribute named \"%s\"", + args.get(2), args.get(1), ((NodeTemplate) context).getName(), attribute))); + } + } else { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE178", String.format( - "KeyError: Property \"%s\" found in capability \"%s\" referenced from node template \"%s\" must be a dict", - args.get(2),args.get(1),((NodeTemplate)context).getName()))); - } - return null; - } - - // Add this functions similar to get_attribute case - private NodeTemplate _findHostContainingProperty(String nodeTemplateName) { - if(nodeTemplateName == null) { - nodeTemplateName = SELF; - } - NodeTemplate nodeTemplate = _findNodeTemplate(nodeTemplateName); - LinkedHashMap hostedOnRel = (LinkedHashMap) - EntityType.TOSCA_DEF.get(HOSTED_ON); - for(RequirementAssignment requirement: nodeTemplate.getRequirements().getAll()) { - String targetName = requirement.getNodeTemplateName(); - NodeTemplate targetNode = 
_findNodeTemplate(targetName); - NodeType targetType = (NodeType)targetNode.getTypeDefinition(); - for(CapabilityTypeDef capDef: targetType.getCapabilitiesObjects()) { - if(capDef.inheritsFrom((ArrayList)hostedOnRel.get("valid_target_types"))) { - if(_propertyExistsInType(targetType)) { - return targetNode; - } - // If requirement was not found, look in node - // template's capabilities - if(args.size() > 2 && - _getCapabilityProperty(targetNode,(String)args.get(1),(String)args.get(2),false) != null) { - return targetNode; - } - - return _findHostContainingProperty(targetName); - } - } - - } - return null; - } - - private boolean _propertyExistsInType(StatefulEntityType typeDefinition) { - LinkedHashMap propsDef = typeDefinition.getPropertiesDef(); - return propsDef.keySet().contains((String)args.get(1)); - } - - @Override - public Object result() { + "KeyError: Property \"%s\" found in capability \"%s\" referenced from node template \"%s\" must be a dict", + args.get(2), args.get(1), ((NodeTemplate) context).getName()))); + } + return null; + } + + // Add this functions similar to get_attribute case + private NodeTemplate _findHostContainingProperty(String nodeTemplateName) { + if (nodeTemplateName == null) { + nodeTemplateName = SELF; + } + NodeTemplate nodeTemplate = _findNodeTemplate(nodeTemplateName); + LinkedHashMap hostedOnRel = (LinkedHashMap) + EntityType.TOSCA_DEF.get(HOSTED_ON); + for (RequirementAssignment requirement : nodeTemplate.getRequirements().getAll()) { + String targetName = requirement.getNodeTemplateName(); + NodeTemplate targetNode = _findNodeTemplate(targetName); + NodeType targetType = (NodeType) targetNode.getTypeDefinition(); + for (CapabilityTypeDef capDef : targetType.getCapabilitiesObjects()) { + if (capDef.inheritsFrom((ArrayList) hostedOnRel.get("valid_target_types"))) { + if (_propertyExistsInType(targetType)) { + return targetNode; + } + // If requirement was not found, look in node + // template's capabilities + if (args.size() > 
2 && + _getCapabilityProperty(targetNode, (String) args.get(1), (String) args.get(2), false) != null) { + return targetNode; + } + + return _findHostContainingProperty(targetName); + } + } + + } + return null; + } + + private boolean _propertyExistsInType(StatefulEntityType typeDefinition) { + LinkedHashMap propsDef = typeDefinition.getPropertiesDef(); + return propsDef.keySet().contains((String) args.get(1)); + } + + @Override + public Object result() { Object propertyValue; - if(args.size() >= 3) { - // First check if there is property with this name - NodeTemplate nodeTpl = _findNodeTemplate((String)args.get(0)); - LinkedHashMap props; - if(nodeTpl != null) { - props = nodeTpl.getProperties(); - } - else { - props = new LinkedHashMap<>(); - } - int index = 2; - if(props.get(args.get(1)) != null) { - propertyValue = ((Property)props.get(args.get(1))).getValue(); - } - else { - index = 3; - // then check the req or caps - propertyValue = _findReqOrCapProperty((String)args.get(1),(String)args.get(2)); - } - - if(args.size() > index) { - for(Object elem: args.subList(index,args.size()-1)) { - if(propertyValue instanceof ArrayList) { - int intElem = (int)elem; - propertyValue = _getIndexValue(propertyValue,intElem); - } - else { - propertyValue = _getAttributeValue(propertyValue,(String)elem); - } - } - } - } - else { - propertyValue = _findProperty((String)args.get(1)).getValue(); - } - if(propertyValue instanceof Function) { - return ((Function)propertyValue).result(); + if (args.size() >= 3) { + // First check if there is property with this name + NodeTemplate nodeTpl = _findNodeTemplate((String) args.get(0)); + LinkedHashMap props; + if (nodeTpl != null) { + props = nodeTpl.getProperties(); + } else { + props = new LinkedHashMap<>(); + } + int index = 2; + if (props.get(args.get(1)) != null) { + propertyValue = ((Property) props.get(args.get(1))).getValue(); + } else { + index = 3; + // then check the req or caps + propertyValue = _findReqOrCapProperty((String) 
args.get(1), (String) args.get(2)); + } + + if (args.size() > index) { + for (Object elem : args.subList(index, args.size() - 1)) { + if (propertyValue instanceof ArrayList) { + int intElem = (int) elem; + propertyValue = _getIndexValue(propertyValue, intElem); + } else { + propertyValue = _getAttributeValue(propertyValue, (String) elem); + } + } + } + } else { + propertyValue = _findProperty((String) args.get(1)).getValue(); + } + if (propertyValue instanceof Function) { + return ((Function) propertyValue).result(); + } + return getFunction(toscaTpl, context, propertyValue, toscaTpl.getResolveGetInput()); + } + + public String getNodeTemplateName() { + return (String) args.get(0); + } + + public String getPropertyName() { + if (args.size() > 2) { + return (String) args.get(2); } - return getFunction(toscaTpl,context,propertyValue, toscaTpl.getResolveGetInput()); - } - - public String getNodeTemplateName() { - return (String)args.get(0); - } - - public String getPropertyName() { - if(args.size() > 2) { - return (String)args.get(2); - } - return (String)args.get(1); - } - - public String getReqorCap() { - if(args.size() > 2) { - return (String)args.get(1); - } - return null; - } - + return (String) args.get(1); + } + + public String getReqorCap() { + if (args.size() > 2) { + return (String) args.get(1); + } + return null; + } + } /*python diff --git a/src/main/java/org/onap/sdc/toscaparser/api/functions/Token.java b/src/main/java/org/onap/sdc/toscaparser/api/functions/Token.java index e8e160e..240ce85 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/functions/Token.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/functions/Token.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -46,39 +46,38 @@ public class Token extends Function { //Example: - // [ get_attribute: [ my_server, data_endpoint, ip_address ], ':', 1 ] + // [ get_attribute: [ my_server, data_endpoint, ip_address ], ':', 1 ] - public Token(TopologyTemplate ttpl, Object context, String name, ArrayList args) { - super(ttpl,context,name,args); - } - - @Override - public Object result() { - return this; - } + public Token(TopologyTemplate ttpl, Object context, String name, ArrayList args) { + super(ttpl, context, name, args); + } - @Override - void validate() { - if(args.size() < 3) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE180", - "ValueError: Invalid arguments for function \"token\". " + - "Expected at least three arguments")); - } - else { - if(!(args.get(1) instanceof String) || - ((String)args.get(1)).length() != 1) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE181", + @Override + public Object result() { + return this; + } + + @Override + void validate() { + if (args.size() < 3) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE180", "ValueError: Invalid arguments for function \"token\". " + - "Expected single char value as second argument")); + "Expected at least three arguments")); + } else { + if (!(args.get(1) instanceof String) || + ((String) args.get(1)).length() != 1) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE181", + "ValueError: Invalid arguments for function \"token\". 
" + + "Expected single char value as second argument")); } - if(!(args.get(2) instanceof Integer)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE182", - "ValueError: Invalid arguments for function \"token\"" + - "Expected integer value as third argument")); - } - } - } + if (!(args.get(2) instanceof Integer)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE182", + "ValueError: Invalid arguments for function \"token\"" + + "Expected integer value as third argument")); + } + } + } } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/parameters/Annotation.java b/src/main/java/org/onap/sdc/toscaparser/api/parameters/Annotation.java index 397c637..a34ebb5 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/parameters/Annotation.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/parameters/Annotation.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -20,77 +20,79 @@ package org.onap.sdc.toscaparser.api.parameters; +import org.onap.sdc.toscaparser.api.Property; +import org.onap.sdc.toscaparser.api.elements.enums.ToscaElementNames; + import java.util.ArrayList; import java.util.Map; import java.util.Optional; import java.util.stream.Collectors; -import org.onap.sdc.toscaparser.api.Property; -import org.onap.sdc.toscaparser.api.elements.enums.ToscaElementNames; +public class Annotation { + + private static final String HEAT = "HEAT"; + private String name; + private String type; + private ArrayList properties; + + + public Annotation() { + } + + @SuppressWarnings("unchecked") + public Annotation(Map.Entry annotationEntry) { + if (annotationEntry != null) { + name = annotationEntry.getKey(); + Map annValue = (Map) annotationEntry.getValue(); + type = (String) annValue.get(ToscaElementNames.TYPE.getName()); + properties = fetchProperties((Map) annValue.get(ToscaElementNames.PROPERTIES.getName())); + } + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String getType() { + return type; + } + + public void setType(String type) { + this.type = type; + } + + public ArrayList getProperties() { + return properties; + } + + public void setProperties(ArrayList properties) { + this.properties = properties; + } + + private ArrayList fetchProperties(Map properties) { + if (properties != null) { + return (ArrayList) properties.entrySet().stream() + .map(Property::new) + .collect(Collectors.toList()); + } + return null; + } + + public boolean isHeatSourceType() { + if (properties == null) { + return false; + } + Optional sourceType = properties.stream() + .filter(p -> p.getName().equals(ToscaElementNames.SOURCE_TYPE.getName())) + .findFirst(); + if (!sourceType.isPresent()) { + return false; + } + return sourceType.get().getValue() != null && ((String) sourceType.get().getValue()).equals(HEAT); + } -public class Annotation{ - - private final static 
String HEAT = "HEAT"; - - private String name; - private String type; - private ArrayList properties; - - public Annotation(){} - @SuppressWarnings("unchecked") - public Annotation(Map.Entry annotationEntry){ - if(annotationEntry != null){ - name = annotationEntry.getKey(); - Map annValue = (Map) annotationEntry.getValue(); - type = (String) annValue.get(ToscaElementNames.TYPE.getName()); - properties = fetchProperties((Map) annValue.get(ToscaElementNames.PROPERTIES.getName())); - } - } - - public String getName() { - return name; - } - - public void setName(String name) { - this.name = name; - } - - public String getType() { - return type; - } - - public void setType(String type) { - this.type = type; - } - - public ArrayList getProperties() { - return properties; - } - - public void setProperties(ArrayList properties) { - this.properties = properties; - } - - private ArrayList fetchProperties(Map properties) { - if(properties != null){ - return (ArrayList) properties.entrySet().stream() - .map(Property::new) - .collect(Collectors.toList()); - } - return null; - } - - public boolean isHeatSourceType(){ - if(properties == null){ - return false; - } - Optional sourceType = properties.stream() - .filter(p -> p.getName().equals(ToscaElementNames.SOURCE_TYPE.getName())) - .findFirst(); - if(!sourceType.isPresent()){ - return false; - } - return sourceType.get().getValue() != null && ((String)sourceType.get().getValue()).equals(HEAT); - } - } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/parameters/Input.java b/src/main/java/org/onap/sdc/toscaparser/api/parameters/Input.java index 106fe94..5d3ecb4 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/parameters/Input.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/parameters/Input.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -20,12 +20,6 @@ package org.onap.sdc.toscaparser.api.parameters; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.LinkedHashMap; -import java.util.Map; -import java.util.stream.Collectors; - import org.onap.sdc.toscaparser.api.DataEntity; import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; import org.onap.sdc.toscaparser.api.elements.EntityType; @@ -34,171 +28,172 @@ import org.onap.sdc.toscaparser.api.elements.constraints.Schema; import org.onap.sdc.toscaparser.api.elements.enums.ToscaElementNames; import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.LinkedHashMap; +import java.util.Map; +import java.util.stream.Collectors; + public class Input { - - private static final String TYPE = "type"; - private static final String DESCRIPTION = "description"; - private static final String DEFAULT = "default"; - private static final String CONSTRAINTS = "constraints"; - private static final String REQUIRED = "required"; - private static final String STATUS = "status"; - private static final String ENTRY_SCHEMA = "entry_schema"; - - public static final String INTEGER = "integer"; - public static final String STRING = "string"; - public static final String BOOLEAN = "boolean"; - public static final String FLOAT = "float"; - public static final String LIST = "list"; - public static final String MAP = "map"; - public static final String JSON = "json"; - - private static String INPUTFIELD[] = { - TYPE, DESCRIPTION, DEFAULT, CONSTRAINTS, REQUIRED,STATUS, ENTRY_SCHEMA + + private static final String TYPE = "type"; + private static final String 
DESCRIPTION = "description"; + private static final String DEFAULT = "default"; + private static final String CONSTRAINTS = "constraints"; + private static final String REQUIRED = "required"; + private static final String STATUS = "status"; + private static final String ENTRY_SCHEMA = "entry_schema"; + + public static final String INTEGER = "integer"; + public static final String STRING = "string"; + public static final String BOOLEAN = "boolean"; + public static final String FLOAT = "float"; + public static final String LIST = "list"; + public static final String MAP = "map"; + public static final String JSON = "json"; + + private static String[] inputField = { + TYPE, DESCRIPTION, DEFAULT, CONSTRAINTS, REQUIRED, STATUS, ENTRY_SCHEMA }; - - private static String PRIMITIVE_TYPES[] = { - INTEGER, STRING, BOOLEAN, FLOAT, LIST, MAP, JSON + + private static String[] primitiveTypes = { + INTEGER, STRING, BOOLEAN, FLOAT, LIST, MAP, JSON }; - + private String name; private Schema schema; - private LinkedHashMap customDefs; - private Map annotations; - - public Input(){ - /** - * Added to support Input serialization - */ - } - - public Input(String _name,LinkedHashMap _schemaDict,LinkedHashMap _customDefs) { - name = _name; - schema = new Schema(_name,_schemaDict); - customDefs = _customDefs; - } - - @SuppressWarnings("unchecked") - public void parseAnnotations() { - if(schema.getSchema() != null){ - LinkedHashMap annotations = (LinkedHashMap) schema.getSchema().get(ToscaElementNames.ANNOTATIONS.getName()); - if(annotations != null){ - setAnnotations(annotations.entrySet().stream() - .map(Annotation::new) - .filter(Annotation::isHeatSourceType) - .collect(Collectors.toMap(a -> a.getName(), a -> a))); - } - } - } - - public String getName() { - return name; - } - - public String getType() { - return schema.getType(); - } - - public String getDescription() { - return schema.getDescription(); - } - - public boolean isRequired() { - return schema.isRequired(); - } - - public 
Object getDefault() { - return schema.getDefault(); - } - - public ArrayList getConstraints() { - return schema.getConstraints(); - } + private LinkedHashMap customDefs; + private Map annotations; + + public Input() { + } + + public Input(String name, LinkedHashMap schema, LinkedHashMap customDefinitions) { + this.name = name; + this.schema = new Schema(name, schema); + customDefs = customDefinitions; + } + + @SuppressWarnings("unchecked") + public void parseAnnotations() { + if (schema.getSchema() != null) { + LinkedHashMap annotations = (LinkedHashMap) schema.getSchema().get(ToscaElementNames.ANNOTATIONS.getName()); + if (annotations != null) { + setAnnotations(annotations.entrySet().stream() + .map(Annotation::new) + .filter(Annotation::isHeatSourceType) + .collect(Collectors.toMap(Annotation::getName, a -> a))); + } + } + } + + public String getName() { + return name; + } + + public String getType() { + return schema.getType(); + } + + public String getDescription() { + return schema.getDescription(); + } + + public boolean isRequired() { + return schema.isRequired(); + } + + public Object getDefault() { + return schema.getDefault(); + } + + public ArrayList getConstraints() { + return schema.getConstraints(); + } public void validate(Object value) { - _validateField(); - _validateType(getType()); - if(value != null) { - _validateValue(value); + validateField(); + validateType(getType()); + if (value != null) { + validateValue(value); } } - private void _validateField() { - for(String key: schema.getSchema().keySet()) { - boolean bFound = false; - for(String ifld: INPUTFIELD) { - if(key.equals(ifld)) { - bFound = true; - break; - } - } - if(!bFound) { + private void validateField() { + for (String key : schema.getSchema().keySet()) { + boolean bFound = false; + for (String ifld : inputField) { + if (key.equals(ifld)) { + bFound = true; + break; + } + } + if (!bFound) { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE214", 
String.format( - "UnknownFieldError: Input \"%s\" contains unknown field \"%s\"", - name,key))); - } - } - } - - private void _validateType(String inputType) { - boolean bFound = false; - for(String pt: Schema.PROPERTY_TYPES) { - if(pt.equals(inputType)) { - bFound = true; - break; - } - } - - if(!bFound) { - if(customDefs.get(inputType) != null) { - bFound = true; - } - } - - if(!bFound) { + "UnknownFieldError: Input \"%s\" contains unknown field \"%s\"", + name, key))); + } + } + } + + private void validateType(String inputType) { + boolean bFound = false; + for (String pt : Schema.PROPERTY_TYPES) { + if (pt.equals(inputType)) { + bFound = true; + break; + } + } + + if (!bFound) { + if (customDefs.get(inputType) != null) { + bFound = true; + } + } + + if (!bFound) { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE215", String.format( - "ValueError: Invalid type \"%s\"",inputType))); - } + "ValueError: Invalid type \"%s\"", inputType))); + } } - + @SuppressWarnings("unchecked") - private void _validateValue(Object value) { - Object datatype = null; - if(EntityType.TOSCA_DEF.get(getType()) != null) { - datatype = EntityType.TOSCA_DEF.get(getType()); - } - else if(EntityType.TOSCA_DEF.get(EntityType.DATATYPE_NETWORK_PREFIX + getType()) != null) { - datatype = EntityType.TOSCA_DEF.get(EntityType.DATATYPE_NETWORK_PREFIX + getType()); - } - - String type = getType(); - // if it's one of the basic types DON'T look in customDefs - if(Arrays.asList(PRIMITIVE_TYPES).contains(type)) { - DataEntity.validateDatatype(getType(), value, null, customDefs, null); - return; - } - else if(customDefs.get(getType()) != null) { - datatype = customDefs.get(getType()); - DataEntity.validateDatatype(getType(), value, (LinkedHashMap)datatype, customDefs, null); - return; - } - - DataEntity.validateDatatype(getType(), value, null, customDefs, null); - } - - public Map getAnnotations() { - return annotations; - } - - private void setAnnotations(Map 
annotations) { - this.annotations = annotations; - } - - public void resetAnnotaions(){ - annotations = null; - } - - public LinkedHashMap getEntrySchema() { - return schema.getEntrySchema(); - } + private void validateValue(Object value) { + Object datatype; + if (EntityType.TOSCA_DEF.get(getType()) != null) { + datatype = EntityType.TOSCA_DEF.get(getType()); + } else if (EntityType.TOSCA_DEF.get(EntityType.DATATYPE_NETWORK_PREFIX + getType()) != null) { + datatype = EntityType.TOSCA_DEF.get(EntityType.DATATYPE_NETWORK_PREFIX + getType()); + } + + String type = getType(); + // if it's one of the basic types DON'T look in customDefs + if (Arrays.asList(primitiveTypes).contains(type)) { + DataEntity.validateDatatype(getType(), value, null, customDefs, null); + return; + } else if (customDefs.get(getType()) != null) { + datatype = customDefs.get(getType()); + DataEntity.validateDatatype(getType(), value, (LinkedHashMap) datatype, customDefs, null); + return; + } + + DataEntity.validateDatatype(getType(), value, null, customDefs, null); + } + + public Map getAnnotations() { + return annotations; + } + + private void setAnnotations(Map annotations) { + this.annotations = annotations; + } + + public void resetAnnotaions() { + annotations = null; + } + + public LinkedHashMap getEntrySchema() { + return schema.getEntrySchema(); + } } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/parameters/Output.java b/src/main/java/org/onap/sdc/toscaparser/api/parameters/Output.java index df122f0..8ef82b3 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/parameters/Output.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/parameters/Output.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -21,100 +21,99 @@ package org.onap.sdc.toscaparser.api.parameters; import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; import java.util.LinkedHashMap; -import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; - public class Output { - - private static final String DESCRIPTION = "description"; - public static final String VALUE = "value"; - private static final String OUTPUTFIELD[] = {DESCRIPTION, VALUE}; - - private String name; - private LinkedHashMap attrs;//TYPE??? - - public Output(String oname,LinkedHashMap oattrs) { - name = oname; - attrs = oattrs; - } - - public String getDescription() { - return (String)attrs.get(DESCRIPTION); - } - - public Object getValue() { - return attrs.get(VALUE); - } - - public void validate() { - _validateField(); - } - - private void _validateField() { - if(!(attrs instanceof LinkedHashMap)) { - //TODO wrong error message... + + private static final String DESCRIPTION = "description"; + public static final String VALUE = "value"; + private static final String[] OUTPUT_FIELD = {DESCRIPTION, VALUE}; + + private String name; + private LinkedHashMap attributes; + + public Output(String name, LinkedHashMap attributes) { + this.name = name; + this.attributes = attributes; + } + + public String getDescription() { + return (String) attributes.get(DESCRIPTION); + } + + public Object getValue() { + return attributes.get(VALUE); + } + + public void validate() { + validateField(); + } + + private void validateField() { + if (attributes == null) { + //TODO wrong error message... 
ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE216", String.format( "ValidationError: Output \"%s\" has wrong type. Expecting a dict", - name))); - } - - if(getValue() == null) { + name))); + } + + if (getValue() == null) { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE217", String.format( "MissingRequiredFieldError: Output \"%s\" is missing required \"%s\"", - name,VALUE))); - } - for(String key: attrs.keySet()) { - boolean bFound = false; - for(String of: OUTPUTFIELD) { - if(key.equals(of)) { - bFound = true; - break; - } - } - if(!bFound) { + name, VALUE))); + } + for (String key : attributes.keySet()) { + boolean bFound = false; + for (String of : OUTPUT_FIELD) { + if (key.equals(of)) { + bFound = true; + break; + } + } + if (!bFound) { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE218", String.format( - "UnknownFieldError: Output \"%s\" contains unknown field \"%s\"", - name,key))); + "UnknownFieldError: Output \"%s\" contains unknown field \"%s\"", + name, key))); } } - } - - // getter/setter - - public String getName() { - return name; - } - - public void setAttr(String name,Object value) { - attrs.put(name, value); - } + } + + // getter/setter + + public String getName() { + return name; + } + + public void setAttr(String name, Object value) { + attributes.put(name, value); + } } /*python class Output(object): - OUTPUTFIELD = (DESCRIPTION, VALUE) = ('description', 'value') + OUTPUT_FIELD = (DESCRIPTION, VALUE) = ('description', 'value') - def __init__(self, name, attrs): + def __init__(self, name, attributes): self.name = name - self.attrs = attrs + self.attributes = attributes @property def description(self): - return self.attrs.get(self.DESCRIPTION) + return self.attributes.get(self.DESCRIPTION) @property def value(self): - return self.attrs.get(self.VALUE) + return self.attributes.get(self.VALUE) def validate(self): self._validate_field() 
def _validate_field(self): - if not isinstance(self.attrs, dict): + if not isinstance(self.attributes, dict): ValidationIssueCollector.appendException( MissingRequiredFieldError(what='Output "%s"' % self.name, required=self.VALUE)) @@ -122,8 +121,8 @@ class Output(object): ValidationIssueCollector.appendException( MissingRequiredFieldError(what='Output "%s"' % self.name, required=self.VALUE)) - for name in self.attrs: - if name not in self.OUTPUTFIELD: + for name in self.attributes: + if name not in self.OUTPUT_FIELD: ValidationIssueCollector.appendException( UnknownFieldError(what='Output "%s"' % self.name, field=name)) diff --git a/src/main/java/org/onap/sdc/toscaparser/api/prereq/CSAR.java b/src/main/java/org/onap/sdc/toscaparser/api/prereq/CSAR.java index 92d5194..4ada267 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/prereq/CSAR.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/prereq/CSAR.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -50,204 +50,199 @@ import org.yaml.snakeyaml.Yaml; public class CSAR { - private static Logger log = LoggerFactory.getLogger(CSAR.class.getName()); - private static final ArrayList META_PROPERTIES_FILES = new ArrayList<>(Arrays.asList("TOSCA-Metadata/TOSCA.meta", "csar.meta")); + private static Logger log = LoggerFactory.getLogger(CSAR.class.getName()); + private static final ArrayList META_PROPERTIES_FILES = new ArrayList<>(Arrays.asList("TOSCA-Metadata/TOSCA.meta", "csar.meta")); - private String path; + private String path; private boolean isFile; private boolean isValidated; private boolean errorCaught; private String csar; private String tempDir; -// private Metadata metaData; + // private Metadata metaData; private File tempFile; - private LinkedHashMap> metaProperties; + private LinkedHashMap> metaProperties; - public CSAR(String csarPath, boolean aFile) { - path = csarPath; - isFile = aFile; + public CSAR(String csarPath, boolean aFile) { + path = csarPath; + isFile = aFile; isValidated = false; errorCaught = false; csar = null; tempDir = null; tempFile = null; - metaProperties = new LinkedHashMap<>(); - } + metaProperties = new LinkedHashMap<>(); + } + + public boolean validate() throws JToscaException { + isValidated = true; - public boolean validate() throws JToscaException { - isValidated = true; - //validate that the file or URL exists - - if(isFile) { - File f = new File(path); - if (!f.isFile()) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE220", String.format("\"%s\" is not a file", path))); - return false; - } - else { - this.csar = path; - } - } - else { - if(!UrlUtils.validateUrl(path)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE221", String.format("ImportError: \"%s\" does not exist",path))); - return false; - } - // get it to a local file - try { - File tempFile = File.createTempFile("csartmp",".csar"); - Path ptf = Paths.get(tempFile.getPath()); - URL 
webfile = new URL(path); - InputStream in = webfile.openStream(); - Files.copy(in,ptf,StandardCopyOption.REPLACE_EXISTING); - } - catch(Exception e) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE222", "ImportError: failed to load CSAR from " + path)); - return false; - } - - log.debug("CSAR - validate - currently only files are supported"); - return false; - } - - _parseAndValidateMetaProperties(); - - if(errorCaught) { - return false; - } - + + if (isFile) { + File f = new File(path); + if (!f.isFile()) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE220", String.format("\"%s\" is not a file", path))); + return false; + } else { + this.csar = path; + } + } else { + if (!UrlUtils.validateUrl(path)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE221", String.format("ImportError: \"%s\" does not exist", path))); + return false; + } + // get it to a local file + try { + File tempFile = File.createTempFile("csartmp", ".csar"); + Path ptf = Paths.get(tempFile.getPath()); + URL webfile = new URL(path); + InputStream in = webfile.openStream(); + Files.copy(in, ptf, StandardCopyOption.REPLACE_EXISTING); + } catch (Exception e) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE222", "ImportError: failed to load CSAR from " + path)); + return false; + } + + log.debug("CSAR - validate - currently only files are supported"); + return false; + } + + _parseAndValidateMetaProperties(); + + if (errorCaught) { + return false; + } + // validate that external references in the main template actually exist and are accessible _validateExternalReferences(); - + return !errorCaught; - } - - private void _parseAndValidateMetaProperties() throws JToscaException { - - ZipFile zf = null; - - try { - - // validate that it is a valid zip file - RandomAccessFile raf = new RandomAccessFile(csar, "r"); - long n = raf.readInt(); - 
raf.close(); - // check if Zip's magic number - if (n != 0x504B0304) { - String errorString = String.format("\"%s\" is not a valid zip file", csar); - log.error(errorString); - throw new JToscaException(errorString , JToscaErrorCodes.INVALID_CSAR_FORMAT.getValue()); - } - - // validate that it contains the metadata file in the correct location - zf = new ZipFile(csar); - ZipEntry ze = zf.getEntry("TOSCA-Metadata/TOSCA.meta"); - if (ze == null) { - - String errorString = String.format( - "\"%s\" is not a valid CSAR as it does not contain the " + - "required file \"TOSCA.meta\" in the folder \"TOSCA-Metadata\"", csar); - log.error(errorString); - throw new JToscaException(errorString, JToscaErrorCodes.MISSING_META_FILE.getValue()); - } - - //Going over expected metadata files and parsing them - for (String metaFile: META_PROPERTIES_FILES) { - - byte ba[] = new byte[4096]; - ze = zf.getEntry(metaFile); - if (ze != null) { - InputStream inputStream = zf.getInputStream(ze); - n = inputStream.read(ba, 0, 4096); - String md = new String(ba); - md = md.substring(0, (int) n); - - String errorString = String.format( - "The file \"%s\" in the" + - " CSAR \"%s\" does not contain valid YAML content", ze.getName(), csar); - - try { - Yaml yaml = new Yaml(); - Object mdo = yaml.load(md); - if (!(mdo instanceof LinkedHashMap)) { - log.error(errorString); - throw new JToscaException(errorString, JToscaErrorCodes.INVALID_META_YAML_CONTENT.getValue()); - } - - String[] split = ze.getName().split("/"); - String fileName = split[split.length - 1]; - - if (!metaProperties.containsKey(fileName)) { - metaProperties.put(fileName, (LinkedHashMap) mdo); - } - } - catch(Exception e) { - log.error(errorString); - throw new JToscaException(errorString, JToscaErrorCodes.INVALID_META_YAML_CONTENT.getValue()); - } - } - } - - // verify it has "Entry-Definition" - String edf = _getMetadata("Entry-Definitions"); - if (edf == null) { - String errorString = String.format( - "The CSAR \"%s\" is missing 
the required metadata " + - "\"Entry-Definitions\" in \"TOSCA-Metadata/TOSCA.meta\"", csar); - log.error(errorString); - throw new JToscaException(errorString, JToscaErrorCodes.ENTRY_DEFINITION_NOT_DEFINED.getValue()); - } - - //validate that "Entry-Definitions' metadata value points to an existing file in the CSAR - boolean foundEDF = false; - Enumeration entries = zf.entries(); - while (entries.hasMoreElements()) { - ze = entries.nextElement(); - if (ze.getName().equals(edf)) { - foundEDF = true; - break; - } - } - if (!foundEDF) { - String errorString = String.format( - "The \"Entry-Definitions\" file defined in the CSAR \"%s\" does not exist", csar); - log.error(errorString); - throw new JToscaException(errorString, JToscaErrorCodes.MISSING_ENTRY_DEFINITION_FILE.getValue()); - } - } catch (JToscaException e) { - //ThreadLocalsHolder.getCollector().appendCriticalException(e.getMessage()); - throw e; - } catch (Exception e) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE223", "ValidationError: " + e.getMessage())); - errorCaught = true; - } - - try { - if (zf != null) { - zf.close(); - } - } catch (IOException e) { - } - } - - public void cleanup() { - try { - if(tempFile != null) { - tempFile.delete(); - } - } - catch(Exception e) { - } - } - + } + + private void _parseAndValidateMetaProperties() throws JToscaException { + + ZipFile zf = null; + + try { + + // validate that it is a valid zip file + RandomAccessFile raf = new RandomAccessFile(csar, "r"); + long n = raf.readInt(); + raf.close(); + // check if Zip's magic number + if (n != 0x504B0304) { + String errorString = String.format("\"%s\" is not a valid zip file", csar); + log.error(errorString); + throw new JToscaException(errorString, JToscaErrorCodes.INVALID_CSAR_FORMAT.getValue()); + } + + // validate that it contains the metadata file in the correct location + zf = new ZipFile(csar); + ZipEntry ze = zf.getEntry("TOSCA-Metadata/TOSCA.meta"); + if (ze == null) { 
+ + String errorString = String.format( + "\"%s\" is not a valid CSAR as it does not contain the " + + "required file \"TOSCA.meta\" in the folder \"TOSCA-Metadata\"", csar); + log.error(errorString); + throw new JToscaException(errorString, JToscaErrorCodes.MISSING_META_FILE.getValue()); + } + + //Going over expected metadata files and parsing them + for (String metaFile : META_PROPERTIES_FILES) { + + byte ba[] = new byte[4096]; + ze = zf.getEntry(metaFile); + if (ze != null) { + InputStream inputStream = zf.getInputStream(ze); + n = inputStream.read(ba, 0, 4096); + String md = new String(ba); + md = md.substring(0, (int) n); + + String errorString = String.format( + "The file \"%s\" in the" + + " CSAR \"%s\" does not contain valid YAML content", ze.getName(), csar); + + try { + Yaml yaml = new Yaml(); + Object mdo = yaml.load(md); + if (!(mdo instanceof LinkedHashMap)) { + log.error(errorString); + throw new JToscaException(errorString, JToscaErrorCodes.INVALID_META_YAML_CONTENT.getValue()); + } + + String[] split = ze.getName().split("/"); + String fileName = split[split.length - 1]; + + if (!metaProperties.containsKey(fileName)) { + metaProperties.put(fileName, (LinkedHashMap) mdo); + } + } catch (Exception e) { + log.error(errorString); + throw new JToscaException(errorString, JToscaErrorCodes.INVALID_META_YAML_CONTENT.getValue()); + } + } + } + + // verify it has "Entry-Definition" + String edf = _getMetadata("Entry-Definitions"); + if (edf == null) { + String errorString = String.format( + "The CSAR \"%s\" is missing the required metadata " + + "\"Entry-Definitions\" in \"TOSCA-Metadata/TOSCA.meta\"", csar); + log.error(errorString); + throw new JToscaException(errorString, JToscaErrorCodes.ENTRY_DEFINITION_NOT_DEFINED.getValue()); + } + + //validate that "Entry-Definitions' metadata value points to an existing file in the CSAR + boolean foundEDF = false; + Enumeration entries = zf.entries(); + while (entries.hasMoreElements()) { + ze = 
entries.nextElement(); + if (ze.getName().equals(edf)) { + foundEDF = true; + break; + } + } + if (!foundEDF) { + String errorString = String.format( + "The \"Entry-Definitions\" file defined in the CSAR \"%s\" does not exist", csar); + log.error(errorString); + throw new JToscaException(errorString, JToscaErrorCodes.MISSING_ENTRY_DEFINITION_FILE.getValue()); + } + } catch (JToscaException e) { + //ThreadLocalsHolder.getCollector().appendCriticalException(e.getMessage()); + throw e; + } catch (Exception e) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE223", "ValidationError: " + e.getMessage())); + errorCaught = true; + } + + try { + if (zf != null) { + zf.close(); + } + } catch (IOException e) { + } + } + + public void cleanup() { + try { + if (tempFile != null) { + tempFile.delete(); + } + } catch (Exception e) { + } + } + private String _getMetadata(String key) throws JToscaException { - if(!isValidated) { - validate(); - } - Object value = _getMetaProperty("TOSCA.meta").get(key); - return value != null ? value.toString() : null; + if (!isValidated) { + validate(); + } + Object value = _getMetaProperty("TOSCA.meta").get(key); + return value != null ? 
value.toString() : null; } public String getAuthor() throws JToscaException { @@ -258,276 +253,266 @@ public class CSAR { return _getMetadata("CSAR-Version"); } - public LinkedHashMap> getMetaProperties() { - return metaProperties; - } - - private LinkedHashMap _getMetaProperty(String propertiesFile) { - return metaProperties.get(propertiesFile); - } - - public String getMainTemplate() throws JToscaException { - String entryDef = _getMetadata("Entry-Definitions"); - ZipFile zf; - boolean ok = false; - try { - zf = new ZipFile(path); - ok = (zf.getEntry(entryDef) != null); - zf.close(); - } - catch(IOException e) { - if(!ok) { - log.error("CSAR - getMainTemplate - failed to open {}", path); - } - } - if(ok) { - return entryDef; - } - else { - return null; - } + public LinkedHashMap> getMetaProperties() { + return metaProperties; + } + + private LinkedHashMap _getMetaProperty(String propertiesFile) { + return metaProperties.get(propertiesFile); + } + + public String getMainTemplate() throws JToscaException { + String entryDef = _getMetadata("Entry-Definitions"); + ZipFile zf; + boolean ok = false; + try { + zf = new ZipFile(path); + ok = (zf.getEntry(entryDef) != null); + zf.close(); + } catch (IOException e) { + if (!ok) { + log.error("CSAR - getMainTemplate - failed to open {}", path); + } + } + if (ok) { + return entryDef; + } else { + return null; + } } - @SuppressWarnings("unchecked") - public LinkedHashMap getMainTemplateYaml() throws JToscaException { - String mainTemplate = tempDir + File.separator + getMainTemplate(); - if(mainTemplate != null) { - try (InputStream input = new FileInputStream(new File(mainTemplate));){ - Yaml yaml = new Yaml(); - Object data = yaml.load(input); - if(!(data instanceof LinkedHashMap)) { - throw new IOException(); - } - return (LinkedHashMap)data; - } - catch(Exception e) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE224", String.format( - "The file \"%s\" in the CSAR \"%s\" does not " 
+ - "contain valid TOSCA YAML content", - mainTemplate,csar))); - } - } - return null; + @SuppressWarnings("unchecked") + public LinkedHashMap getMainTemplateYaml() throws JToscaException { + String mainTemplate = tempDir + File.separator + getMainTemplate(); + if (mainTemplate != null) { + try (InputStream input = new FileInputStream(new File(mainTemplate));) { + Yaml yaml = new Yaml(); + Object data = yaml.load(input); + if (!(data instanceof LinkedHashMap)) { + throw new IOException(); + } + return (LinkedHashMap) data; + } catch (Exception e) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE224", String.format( + "The file \"%s\" in the CSAR \"%s\" does not " + + "contain valid TOSCA YAML content", + mainTemplate, csar))); + } + } + return null; } - + public String getDescription() throws JToscaException { String desc = _getMetadata("Description"); - if(desc != null) { + if (desc != null) { return desc; } - Map metaData = metaProperties.get("TOSCA.meta"); - metaData.put("Description", getMainTemplateYaml().get("description")); - return _getMetadata("Description"); + Map metaData = metaProperties.get("TOSCA.meta"); + metaData.put("Description", getMainTemplateYaml().get("description")); + return _getMetadata("Description"); } public String getTempDir() { - return tempDir; + return tempDir; } - + public void decompress() throws IOException, JToscaException { - if(!isValidated) { + if (!isValidated) { validate(); } - - if(tempDir == null || tempDir.isEmpty()) { - tempDir = Files.createTempDirectory("JTP").toString(); - unzip(path,tempDir); + + if (tempDir == null || tempDir.isEmpty()) { + tempDir = Files.createTempDirectory("JTP").toString(); + unzip(path, tempDir); } } - - private void _validateExternalReferences() throws JToscaException { + + private void _validateExternalReferences() throws JToscaException { // Extracts files referenced in the main template - // These references are currently supported: + // These 
references are currently supported: // * imports // * interface implementations // * artifacts try { decompress(); String mainTplFile = getMainTemplate(); - if(mainTplFile == null) { + if (mainTplFile == null) { return; } - - LinkedHashMap mainTpl = getMainTemplateYaml(); - if(mainTpl.get("imports") != null) { - // this loads the imports - ImportsLoader il = new ImportsLoader((ArrayList)mainTpl.get("imports"), - tempDir + File.separator + mainTplFile, - (Object)null, - (LinkedHashMap)null); + + LinkedHashMap mainTpl = getMainTemplateYaml(); + if (mainTpl.get("imports") != null) { + // this loads the imports + ImportsLoader il = new ImportsLoader((ArrayList) mainTpl.get("imports"), + tempDir + File.separator + mainTplFile, + (Object) null, + (LinkedHashMap) null); } - - if(mainTpl.get("topology_template") != null) { - LinkedHashMap topologyTemplate = - (LinkedHashMap)mainTpl.get("topology_template"); - - if(topologyTemplate.get("node_templates") != null) { - LinkedHashMap nodeTemplates = - (LinkedHashMap)topologyTemplate.get("node_templates"); - for(String nodeTemplateKey: nodeTemplates.keySet()) { - LinkedHashMap nodeTemplate = - (LinkedHashMap)nodeTemplates.get(nodeTemplateKey); - if(nodeTemplate.get("artifacts") != null) { - LinkedHashMap artifacts = - (LinkedHashMap)nodeTemplate.get("artifacts"); - for(String artifactKey: artifacts.keySet()) { - Object artifact = artifacts.get(artifactKey); - if(artifact instanceof String) { - _validateExternalReference(mainTplFile,(String)artifact,true); - } - else if(artifact instanceof LinkedHashMap) { - String file = (String)((LinkedHashMap)artifact).get("file"); - if(file != null) { - _validateExternalReference(mainTplFile,file,true); - } - } - else { + + if (mainTpl.get("topology_template") != null) { + LinkedHashMap topologyTemplate = + (LinkedHashMap) mainTpl.get("topology_template"); + + if (topologyTemplate.get("node_templates") != null) { + LinkedHashMap nodeTemplates = + (LinkedHashMap) 
topologyTemplate.get("node_templates"); + for (String nodeTemplateKey : nodeTemplates.keySet()) { + LinkedHashMap nodeTemplate = + (LinkedHashMap) nodeTemplates.get(nodeTemplateKey); + if (nodeTemplate.get("artifacts") != null) { + LinkedHashMap artifacts = + (LinkedHashMap) nodeTemplate.get("artifacts"); + for (String artifactKey : artifacts.keySet()) { + Object artifact = artifacts.get(artifactKey); + if (artifact instanceof String) { + _validateExternalReference(mainTplFile, (String) artifact, true); + } else if (artifact instanceof LinkedHashMap) { + String file = (String) ((LinkedHashMap) artifact).get("file"); + if (file != null) { + _validateExternalReference(mainTplFile, file, true); + } + } else { ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE225", String.format( - "ValueError: Unexpected artifact definition for \"%s\"", - artifactKey))); - errorCaught = true; - } - } - } - if(nodeTemplate.get("interfaces") != null) { - LinkedHashMap interfaces = - (LinkedHashMap)nodeTemplate.get("interfaces"); - for(String interfaceKey: interfaces.keySet()) { - LinkedHashMap _interface = - (LinkedHashMap)interfaces.get(interfaceKey); - for(String operationKey: _interface.keySet()) { - Object operation = _interface.get(operationKey); - if(operation instanceof String) { - _validateExternalReference(mainTplFile,(String)operation,false); - } - else if(operation instanceof LinkedHashMap) { - String imp = (String)((LinkedHashMap)operation).get("implementation"); - if(imp != null) { - _validateExternalReference(mainTplFile,imp,true); - } - } - } - } - } - } - } + "ValueError: Unexpected artifact definition for \"%s\"", + artifactKey))); + errorCaught = true; + } + } + } + if (nodeTemplate.get("interfaces") != null) { + LinkedHashMap interfaces = + (LinkedHashMap) nodeTemplate.get("interfaces"); + for (String interfaceKey : interfaces.keySet()) { + LinkedHashMap _interface = + (LinkedHashMap) interfaces.get(interfaceKey); + for (String 
operationKey : _interface.keySet()) { + Object operation = _interface.get(operationKey); + if (operation instanceof String) { + _validateExternalReference(mainTplFile, (String) operation, false); + } else if (operation instanceof LinkedHashMap) { + String imp = (String) ((LinkedHashMap) operation).get("implementation"); + if (imp != null) { + _validateExternalReference(mainTplFile, imp, true); + } + } + } + } + } + } + } } + } catch (IOException e) { + errorCaught = true; + } finally { + // delete tempDir (only here?!?) + File fdir = new File(tempDir); + deleteDir(fdir); + tempDir = null; } - catch(IOException e) { - errorCaught = true; - } - finally { - // delete tempDir (only here?!?) - File fdir = new File(tempDir); - deleteDir(fdir); - tempDir = null; + } + + public static void deleteDir(File fdir) { + try { + if (fdir.isDirectory()) { + for (File c : fdir.listFiles()) + deleteDir(c); + } + fdir.delete(); + } catch (Exception e) { } - } - - public static void deleteDir(File fdir) { - try { - if (fdir.isDirectory()) { - for (File c : fdir.listFiles()) - deleteDir(c); - } - fdir.delete(); - } - catch(Exception e) { - } - } - - private void _validateExternalReference(String tplFile,String resourceFile,boolean raiseExc) { + } + + private void _validateExternalReference(String tplFile, String resourceFile, boolean raiseExc) { // Verify that the external resource exists // If resource_file is a URL verify that the URL is valid. // If resource_file is a relative path verify that the path is valid // considering base folder (self.temp_dir) and tpl_file. // Note that in a CSAR resource_file cannot be an absolute path. 
- if(UrlUtils.validateUrl(resourceFile)) { - String msg = String.format("URLException: The resource at \"%s\" cannot be accessed",resourceFile); + if (UrlUtils.validateUrl(resourceFile)) { + String msg = String.format("URLException: The resource at \"%s\" cannot be accessed", resourceFile); try { - if(UrlUtils.isUrlAccessible(resourceFile)) { + if (UrlUtils.isUrlAccessible(resourceFile)) { return; - } - else { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE226", msg)); + } else { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE226", msg)); errorCaught = true; } - } - catch (Exception e) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE227", msg)); + } catch (Exception e) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE227", msg)); } } - String dirPath = Paths.get(tplFile).getParent().toString(); - String filePath = tempDir + File.separator + dirPath + File.separator + resourceFile; - File f = new File(filePath); - if(f.isFile()) { - return; - } - - if(raiseExc) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE228", String.format( - "ValueError: The resource \"%s\" does not exist",resourceFile))); - } - errorCaught = true; - } - + String dirPath = Paths.get(tplFile).getParent().toString(); + String filePath = tempDir + File.separator + dirPath + File.separator + resourceFile; + File f = new File(filePath); + if (f.isFile()) { + return; + } + + if (raiseExc) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE228", String.format( + "ValueError: The resource \"%s\" does not exist", resourceFile))); + } + errorCaught = true; + } + private void unzip(String zipFilePath, String destDirectory) throws IOException { File destDir = new File(destDirectory); if (!destDir.exists()) { destDir.mkdir(); } - try (ZipInputStream zipIn = new 
ZipInputStream(new FileInputStream(zipFilePath));){ - ZipEntry entry = zipIn.getNextEntry(); - // iterates over entries in the zip file - while (entry != null) { - // create all directories needed for nested items - String[] parts = entry.getName().split("/"); - String s = destDirectory + File.separator ; - for(int i=0; i< parts.length-1; i++) { - s += parts[i]; - File idir = new File(s); - if(!idir.exists()) { - idir.mkdir(); - } - s += File.separator; - } - String filePath = destDirectory + File.separator + entry.getName(); - if (!entry.isDirectory()) { - // if the entry is a file, extracts it - extractFile(zipIn, filePath); - } else { - // if the entry is a directory, make the directory - File dir = new File(filePath); - dir.mkdir(); - } - zipIn.closeEntry(); - entry = zipIn.getNextEntry(); - } - } + try (ZipInputStream zipIn = new ZipInputStream(new FileInputStream(zipFilePath));) { + ZipEntry entry = zipIn.getNextEntry(); + // iterates over entries in the zip file + while (entry != null) { + // create all directories needed for nested items + String[] parts = entry.getName().split("/"); + String s = destDirectory + File.separator; + for (int i = 0; i < parts.length - 1; i++) { + s += parts[i]; + File idir = new File(s); + if (!idir.exists()) { + idir.mkdir(); + } + s += File.separator; + } + String filePath = destDirectory + File.separator + entry.getName(); + if (!entry.isDirectory()) { + // if the entry is a file, extracts it + extractFile(zipIn, filePath); + } else { + // if the entry is a directory, make the directory + File dir = new File(filePath); + dir.mkdir(); + } + zipIn.closeEntry(); + entry = zipIn.getNextEntry(); + } + } } - + /** * Extracts a zip entry (file entry) + * * @param zipIn * @param filePath * @throws IOException */ private static final int BUFFER_SIZE = 4096; - + private void extractFile(ZipInputStream zipIn, String filePath) throws IOException { //BufferedOutputStream bos = new BufferedOutputStream(new FileOutputStream(filePath)); - 
try (FileOutputStream fos = new FileOutputStream(filePath); - BufferedOutputStream bos = new BufferedOutputStream(fos);){ - byte[] bytesIn = new byte[BUFFER_SIZE]; - int read = 0; - while ((read = zipIn.read(bytesIn)) != -1) { - bos.write(bytesIn, 0, read); - } - } + try (FileOutputStream fos = new FileOutputStream(filePath); + BufferedOutputStream bos = new BufferedOutputStream(fos);) { + byte[] bytesIn = new byte[BUFFER_SIZE]; + int read = 0; + while ((read = zipIn.read(bytesIn)) != -1) { + bos.write(bytesIn, 0, read); + } + } } } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/utils/CopyUtils.java b/src/main/java/org/onap/sdc/toscaparser/api/utils/CopyUtils.java index a15afe4..237b738 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/utils/CopyUtils.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/utils/CopyUtils.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -26,24 +26,25 @@ import java.util.Map; public class CopyUtils { + private CopyUtils() { + } + @SuppressWarnings("unchecked") - public static Object copyLhmOrAl(Object src) { - if(src instanceof LinkedHashMap) { - LinkedHashMap dst = new LinkedHashMap(); - for(Map.Entry me: ((LinkedHashMap)src).entrySet()) { - dst.put(me.getKey(),me.getValue()); - } - return dst; - } - else if(src instanceof ArrayList) { - ArrayList dst = new ArrayList(); - for(Object o: (ArrayList)src) { - dst.add(o); - } - return dst; - } - else { - return null; - } + public static Object copyLhmOrAl(Object src) { + if (src instanceof LinkedHashMap) { + LinkedHashMap dst = new LinkedHashMap(); + for (Map.Entry me : ((LinkedHashMap) src).entrySet()) { + dst.put(me.getKey(), me.getValue()); + } + return dst; + } else if (src instanceof ArrayList) { + ArrayList dst = new ArrayList(); + for (Object o : (ArrayList) src) { + dst.add(o); + } + return dst; + } else { + return null; + } } } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/utils/DumpUtils.java b/src/main/java/org/onap/sdc/toscaparser/api/utils/DumpUtils.java index d87103b..158a3e1 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/utils/DumpUtils.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/utils/DumpUtils.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -25,51 +25,44 @@ import java.util.LinkedHashMap; import java.util.Map; public class DumpUtils { - - @SuppressWarnings("unchecked") - public static void dumpYaml(Object yo,int level) { - final String indent = " "; - try { - if(yo == null) { - System.out.println(""); - return; - } - String cname = yo.getClass().getSimpleName(); - System.out.print(cname); - if(cname.equals("LinkedHashMap")) { - LinkedHashMap lhm = (LinkedHashMap)yo; - System.out.println(); - for(Map.Entry me: lhm.entrySet()) { - System.out.print(indent.substring(0,level) + me.getKey() + ": "); - dumpYaml(me.getValue(),level+2); - } - } - else if(cname.equals("ArrayList")) { - ArrayList al = (ArrayList)yo; - System.out.println(); - for (int i=0; i \"" + (String)yo + "\""); - } - else if(cname.equals("Integer")) { - System.out.println(" ==> " + (int)yo); - } - else if(cname.equals("Boolean")) { - System.out.println(" ==> " + (boolean)yo); - } - else if(cname.equals("Double")) { - System.out.println(" ==> " + (double)yo); - } - else { - System.out.println(" !! unexpected type"); - } - } - catch(Exception e) { - System.out.println("Exception!! 
" + e.getMessage()); - } - } + + @SuppressWarnings("unchecked") + private static void dumpYaml(Object yo, int level) { + final String indent = " "; + try { + if (yo == null) { + System.out.println(""); + return; + } + String cname = yo.getClass().getSimpleName(); + System.out.print(cname); + if (cname.equals("LinkedHashMap")) { + LinkedHashMap lhm = (LinkedHashMap) yo; + System.out.println(); + for (Map.Entry me : lhm.entrySet()) { + System.out.print(indent.substring(0, level) + me.getKey() + ": "); + dumpYaml(me.getValue(), level + 2); + } + } else if (cname.equals("ArrayList")) { + ArrayList al = (ArrayList) yo; + System.out.println(); + for (int i = 0; i < al.size(); i++) { + System.out.format("%s[%d] ", indent.substring(0, level), i); + dumpYaml(al.get(i), level + 2); + } + } else if (cname.equals("String")) { + System.out.println(" ==> \"" + (String) yo + "\""); + } else if (cname.equals("Integer")) { + System.out.println(" ==> " + (int) yo); + } else if (cname.equals("Boolean")) { + System.out.println(" ==> " + (boolean) yo); + } else if (cname.equals("Double")) { + System.out.println(" ==> " + (double) yo); + } else { + System.out.println(" !! unexpected type"); + } + } catch (Exception e) { + System.out.println("Exception!! " + e.getMessage()); + } + } } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/utils/JToscaErrorCodes.java b/src/main/java/org/onap/sdc/toscaparser/api/utils/JToscaErrorCodes.java index 3515ed0..3849ce0 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/utils/JToscaErrorCodes.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/utils/JToscaErrorCodes.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -25,7 +25,7 @@ public enum JToscaErrorCodes { MISSING_META_FILE("JE1001"), INVALID_META_YAML_CONTENT("JE1002"), ENTRY_DEFINITION_NOT_DEFINED("JE1003"), - MISSING_ENTRY_DEFINITION_FILE ("JE1004"), + MISSING_ENTRY_DEFINITION_FILE("JE1004"), GENERAL_ERROR("JE1005"), PATH_NOT_VALID("JE1006"), CSAR_TOSCA_VALIDATION_ERROR("JE1007"), @@ -33,7 +33,7 @@ public enum JToscaErrorCodes { private String value; - private JToscaErrorCodes(String value) { + JToscaErrorCodes(String value) { this.value = value; } @@ -42,8 +42,8 @@ public enum JToscaErrorCodes { } public static JToscaErrorCodes getByCode(String code) { - for(JToscaErrorCodes v : values()){ - if( v.getValue().equals(code)){ + for (JToscaErrorCodes v : values()) { + if (v.getValue().equals(code)) { return v; } } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/utils/TOSCAVersionProperty.java b/src/main/java/org/onap/sdc/toscaparser/api/utils/TOSCAVersionProperty.java index 838fb07..a753d62 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/utils/TOSCAVersionProperty.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/utils/TOSCAVersionProperty.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -25,104 +25,111 @@ import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; import java.util.regex.Matcher; import java.util.regex.Pattern; -public class TOSCAVersionProperty {// test with functions/test_concat.yaml - - private String version; - - private static final String versionRe = - "^(?([0-9][0-9]*))" + - "(\\.(?([0-9][0-9]*)))?" + - "(\\.(?([0-9][0-9]*)))?" + - "(\\.(?([0-9A-Za-z]+)))?" + - "(\\-(?[0-9])*)?$"; - - private String minorVersion = null; - private String majorVersion = null; - private String fixVersion = null; - private String qualifier = null; - private String buildVersion = null; - - - public TOSCAVersionProperty(Object _version) { - version = _version.toString(); - - if(version.equals("0") || version.equals("0.0") || version.equals("0.0.0")) { - //log.warning(_('Version assumed as not provided')) - version = ""; +// test with functions/test_concat.yaml +public class TOSCAVersionProperty { + + private String version; + + private static final String VERSION_RE = + "^(?([0-9][0-9]*))" + + "(\\.(?([0-9][0-9]*)))?" + + "(\\.(?([0-9][0-9]*)))?" + + "(\\.(?([0-9A-Za-z]+)))?" 
+ + "(\\-(?[0-9])*)?$"; + + private String minorVersion = null; + private String majorVersion = null; + private String fixVersion = null; + private String qualifier = null; + private String buildVersion = null; + + + public TOSCAVersionProperty(String version) { + + if (version.equals("0") || version.equals("0.0") || version.equals("0.0.0")) { return; } - Pattern pattern = Pattern.compile(versionRe); - Matcher matcher = pattern.matcher(version); - if(!matcher.find()) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE252", String.format( - "InvalidTOSCAVersionPropertyException: " + - "Value of TOSCA version property \"%s\" is invalid", - version))); + Pattern pattern = Pattern.compile(VERSION_RE); + Matcher matcher = pattern.matcher(version); + if (!matcher.find()) { + ThreadLocalsHolder.getCollector().appendValidationIssue( + new JToscaValidationIssue( + "JE252", + "InvalidTOSCAVersionPropertyException: " + + "Value of TOSCA version property \"" + version + "\" is invalid" + )); return; - } + } minorVersion = matcher.group("gMinorVersion"); majorVersion = matcher.group("gMajorVersion"); fixVersion = matcher.group("gFixVersion"); - qualifier = _validateQualifier(matcher.group("gQualifier")); - buildVersion = _validateBuild(matcher.group("gBuildVersion")); - _validateMajorVersion(majorVersion); - - } - - private String _validateMajorVersion(String value) { + qualifier = validateQualifier(matcher.group("gQualifier")); + buildVersion = validateBuild(matcher.group("gBuildVersion")); + validateMajorVersion(majorVersion); + + this.version = version; + + } + + private String validateMajorVersion(String value) { // Validate major version // Checks if only major version is provided and assumes // minor version as 0. 
// Eg: If version = 18, then it returns version = '18.0' - if(minorVersion == null && buildVersion == null && !value.equals("0")) { + if (minorVersion == null && buildVersion == null && !value.equals("0")) { //log.warning(_('Minor version assumed "0".')) version = version + "0"; } return value; - } - - private String _validateQualifier(String value) { - // Validate qualifier - - // TOSCA version is invalid if a qualifier is present without the - // fix version or with all of major, minor and fix version 0s. - - // For example, the following versions are invalid - // 18.0.abc - // 0.0.0.abc - - if((fixVersion == null && value != null) || - (minorVersion.equals("0") && majorVersion.equals("0") && - fixVersion.equals("0") && value != null)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE253", String.format( - "InvalidTOSCAVersionPropertyException: " + - "Value of TOSCA version property \"%s\" is invalid", - version))); - } - return value; - } - - private String _validateBuild(String value) { + } + + private String validateQualifier(String value) { + // Validate qualifier + + // TOSCA version is invalid if a qualifier is present without the + // fix version or with all of major, minor and fix version 0s. + + // For example, the following versions are invalid + // 18.0.abc + // 0.0.0.abc + + if ((fixVersion == null && value != null) || (minorVersion.equals("0") && majorVersion.equals("0") + && fixVersion.equals("0") && value != null)) { + ThreadLocalsHolder.getCollector().appendValidationIssue( + new JToscaValidationIssue( + "JE253", + "InvalidTOSCAVersionPropertyException: Value of TOSCA version property \"" + + version + + "\" is invalid" + )); + } + return value; + } + + private String validateBuild(String value) { // Validate build version // TOSCA version is invalid if build version is present without the qualifier. // Eg: version = 18.0.0-1 is invalid. 
- if(qualifier == null && value != null) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE254", String.format( - "InvalidTOSCAVersionPropertyException: " + - "Value of TOSCA version property \"%s\" is invalid", - version))); - } + if (qualifier == null && value != null) { + ThreadLocalsHolder.getCollector().appendValidationIssue( + new JToscaValidationIssue( + "JE254", + "InvalidTOSCAVersionPropertyException: " + + "Value of TOSCA version property \"" + version + "\" is invalid" + ) + ); + } return value; } - public Object getVersion() { - return version; - } + public Object getVersion() { + return version; + } } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/utils/ThreadLocalsHolder.java b/src/main/java/org/onap/sdc/toscaparser/api/utils/ThreadLocalsHolder.java index 2ea8d08..4c4581b 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/utils/ThreadLocalsHolder.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/utils/ThreadLocalsHolder.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -24,21 +24,22 @@ import org.onap.sdc.toscaparser.api.common.ValidationIssueCollector; public class ThreadLocalsHolder { - private static final ThreadLocal exceptionCollectorThreadLocal = new ThreadLocal<>(); + private static final ThreadLocal EXCEPTION_COLLECTOR_THREAD_LOCAL = new ThreadLocal<>(); - private ThreadLocalsHolder(){} + private ThreadLocalsHolder() { + } public static ValidationIssueCollector getCollector() { - return exceptionCollectorThreadLocal.get(); + return EXCEPTION_COLLECTOR_THREAD_LOCAL.get(); } public static void setCollector(ValidationIssueCollector validationIssueCollector) { cleanup(); - exceptionCollectorThreadLocal.set(validationIssueCollector); + EXCEPTION_COLLECTOR_THREAD_LOCAL.set(validationIssueCollector); } - public static void cleanup(){ - exceptionCollectorThreadLocal.remove(); + public static void cleanup() { + EXCEPTION_COLLECTOR_THREAD_LOCAL.remove(); } } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/utils/UrlUtils.java b/src/main/java/org/onap/sdc/toscaparser/api/utils/UrlUtils.java index 72e5122..d081d91 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/utils/UrlUtils.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/utils/UrlUtils.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -28,61 +28,63 @@ import java.net.MalformedURLException; import java.net.URL; public class UrlUtils { - - public static boolean validateUrl(String sUrl) { + + private static final int HTTP_STATUS_OK = 200; + + private UrlUtils() { + } + + public static boolean validateUrl(String sUrl) { // Validates whether the given path is a URL or not // If the given path includes a scheme (http, https, ftp, ...) and a net // location (a domain name such as www.github.com) it is validated as a URL - try { - URL url = new URL(sUrl); - if(url.getProtocol().equals("file")) { - return true; - } - return url.getAuthority() != null; - } - catch(MalformedURLException e) { - return false; - } - } - - public static String joinUrl(String sUrl,String relativePath) { + try { + URL url = new URL(sUrl); + if (url.getProtocol().equals("file")) { + return true; + } + return url.getAuthority() != null; + } catch (MalformedURLException e) { + return false; + } + } + + public static String joinUrl(String sUrl, String relativePath) { // Builds a new URL from the given URL and the relative path // Example: // url: http://www.githib.com/openstack/heat // relative_path: heat-translator // - joined: http://www.githib.com/openstack/heat-translator - if(!validateUrl(sUrl)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE255", String.format( - "ValueError: The URL \"%s\" is malformed",sUrl))); - } - try { - URL base = new URL(sUrl); - return (new URL(base,relativePath)).toString(); - } - catch(MalformedURLException e) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE256", String.format( - "ValueError: Joining URL \"%s\" and relative path \"%s\" caused an exception",sUrl,relativePath))); - return sUrl; - } - } - - public static boolean isUrlAccessible(String sUrl) { + if (!validateUrl(sUrl)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE255", String.format( + "ValueError: The URL 
\"%s\" is malformed", sUrl))); + } + try { + URL base = new URL(sUrl); + return (new URL(base, relativePath)).toString(); + } catch (MalformedURLException e) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE256", String.format( + "ValueError: Joining URL \"%s\" and relative path \"%s\" caused an exception", sUrl, relativePath))); + return sUrl; + } + } + + public static boolean isUrlAccessible(String sUrl) { // Validates whether the given URL is accessible // Returns true if the get call returns a 200 response code. // Otherwise, returns false. - try { - HttpURLConnection connection = (HttpURLConnection) new URL(sUrl).openConnection(); - connection.setRequestMethod("HEAD"); - int responseCode = connection.getResponseCode(); - return responseCode == 200; - } - catch(IOException e) { - return false; - } - } + try { + HttpURLConnection connection = (HttpURLConnection) new URL(sUrl).openConnection(); + connection.setRequestMethod("HEAD"); + int responseCode = connection.getResponseCode(); + return responseCode == HTTP_STATUS_OK; + } catch (IOException e) { + return false; + } + } } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/utils/ValidateUtils.java b/src/main/java/org/onap/sdc/toscaparser/api/utils/ValidateUtils.java index a9786ae..b90d882 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/utils/ValidateUtils.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/utils/ValidateUtils.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -27,246 +27,241 @@ import java.util.Date; import java.util.LinkedHashMap; public class ValidateUtils { - - private static final String RANGE_UNBOUNDED = "UNBOUNDED"; - - public static Object strToNum(Object value) { - // Convert a string representation of a number into a numeric type - // TODO(TBD) we should not allow numeric values in, input should be str - if(value instanceof Number) { - return value; - } - if(!(value instanceof String)) { - - } - try { - return Integer.parseInt((String)value); - } - catch(NumberFormatException e) { - } - try { - return Float.parseFloat((String)value); - } - catch(Exception e) { - } - return null; - } - - public static Object validateNumeric(Object value) { - if(value != null) { - if (!(value instanceof Number)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE257", String.format( - "ValueError: \"%s\" is not a numeric",value.toString()))); - } - } - return value; - } - - public static Object validateInteger(Object value) { - if(value != null) { - if (!(value instanceof Integer)) { - // allow "true" and "false" - if (value instanceof Boolean) { - return (Boolean) value ? 
1 : 0; - } - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE258", String.format( - "ValueError: \"%s\" is not an integer",value.toString()))); - } - } - return value; - } - - public static Object validateFloat(Object value) { - if(value != null) { - if (!(value instanceof Float || value instanceof Double)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE259", String.format( - "ValueError: \"%s\" is not a float",value.toString()))); - } - } - return value; - } - - public static Object validateString(Object value) { - if(value != null) { - if (!(value instanceof String)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE260", String.format( - "ValueError: \'%s\' is not a string",value.toString()))); - } - } - return value; - } - - public static Object validateList(Object value) { - if(value != null) { - if (!(value instanceof ArrayList)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE261", String.format( - "ValueError: \"%s\" is not a list",value.toString()))); - } - } - return value; - } - - - @SuppressWarnings("unchecked") - public static Object validateRange(Object range) { - // list class check - validateList(range); - // validate range list has a min and max - if(range instanceof ArrayList && ((ArrayList)range).size() != 2) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE262", String.format( - "ValueError: \"%s\" is not a valid range",range.toString()))); - // too dangerous to continue... 
- return range; - } - // validate min and max are numerics or the keyword UNBOUNDED - boolean minTest = false; - boolean maxTest = false; - Object r0 = ((ArrayList)range).get(0); - Object r1 = ((ArrayList)range).get(1); - - if(!(r0 instanceof Integer) && !(r0 instanceof Float) || - !(r1 instanceof Integer) && !(r1 instanceof Float)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE263", String.format( - "ValueError: \"%s\" is not a valid range",range.toString()))); - // too dangerous to continue... - return range; - } - - Float min = 0.0F; - Float max = 0.0F; - if(r0 instanceof String && ((String)r0).equals(RANGE_UNBOUNDED)) { - minTest = true; - } - else { - min = r0 instanceof Integer ? ((Integer)r0).floatValue() : (Float)r0; - } - if(r1 instanceof String && ((String)r1).equals(RANGE_UNBOUNDED)) { - maxTest = true; - } - else { - max = r1 instanceof Integer ? ((Integer)r1).floatValue() : (Float)r1; - } - - // validate the max > min (account for UNBOUNDED) - if(!minTest && !maxTest) { - // Note: min == max is allowed - if(min > max) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE264", String.format( - "ValueError:\"%s\" is not a valid range",range.toString()))); - } - } - return range; - } - - @SuppressWarnings("unchecked") - public static Object validateValueInRange(Object value,Object range,String propName) { - // verify all 3 are numeric and convert to Floats - if(!(value instanceof Integer || value instanceof Float)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE265", String.format( - "ValueError: validateInRange: \"%s\" is not a number",range.toString()))); + + private static final String RANGE_UNBOUNDED = "UNBOUNDED"; + + private ValidateUtils() { + } + + public static Object strToNum(Object value) { + // Convert a string representation of a number into a numeric type + // TODO(TBD) we should not allow numeric values in, input should 
be str + if (value instanceof Number) { + return value; + } + try { + return Integer.parseInt((String) value); + } catch (NumberFormatException e) { + } + try { + return Float.parseFloat((String) value); + } catch (Exception e) { + } + return null; + } + + public static Object validateNumeric(Object value) { + if (value != null) { + if (!(value instanceof Number)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE257", String.format( + "ValueError: \"%s\" is not a numeric", value.toString()))); + } + } + return value; + } + + public static Object validateInteger(Object value) { + if (value != null) { + if (!(value instanceof Integer)) { + // allow "true" and "false" + if (value instanceof Boolean) { + return (Boolean) value ? 1 : 0; + } + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE258", String.format( + "ValueError: \"%s\" is not an integer", value.toString()))); + } + } + return value; + } + + public static Object validateFloat(Object value) { + if (value != null) { + if (!(value instanceof Float || value instanceof Double)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE259", String.format( + "ValueError: \"%s\" is not a float", value.toString()))); + } + } + return value; + } + + public static Object validateString(Object value) { + if (value != null) { + if (!(value instanceof String)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE260", String.format( + "ValueError: \'%s\' is not a string", value.toString()))); + } + } + return value; + } + + public static Object validateList(Object value) { + if (value != null) { + if (!(value instanceof ArrayList)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE261", String.format( + "ValueError: \"%s\" is not a list", value.toString()))); + } + } + return value; + } + + + @SuppressWarnings("unchecked") + public static 
Object validateRange(Object range) { + // list class check + validateList(range); + // validate range list has a min and max + if (range instanceof ArrayList && ((ArrayList) range).size() != 2) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE262", String.format( + "ValueError: \"%s\" is not a valid range", range.toString()))); + // too dangerous to continue... + return range; + } + // validate min and max are numerics or the keyword UNBOUNDED + boolean minTest = false; + boolean maxTest = false; + Object r0 = ((ArrayList) range).get(0); + Object r1 = ((ArrayList) range).get(1); + + if (!(r0 instanceof Integer) && !(r0 instanceof Float) + || !(r1 instanceof Integer) && !(r1 instanceof Float)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE263", String.format( + "ValueError: \"%s\" is not a valid range", range.toString()))); + // too dangerous to continue... + return range; + } + + Float min = 0.0F; + Float max = 0.0F; + if (r0 instanceof String && ((String) r0).equals(RANGE_UNBOUNDED)) { + minTest = true; + } else { + min = r0 instanceof Integer ? ((Integer) r0).floatValue() : (Float) r0; + } + if (r1 instanceof String && ((String) r1).equals(RANGE_UNBOUNDED)) { + maxTest = true; + } else { + max = r1 instanceof Integer ? 
((Integer) r1).floatValue() : (Float) r1; + } + + // validate the max > min (account for UNBOUNDED) + if (!minTest && !maxTest) { + // Note: min == max is allowed + if (min > max) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE264", String.format( + "ValueError:\"%s\" is not a valid range", range.toString()))); + } + } + return range; + } + + @SuppressWarnings("unchecked") + public static Object validateValueInRange(Object value, Object range, String propName) { + // verify all 3 are numeric and convert to Floats + if (!(value instanceof Integer || value instanceof Float)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE265", String.format( + "ValueError: validateInRange: \"%s\" is not a number", range.toString()))); return value; - } - Float fval = value instanceof Integer ? ((Integer)value).floatValue() : (Float)value; - - ////////////////////////// - //"validateRange(range);" - ////////////////////////// - // better safe than sorry... - // validate that range list has a min and max - if(range instanceof ArrayList && ((ArrayList)range).size() != 2) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE266", String.format( - "ValueError: \"%s\" is not a valid range",range.toString()))); - // too dangerous to continue... - return value; - } - // validate min and max are numerics or the keyword UNBOUNDED - boolean minTest = false; - boolean maxTest = false; - Object r0 = ((ArrayList)range).get(0); - Object r1 = ((ArrayList)range).get(1); - - if(!(r0 instanceof Integer) && !(r0 instanceof Float) || - !(r1 instanceof Integer) && !(r1 instanceof Float)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE267", String.format( - "ValueError: \"%s\" is not a valid range",range.toString()))); - // too dangerous to continue... 
- return value; - } - - Float min = 0.0F; - Float max = 0.0F; - if(r0 instanceof String && ((String)r0).equals(RANGE_UNBOUNDED)) { - minTest = true; - } - else { - min = r0 instanceof Integer ? ((Integer)r0).floatValue() : (Float)r0; - } - if(r1 instanceof String && ((String)r1).equals(RANGE_UNBOUNDED)) { - maxTest = true; - } - else { - max = r1 instanceof Integer ? ((Integer)r1).floatValue() : (Float)r1; - } - - // validate the max > min (account for UNBOUNDED) - if(!minTest && !maxTest) { - // Note: min == max is allowed - if(min > max) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE268", String.format( - "ValueError:\"%s\" is not a valid range",range.toString()))); - } - } - // finally... - boolean bError = false; - //Note: value is valid if equal to min - if(!minTest) { - if(fval < min) { - bError = true; - } - } - // Note: value is valid if equal to max - if(!maxTest) { - if(fval > max) { - bError = true; - } - } - if(bError) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE269", String.format( - "RangeValueError: Property \"%s\", \"%s\" not in range [\"%s\" - \"%s\"", - propName,value.toString(),r0.toString(),r1.toString()))); - } - return value; - } - - public static Object validateMap(Object ob) { - if(ob != null) { - if (!(ob instanceof LinkedHashMap)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE270", String.format( - "ValueError\"%s\" is not a map.",ob.toString()))); - } - } - return ob; - } - - public static Object validateBoolean(Object value) { - if(value != null) { - if (value instanceof Boolean) { - return value; - } - if (value instanceof String) { - String normalized = ((String) value).toLowerCase(); - if (normalized.equals("true") || normalized.equals("false")) { - return normalized.equals("true"); - } - } - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE271", String.format( - 
"ValueError: \"%s\" is not a boolean",value.toString()))); - } - return value; - } - - public static Object validateTimestamp(Object value) { - /* + } + Float fval = value instanceof Integer ? ((Integer) value).floatValue() : (Float) value; + + ////////////////////////// + //"validateRange(range);" + ////////////////////////// + // better safe than sorry... + // validate that range list has a min and max + if (range instanceof ArrayList && ((ArrayList) range).size() != 2) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE266", String.format( + "ValueError: \"%s\" is not a valid range", range.toString()))); + // too dangerous to continue... + return value; + } + // validate min and max are numerics or the keyword UNBOUNDED + boolean minTest = false; + boolean maxTest = false; + Object r0 = ((ArrayList) range).get(0); + Object r1 = ((ArrayList) range).get(1); + + if (!(r0 instanceof Integer) && !(r0 instanceof Float) + || !(r1 instanceof Integer) && !(r1 instanceof Float)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE267", String.format( + "ValueError: \"%s\" is not a valid range", range.toString()))); + // too dangerous to continue... + return value; + } + + Float min = 0.0F; + Float max = 0.0F; + if (r0 instanceof String && ((String) r0).equals(RANGE_UNBOUNDED)) { + minTest = true; + } else { + min = r0 instanceof Integer ? ((Integer) r0).floatValue() : (Float) r0; + } + if (r1 instanceof String && ((String) r1).equals(RANGE_UNBOUNDED)) { + maxTest = true; + } else { + max = r1 instanceof Integer ? ((Integer) r1).floatValue() : (Float) r1; + } + + // validate the max > min (account for UNBOUNDED) + if (!minTest && !maxTest) { + // Note: min == max is allowed + if (min > max) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE268", String.format( + "ValueError:\"%s\" is not a valid range", range.toString()))); + } + } + // finally... 
+ boolean bError = false; + //Note: value is valid if equal to min + if (!minTest) { + if (fval < min) { + bError = true; + } + } + // Note: value is valid if equal to max + if (!maxTest) { + if (fval > max) { + bError = true; + } + } + if (bError) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE269", String.format( + "RangeValueError: Property \"%s\", \"%s\" not in range [\"%s\" - \"%s\"", + propName, value.toString(), r0.toString(), r1.toString()))); + } + return value; + } + + public static Object validateMap(Object ob) { + if (ob != null) { + if (!(ob instanceof LinkedHashMap)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE270", String.format( + "ValueError\"%s\" is not a map.", ob.toString()))); + } + } + return ob; + } + + public static Object validateBoolean(Object value) { + if (value != null) { + if (value instanceof Boolean) { + return value; + } + if (value instanceof String) { + String normalized = ((String) value).toLowerCase(); + if (normalized.equals("true") || normalized.equals("false")) { + return normalized.equals("true"); + } + } + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE271", String.format( + "ValueError: \"%s\" is not a boolean", value.toString()))); + } + return value; + } + + public static Object validateTimestamp(Object value) { + + /* try: # Note: we must return our own exception message # as dateutil's parser returns different types / values on @@ -280,19 +275,18 @@ public class ValidateUtils { ValueError(_('"%(val)s" is not a valid timestamp. 
"%(msg)s"') % {'val': value, 'msg': original_err_msg})) */ - - // timestamps are loaded as Date objects by the YAML parser - if(value != null) { - if (!(value instanceof Date)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE272", String.format( - "ValueError: \"%s\" is not a valid timestamp", - value.toString()))); - - } - } - return value; - } - + // timestamps are loaded as Date objects by the YAML parser + if (value != null) { + if (!(value instanceof Date)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE272", String.format( + "ValueError: \"%s\" is not a valid timestamp", + value.toString()))); + + } + } + return value; + } + } /*python diff --git a/src/test/java/org/onap/sdc/toscaparser/api/GetValidationIssues.java b/src/test/java/org/onap/sdc/toscaparser/api/GetValidationIssues.java index 3902219..140a6e9 100644 --- a/src/test/java/org/onap/sdc/toscaparser/api/GetValidationIssues.java +++ b/src/test/java/org/onap/sdc/toscaparser/api/GetValidationIssues.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -29,6 +29,7 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Scanner; + //Generate excel file, include all validation issues errors in jtosca //the error java code, the line number and file name for each error. 
public class GetValidationIssues { @@ -37,13 +38,13 @@ public class GetValidationIssues { public static List data = new ArrayList<>(); public static void main(String[] args) { - System.out.println("GetAllValidationIssues - path to project files Directory is " + Arrays.toString(args)); - File jtoscaFiles = new File(args[0]+ "\\jtosca\\src\\main\\java\\org\\onap\\sdc\\toscaparser\\api"); + System.out.println("GetAllValidationIssues - path to project files Directory is " + Arrays.toString(args)); + File jtoscaFiles = new File(args[0] + "\\jtosca\\src\\main\\java\\org\\onap\\sdc\\toscaparser\\api"); try { printFiles(jtoscaFiles); - fileWriter = new CSVWriter(new FileWriter(args[1]+"\\JToscaValidationIssues_"+System.currentTimeMillis()+".csv"), '\t'); - fileWriter.writeNext(new String[] {"Error Message", "Class Name", "Line No."}, false); + fileWriter = new CSVWriter(new FileWriter(args[1] + "\\JToscaValidationIssues_" + System.currentTimeMillis() + ".csv"), '\t'); + fileWriter.writeNext(new String[]{"Error Message", "Class Name", "Line No."}, false); fileWriter.writeAll(data, false); } catch (IOException e) { e.printStackTrace(); diff --git a/src/test/java/org/onap/sdc/toscaparser/api/JToscaImportTest.java b/src/test/java/org/onap/sdc/toscaparser/api/JToscaImportTest.java index 13e17ce..5876ac7 100644 --- a/src/test/java/org/onap/sdc/toscaparser/api/JToscaImportTest.java +++ b/src/test/java/org/onap/sdc/toscaparser/api/JToscaImportTest.java @@ -5,9 +5,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -39,81 +39,81 @@ import static org.junit.Assert.*; public class JToscaImportTest { - @Test - public void testNoMissingTypeValidationError() throws JToscaException { - String fileStr = JToscaImportTest.class.getClassLoader().getResource("csars/sdc-onboarding_csar.csar") - .getFile(); - File file = new File(fileStr); - new ToscaTemplate(file.getAbsolutePath(), null, true, null); - List missingTypeErrors = ThreadLocalsHolder.getCollector().getValidationIssueReport().stream() - .filter(s -> s.contains("JE136")).collect(Collectors.toList()); - assertEquals(0, missingTypeErrors.size()); - } - - @Test - public void testNoStackOverFlowError() { - Exception jte = null; - try { - String fileStr = JToscaImportTest.class.getClassLoader().getResource("csars/sdc-onboarding_csar.csar") - .getFile(); - File file = new File(fileStr); - new ToscaTemplate(file.getAbsolutePath(), null, true, null); - } catch (Exception e) { - jte = e; - } - assertEquals(null, jte); - } - - @Test - public void testNoInvalidImports() throws JToscaException { - List fileNames = new ArrayList<>(); - fileNames.add("csars/tmpCSAR_Huawei_vSPGW_fixed.csar"); - fileNames.add("csars/sdc-onboarding_csar.csar"); - fileNames.add("csars/resource-Spgw-csar-ZTE.csar"); - - for (String fileName : fileNames) { - String fileStr = JToscaImportTest.class.getClassLoader().getResource(fileName).getFile(); - File file = new File(fileStr); - new ToscaTemplate(file.getAbsolutePath(), null, true, null); - List invalidImportErrors = ThreadLocalsHolder.getCollector().getValidationIssueReport().stream() - .filter(s -> s.contains("JE195")).collect(Collectors.toList()); - assertEquals(0, invalidImportErrors.size()); - } - } - - @Test - public void testParseAnnotations() throws JToscaException { - - String fileStr = JToscaImportTest.class.getClassLoader().getResource("csars/service-AdiodVmxVpeBvService-csar.csar").getFile(); - File file = new File(fileStr); - ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), 
null, true, null); - - List inputs = toscaTemplate.getInputs(); - assertNotNull(inputs); - assertTrue(inputs.stream().filter(i -> i.getAnnotations() != null).collect(Collectors.toList()).isEmpty()); - - inputs.forEach(Input::parseAnnotations); - assertTrue(!inputs.stream().filter(i -> i.getAnnotations() != null).collect(Collectors.toList()).isEmpty()); - } - - @Test - public void testGetInputsWithAndWithoutAnnotations() throws JToscaException { - - String fileStr = JToscaImportTest.class.getClassLoader().getResource("csars/service-AdiodVmxVpeBvService-csar.csar").getFile(); - File file = new File(fileStr); - ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); - List inputs = toscaTemplate.getInputs(); - assertNotNull(inputs); - assertTrue(inputs.stream().filter(i -> i.getAnnotations() != null).collect(Collectors.toList()).isEmpty()); - - inputs = toscaTemplate.getInputs(true); - assertNotNull(inputs); - validateInputsAnnotations(inputs); - - inputs = toscaTemplate.getInputs(false); - assertNotNull(inputs); - assertTrue(inputs.stream().filter(i -> i.getAnnotations() != null).collect(Collectors.toList()).isEmpty()); - } + @Test + public void testNoMissingTypeValidationError() throws JToscaException { + String fileStr = JToscaImportTest.class.getClassLoader().getResource("csars/sdc-onboarding_csar.csar") + .getFile(); + File file = new File(fileStr); + new ToscaTemplate(file.getAbsolutePath(), null, true, null); + List missingTypeErrors = ThreadLocalsHolder.getCollector().getValidationIssueReport().stream() + .filter(s -> s.contains("JE136")).collect(Collectors.toList()); + assertEquals(0, missingTypeErrors.size()); + } + + @Test + public void testNoStackOverFlowError() { + Exception jte = null; + try { + String fileStr = JToscaImportTest.class.getClassLoader().getResource("csars/sdc-onboarding_csar.csar") + .getFile(); + File file = new File(fileStr); + new ToscaTemplate(file.getAbsolutePath(), null, true, null); + } catch 
(Exception e) { + jte = e; + } + assertEquals(null, jte); + } + + @Test + public void testNoInvalidImports() throws JToscaException { + List fileNames = new ArrayList<>(); + fileNames.add("csars/tmpCSAR_Huawei_vSPGW_fixed.csar"); + fileNames.add("csars/sdc-onboarding_csar.csar"); + fileNames.add("csars/resource-Spgw-csar-ZTE.csar"); + + for (String fileName : fileNames) { + String fileStr = JToscaImportTest.class.getClassLoader().getResource(fileName).getFile(); + File file = new File(fileStr); + new ToscaTemplate(file.getAbsolutePath(), null, true, null); + List invalidImportErrors = ThreadLocalsHolder.getCollector().getValidationIssueReport().stream() + .filter(s -> s.contains("JE195")).collect(Collectors.toList()); + assertEquals(0, invalidImportErrors.size()); + } + } + + @Test + public void testParseAnnotations() throws JToscaException { + + String fileStr = JToscaImportTest.class.getClassLoader().getResource("csars/service-AdiodVmxVpeBvService-csar.csar").getFile(); + File file = new File(fileStr); + ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); + + List inputs = toscaTemplate.getInputs(); + assertNotNull(inputs); + assertTrue(inputs.stream().filter(i -> i.getAnnotations() != null).collect(Collectors.toList()).isEmpty()); + + inputs.forEach(Input::parseAnnotations); + assertTrue(!inputs.stream().filter(i -> i.getAnnotations() != null).collect(Collectors.toList()).isEmpty()); + } + + @Test + public void testGetInputsWithAndWithoutAnnotations() throws JToscaException { + + String fileStr = JToscaImportTest.class.getClassLoader().getResource("csars/service-AdiodVmxVpeBvService-csar.csar").getFile(); + File file = new File(fileStr); + ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); + List inputs = toscaTemplate.getInputs(); + assertNotNull(inputs); + assertTrue(inputs.stream().filter(i -> i.getAnnotations() != null).collect(Collectors.toList()).isEmpty()); + + inputs = 
toscaTemplate.getInputs(true); + assertNotNull(inputs); + validateInputsAnnotations(inputs); + + inputs = toscaTemplate.getInputs(false); + assertNotNull(inputs); + assertTrue(inputs.stream().filter(i -> i.getAnnotations() != null).collect(Collectors.toList()).isEmpty()); + } @Test public void testGetPropertyNameTest() throws JToscaException { @@ -123,7 +123,7 @@ public class JToscaImportTest { ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); NodeTemplate nodeTemplate = toscaTemplate.getNodeTemplates().get(0); - ArrayList valueList = (ArrayList)nodeTemplate.getPropertyValueFromTemplatesByName("vmxvpfe_sriov41_0_port_vlanfilter"); + ArrayList valueList = (ArrayList) nodeTemplate.getPropertyValueFromTemplatesByName("vmxvpfe_sriov41_0_port_vlanfilter"); assertEquals(4, valueList.size()); assertEquals("vPE", (String) nodeTemplate.getPropertyValueFromTemplatesByName("nf_role")); @@ -131,24 +131,24 @@ public class JToscaImportTest { assertNull(nodeTemplate.getPropertyValueFromTemplatesByName("test")); } - @Test - public void testGetParentNodeTemplateTest() throws JToscaException { - - String fileStr = JToscaImportTest.class.getClassLoader().getResource("csars/service-AdiodVmxVpeBvService-csar.csar").getFile(); - File file = new File(fileStr); - ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); - NodeTemplate nodeTemplate = toscaTemplate.getNodeTemplates().get(0); - //parent of this VF is service (null) - assertNull(nodeTemplate.getParentNodeTemplate()); - List children = nodeTemplate.getSubMappingToscaTemplate().getNodeTemplates(); - assertFalse(children.isEmpty()); - NodeTemplate cVFC = children.get(4); - //parent is the VF above - assertEquals("2017-488_ADIOD-vPE 0", cVFC.getParentNodeTemplate().getName()); - List children1 = cVFC.getSubMappingToscaTemplate().getNodeTemplates(); - assertFalse(children1.isEmpty()); - //parent is the CVFC above - assertEquals(cVFC, 
children1.get(0).getParentNodeTemplate()); + @Test + public void testGetParentNodeTemplateTest() throws JToscaException { + + String fileStr = JToscaImportTest.class.getClassLoader().getResource("csars/service-AdiodVmxVpeBvService-csar.csar").getFile(); + File file = new File(fileStr); + ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); + NodeTemplate nodeTemplate = toscaTemplate.getNodeTemplates().get(0); + //parent of this VF is service (null) + assertNull(nodeTemplate.getParentNodeTemplate()); + List children = nodeTemplate.getSubMappingToscaTemplate().getNodeTemplates(); + assertFalse(children.isEmpty()); + NodeTemplate cVFC = children.get(4); + //parent is the VF above + assertEquals("2017-488_ADIOD-vPE 0", cVFC.getParentNodeTemplate().getName()); + List children1 = cVFC.getSubMappingToscaTemplate().getNodeTemplates(); + assertFalse(children1.isEmpty()); + //parent is the CVFC above + assertEquals(cVFC, children1.get(0).getParentNodeTemplate()); /* @@ -161,149 +161,149 @@ public class JToscaImportTest { policies = tt.getPolicies(); */ - } - - @Test - public void testNullValueHasNoNullPointerException() throws JToscaException { - - String fileStr = JToscaImportTest.class.getClassLoader().getResource("csars/service-JennyVtsbcKarunaSvc-csar.csar").getFile(); - File file = new File(fileStr); - ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); - List inputs = toscaTemplate.getInputs(); - assertNotNull(inputs); - } - - @Test - public void testGetPolicyMetadata() throws JToscaException { - String fileStr = JToscaImportTest.class.getClassLoader().getResource("csars/service-NetworkCloudVnfServiceMock-csar.csar").getFile(); - File file = new File(fileStr); - ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); - ArrayList policies = toscaTemplate.getPolicies(); - assertNotNull(policies); - assertEquals(1, policies.size()); - 
assertEquals("org.openecomp.policies.External", policies.get(0).getType()); - assertEquals("adf03496-bf87-43cf-b20a-450e47cb44bd", policies.get(0).getMetaData().getOrDefault("UUID", "").toString()); - assertTrue(policies.get(0).getMetaData().getOrDefault("UUID_test", "").toString().isEmpty()); - } - - @Test - public void testGetPolicyMetadataObj() throws JToscaException { - String fileStr = JToscaImportTest.class.getClassLoader().getResource("csars/service-NetworkCloudVnfServiceMock-csar.csar").getFile(); - File file = new File(fileStr); - ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); - ArrayList policies = toscaTemplate.getPolicies(); - assertNotNull(policies); - assertEquals(1, policies.size()); - assertEquals("adf03496-bf87-43cf-b20a-450e47cb44bd", policies.get(0).getMetaDataObj().getAllProperties().getOrDefault("UUID", "").toString()); - assertTrue(policies.get(0).getMetaDataObj().getAllProperties().getOrDefault("name_test", "").toString().isEmpty()); - } + } + + @Test + public void testNullValueHasNoNullPointerException() throws JToscaException { + + String fileStr = JToscaImportTest.class.getClassLoader().getResource("csars/service-JennyVtsbcKarunaSvc-csar.csar").getFile(); + File file = new File(fileStr); + ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); + List inputs = toscaTemplate.getInputs(); + assertNotNull(inputs); + } + + @Test + public void testGetPolicyMetadata() throws JToscaException { + String fileStr = JToscaImportTest.class.getClassLoader().getResource("csars/service-NetworkCloudVnfServiceMock-csar.csar").getFile(); + File file = new File(fileStr); + ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); + ArrayList policies = toscaTemplate.getPolicies(); + assertNotNull(policies); + assertEquals(1, policies.size()); + assertEquals("org.openecomp.policies.External", policies.get(0).getType()); + 
assertEquals("adf03496-bf87-43cf-b20a-450e47cb44bd", policies.get(0).getMetaData().getOrDefault("UUID", "").toString()); + assertTrue(policies.get(0).getMetaData().getOrDefault("UUID_test", "").toString().isEmpty()); + } + + @Test + public void testGetPolicyMetadataObj() throws JToscaException { + String fileStr = JToscaImportTest.class.getClassLoader().getResource("csars/service-NetworkCloudVnfServiceMock-csar.csar").getFile(); + File file = new File(fileStr); + ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); + ArrayList policies = toscaTemplate.getPolicies(); + assertNotNull(policies); + assertEquals(1, policies.size()); + assertEquals("adf03496-bf87-43cf-b20a-450e47cb44bd", policies.get(0).getMetaDataObj().getAllProperties().getOrDefault("UUID", "").toString()); + assertTrue(policies.get(0).getMetaDataObj().getAllProperties().getOrDefault("name_test", "").toString().isEmpty()); + } private void validateInputsAnnotations(List inputs) { - List inputsWithAnnotations = inputs.stream().filter(i -> i.getAnnotations() != null) - .collect(Collectors.toList()); - assertTrue(!inputs.isEmpty()); - inputsWithAnnotations.stream().forEach(i -> validateAnnotations(i)); - } - - private void validateAnnotations(Input input) { - assertNotNull(input.getAnnotations()); - assertEquals(input.getAnnotations().size(), 1); - Annotation annotation = input.getAnnotations().get("source"); - assertEquals(annotation.getName(), "source"); - assertEquals(annotation.getType().toLowerCase(), "org.openecomp.annotations.source"); - assertNotNull(annotation.getProperties()); - Optional source_type = annotation.getProperties().stream() - .filter(p -> p.getName().equals("source_type")).findFirst(); - assertTrue(source_type.isPresent()); - assertEquals(source_type.get().getValue(), "HEAT"); - } - - private static final String TEST_DATATYPE_FILENAME ="csars/dataTypes-test-service.csar"; - private static final String TEST_DATATYPE_TEST1 = "TestType1"; - private 
static final String TEST_DATATYPE_TEST2 = "TestType2"; - private static final String TEST_DATATYPE_PROPERTY_STR = "strdata"; - private static final String TEST_DATATYPE_PROPERTY_INT = "intdata"; - private static final String TEST_DATATYPE_PROPERTY_LIST = "listdata"; - private static final String TEST_DATATYPE_PROPERTY_TYPE = "type"; - private static final String TEST_DATATYPE_PROPERTY_ENTRY_SCHEMA = "entry_schema"; - private static final String TEST_DATATYPE_TOSTRING = "data_types="; - - @Test - public void testGetDataType() throws JToscaException { - String fileStr = JToscaImportTest.class.getClassLoader().getResource(TEST_DATATYPE_FILENAME).getFile(); - File file = new File(fileStr); - ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); - HashSet dataTypes = toscaTemplate.getDataTypes(); - assertThat(dataTypes,notNullValue()); - assertThat(dataTypes.size(),is(2)); - - for(DataType dataType: dataTypes){ - LinkedHashMap properties; - PropertyDef property; - if(dataType.getType().equals(TEST_DATATYPE_TEST1)){ - properties = dataType.getAllProperties(); - property = properties.get(TEST_DATATYPE_PROPERTY_STR); - assertThat(property,notNullValue()); - assertThat(property.getName(),is(TEST_DATATYPE_PROPERTY_STR)); - assertThat( property.getSchema().get(TEST_DATATYPE_PROPERTY_TYPE),is(Schema.STRING)); - } - if(dataType.getType().equals(TEST_DATATYPE_TEST2)){ - properties = dataType.getAllProperties(); - property = properties.get(TEST_DATATYPE_PROPERTY_INT); - assertThat(property,notNullValue()); - assertThat(property.getName(),is(TEST_DATATYPE_PROPERTY_INT)); - assertThat(property.getSchema().get(TEST_DATATYPE_PROPERTY_TYPE),is(Schema.INTEGER)); - - property = properties.get(TEST_DATATYPE_PROPERTY_LIST); - assertThat(property,notNullValue()); - assertThat(property.getName(),is(TEST_DATATYPE_PROPERTY_LIST)); - assertThat(property.getSchema().get(TEST_DATATYPE_PROPERTY_TYPE),is(Schema.LIST)); - 
assertThat(property.getSchema().get(TEST_DATATYPE_PROPERTY_ENTRY_SCHEMA),is(TEST_DATATYPE_TEST1)); - - assertThat((LinkedHashMap) toscaTemplate.getTopologyTemplate().getCustomDefs().get(TEST_DATATYPE_TEST1),notNullValue()); - assertThat((LinkedHashMap) toscaTemplate.getTopologyTemplate().getCustomDefs().get(TEST_DATATYPE_TEST2),notNullValue()); - assertThat(toscaTemplate.toString(),containsString(TEST_DATATYPE_TOSTRING)); - } - } - - } - - @Test - public void testGetInputValidate() throws JToscaException { - String fileStr = JToscaImportTest.class.getClassLoader().getResource(TEST_DATATYPE_FILENAME).getFile(); - File file = new File(fileStr); - ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); - HashSet dataTypes = toscaTemplate.getDataTypes(); - assertThat(dataTypes,notNullValue()); - assertThat(dataTypes.size(),is(2)); - - for(DataType dataType: dataTypes) { - LinkedHashMap properties; - PropertyDef property; - if(dataType.getType().equals(TEST_DATATYPE_TEST1)) { - properties = dataType.getAllProperties(); - property = properties.get(TEST_DATATYPE_PROPERTY_STR); - assertThat(property,notNullValue()); - assertThat(property.getName(),is(TEST_DATATYPE_PROPERTY_STR)); - assertThat(property.getSchema().get(TEST_DATATYPE_PROPERTY_TYPE),is(Schema.STRING)); - } - if(dataType.getType().equals(TEST_DATATYPE_TEST2)) { - properties = dataType.getAllProperties(); - property = properties.get(TEST_DATATYPE_PROPERTY_INT); - assertThat(property,notNullValue()); - assertThat(property.getName(),is(TEST_DATATYPE_PROPERTY_INT)); - assertThat(property.getSchema().get(TEST_DATATYPE_PROPERTY_TYPE),is(Schema.INTEGER)); - - property = properties.get(TEST_DATATYPE_PROPERTY_LIST); - assertThat(property,notNullValue()); - assertThat(property.getName(),is(TEST_DATATYPE_PROPERTY_LIST)); - assertThat(property.getSchema().get(TEST_DATATYPE_PROPERTY_TYPE),is(Schema.LIST)); - 
assertThat(property.getSchema().get(TEST_DATATYPE_PROPERTY_ENTRY_SCHEMA),is(TEST_DATATYPE_TEST1)); - - assertThat((LinkedHashMap) toscaTemplate.getTopologyTemplate().getCustomDefs().get(TEST_DATATYPE_TEST1),notNullValue()); - assertThat((LinkedHashMap) toscaTemplate.getTopologyTemplate().getCustomDefs().get(TEST_DATATYPE_TEST2),notNullValue()); - assertThat(toscaTemplate.toString(),containsString(TEST_DATATYPE_TOSTRING)); - } - } - } + List inputsWithAnnotations = inputs.stream().filter(i -> i.getAnnotations() != null) + .collect(Collectors.toList()); + assertTrue(!inputs.isEmpty()); + inputsWithAnnotations.stream().forEach(i -> validateAnnotations(i)); + } + + private void validateAnnotations(Input input) { + assertNotNull(input.getAnnotations()); + assertEquals(input.getAnnotations().size(), 1); + Annotation annotation = input.getAnnotations().get("source"); + assertEquals(annotation.getName(), "source"); + assertEquals(annotation.getType().toLowerCase(), "org.openecomp.annotations.source"); + assertNotNull(annotation.getProperties()); + Optional source_type = annotation.getProperties().stream() + .filter(p -> p.getName().equals("source_type")).findFirst(); + assertTrue(source_type.isPresent()); + assertEquals(source_type.get().getValue(), "HEAT"); + } + + private static final String TEST_DATATYPE_FILENAME = "csars/dataTypes-test-service.csar"; + private static final String TEST_DATATYPE_TEST1 = "TestType1"; + private static final String TEST_DATATYPE_TEST2 = "TestType2"; + private static final String TEST_DATATYPE_PROPERTY_STR = "strdata"; + private static final String TEST_DATATYPE_PROPERTY_INT = "intdata"; + private static final String TEST_DATATYPE_PROPERTY_LIST = "listdata"; + private static final String TEST_DATATYPE_PROPERTY_TYPE = "type"; + private static final String TEST_DATATYPE_PROPERTY_ENTRY_SCHEMA = "entry_schema"; + private static final String TEST_DATATYPE_TOSTRING = "data_types="; + + @Test + public void testGetDataType() throws JToscaException { 
+ String fileStr = JToscaImportTest.class.getClassLoader().getResource(TEST_DATATYPE_FILENAME).getFile(); + File file = new File(fileStr); + ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); + HashSet dataTypes = toscaTemplate.getDataTypes(); + assertThat(dataTypes, notNullValue()); + assertThat(dataTypes.size(), is(2)); + + for (DataType dataType : dataTypes) { + LinkedHashMap properties; + PropertyDef property; + if (dataType.getType().equals(TEST_DATATYPE_TEST1)) { + properties = dataType.getAllProperties(); + property = properties.get(TEST_DATATYPE_PROPERTY_STR); + assertThat(property, notNullValue()); + assertThat(property.getName(), is(TEST_DATATYPE_PROPERTY_STR)); + assertThat(property.getSchema().get(TEST_DATATYPE_PROPERTY_TYPE), is(Schema.STRING)); + } + if (dataType.getType().equals(TEST_DATATYPE_TEST2)) { + properties = dataType.getAllProperties(); + property = properties.get(TEST_DATATYPE_PROPERTY_INT); + assertThat(property, notNullValue()); + assertThat(property.getName(), is(TEST_DATATYPE_PROPERTY_INT)); + assertThat(property.getSchema().get(TEST_DATATYPE_PROPERTY_TYPE), is(Schema.INTEGER)); + + property = properties.get(TEST_DATATYPE_PROPERTY_LIST); + assertThat(property, notNullValue()); + assertThat(property.getName(), is(TEST_DATATYPE_PROPERTY_LIST)); + assertThat(property.getSchema().get(TEST_DATATYPE_PROPERTY_TYPE), is(Schema.LIST)); + assertThat(property.getSchema().get(TEST_DATATYPE_PROPERTY_ENTRY_SCHEMA), is(TEST_DATATYPE_TEST1)); + + assertThat((LinkedHashMap) toscaTemplate.getTopologyTemplate().getCustomDefs().get(TEST_DATATYPE_TEST1), notNullValue()); + assertThat((LinkedHashMap) toscaTemplate.getTopologyTemplate().getCustomDefs().get(TEST_DATATYPE_TEST2), notNullValue()); + assertThat(toscaTemplate.toString(), containsString(TEST_DATATYPE_TOSTRING)); + } + } + + } + + @Test + public void testGetInputValidate() throws JToscaException { + String fileStr = 
JToscaImportTest.class.getClassLoader().getResource(TEST_DATATYPE_FILENAME).getFile(); + File file = new File(fileStr); + ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); + HashSet dataTypes = toscaTemplate.getDataTypes(); + assertThat(dataTypes, notNullValue()); + assertThat(dataTypes.size(), is(2)); + + for (DataType dataType : dataTypes) { + LinkedHashMap properties; + PropertyDef property; + if (dataType.getType().equals(TEST_DATATYPE_TEST1)) { + properties = dataType.getAllProperties(); + property = properties.get(TEST_DATATYPE_PROPERTY_STR); + assertThat(property, notNullValue()); + assertThat(property.getName(), is(TEST_DATATYPE_PROPERTY_STR)); + assertThat(property.getSchema().get(TEST_DATATYPE_PROPERTY_TYPE), is(Schema.STRING)); + } + if (dataType.getType().equals(TEST_DATATYPE_TEST2)) { + properties = dataType.getAllProperties(); + property = properties.get(TEST_DATATYPE_PROPERTY_INT); + assertThat(property, notNullValue()); + assertThat(property.getName(), is(TEST_DATATYPE_PROPERTY_INT)); + assertThat(property.getSchema().get(TEST_DATATYPE_PROPERTY_TYPE), is(Schema.INTEGER)); + + property = properties.get(TEST_DATATYPE_PROPERTY_LIST); + assertThat(property, notNullValue()); + assertThat(property.getName(), is(TEST_DATATYPE_PROPERTY_LIST)); + assertThat(property.getSchema().get(TEST_DATATYPE_PROPERTY_TYPE), is(Schema.LIST)); + assertThat(property.getSchema().get(TEST_DATATYPE_PROPERTY_ENTRY_SCHEMA), is(TEST_DATATYPE_TEST1)); + + assertThat((LinkedHashMap) toscaTemplate.getTopologyTemplate().getCustomDefs().get(TEST_DATATYPE_TEST1), notNullValue()); + assertThat((LinkedHashMap) toscaTemplate.getTopologyTemplate().getCustomDefs().get(TEST_DATATYPE_TEST2), notNullValue()); + assertThat(toscaTemplate.toString(), containsString(TEST_DATATYPE_TOSTRING)); + } + } + } } diff --git a/src/test/java/org/onap/sdc/toscaparser/api/JToscaMetadataParse.java b/src/test/java/org/onap/sdc/toscaparser/api/JToscaMetadataParse.java 
index f8295d7..3f5290d 100644 --- a/src/test/java/org/onap/sdc/toscaparser/api/JToscaMetadataParse.java +++ b/src/test/java/org/onap/sdc/toscaparser/api/JToscaMetadataParse.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -52,30 +52,30 @@ public class JToscaMetadataParse { File file = new File(fileStr); ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); int validationIssuesCaught = ThreadLocalsHolder.getCollector().validationIssuesCaught(); - assertTrue(validationIssuesCaught == 0 ); + assertTrue(validationIssuesCaught == 0); } - + @Test public void testEmptyCsar() throws JToscaException { String fileStr = JToscaMetadataParse.class.getClassLoader().getResource("csars/emptyCsar.csar").getFile(); File file = new File(fileStr); try { - ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); + ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); } catch (JToscaException e) { - assertTrue(e.getCode().equals(JToscaErrorCodes.INVALID_CSAR_FORMAT.getValue())); - } + assertTrue(e.getCode().equals(JToscaErrorCodes.INVALID_CSAR_FORMAT.getValue())); + } int validationIssuesCaught = ThreadLocalsHolder.getCollector().validationIssuesCaught(); - assertTrue(validationIssuesCaught == 0 ); + assertTrue(validationIssuesCaught == 0); } - + @Test public void testEmptyPath() throws JToscaException { String fileStr = JToscaMetadataParse.class.getClassLoader().getResource("").getFile(); File file = new File(fileStr); try { - ToscaTemplate toscaTemplate = new 
ToscaTemplate(file.getAbsolutePath(), null, true, null); - }catch (JToscaException e) { - assertTrue(e.getCode().equals(JToscaErrorCodes.PATH_NOT_VALID.getValue())); - } + ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); + } catch (JToscaException e) { + assertTrue(e.getCode().equals(JToscaErrorCodes.PATH_NOT_VALID.getValue())); + } } } diff --git a/src/test/java/org/onap/sdc/toscaparser/api/elements/CalculatePropertyByPathTest.java b/src/test/java/org/onap/sdc/toscaparser/api/elements/CalculatePropertyByPathTest.java index eaf182e..fd84d6e 100644 --- a/src/test/java/org/onap/sdc/toscaparser/api/elements/CalculatePropertyByPathTest.java +++ b/src/test/java/org/onap/sdc/toscaparser/api/elements/CalculatePropertyByPathTest.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. diff --git a/src/test/java/org/onap/sdc/toscaparser/api/elements/EntityTypeTest.java b/src/test/java/org/onap/sdc/toscaparser/api/elements/EntityTypeTest.java index 271eb59..d65de28 100644 --- a/src/test/java/org/onap/sdc/toscaparser/api/elements/EntityTypeTest.java +++ b/src/test/java/org/onap/sdc/toscaparser/api/elements/EntityTypeTest.java @@ -7,9 +7,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -31,45 +31,45 @@ import static org.junit.Assert.assertEquals; public class EntityTypeTest { - private static final Map origMap = EntityType.TOSCA_DEF; + private static final Map origMap = EntityType.TOSCA_DEF; - @Test - public void testUpdateDefinitions() throws Exception { + @Test + public void testUpdateDefinitions() throws Exception { - Map testData = new HashMap<>(); - testData.put("tosca.nodes.nfv.VNF", "{derived_from=tosca.nodes.Root, properties={id={type=string, description=ID of this VNF}, vendor={type=string, description=name of the vendor who generate this VNF}, version={type=version, description=version of the software for this VNF}}, requirements=[{virtualLink={capability=tosca.capabilities.nfv.VirtualLinkable, relationship=tosca.relationships.nfv.VirtualLinksTo, node=tosca.nodes.nfv.VL}}]}"); - testData.put("tosca.nodes.nfv.VDU", "{derived_from=tosca.nodes.Compute, capabilities={high_availability={type=tosca.capabilities.nfv.HA}, virtualbinding={type=tosca.capabilities.nfv.VirtualBindable}, monitoring_parameter={type=tosca.capabilities.nfv.Metric}}, requirements=[{high_availability={capability=tosca.capabilities.nfv.HA, relationship=tosca.relationships.nfv.HA, node=tosca.nodes.nfv.VDU, occurrences=[0, 1]}}]}"); - testData.put("tosca.nodes.nfv.CP", "{derived_from=tosca.nodes.network.Port, properties={type={type=string, required=false}}, requirements=[{virtualLink={capability=tosca.capabilities.nfv.VirtualLinkable, relationship=tosca.relationships.nfv.VirtualLinksTo, node=tosca.nodes.nfv.VL}}, {virtualBinding={capability=tosca.capabilities.nfv.VirtualBindable, relationship=tosca.relationships.nfv.VirtualBindsTo, node=tosca.nodes.nfv.VDU}}], 
attributes={address={type=string}}}"); - testData.put("tosca.nodes.nfv.VL", "{derived_from=tosca.nodes.network.Network, properties={vendor={type=string, required=true, description=name of the vendor who generate this VL}}, capabilities={virtual_linkable={type=tosca.capabilities.nfv.VirtualLinkable}}}"); - testData.put("tosca.nodes.nfv.VL.ELine", "{derived_from=tosca.nodes.nfv.VL, capabilities={virtual_linkable={occurrences=2}}}"); - testData.put("tosca.nodes.nfv.VL.ELAN", "{derived_from=tosca.nodes.nfv.VL}"); - testData.put("tosca.nodes.nfv.VL.ETree", "{derived_from=tosca.nodes.nfv.VL}"); - testData.put("tosca.nodes.nfv.FP", "{derived_from=tosca.nodes.Root, properties={policy={type=string, required=false, description=name of the vendor who generate this VL}}, requirements=[{forwarder={capability=tosca.capabilities.nfv.Forwarder, relationship=tosca.relationships.nfv.ForwardsTo}}]}"); - testData.put("tosca.groups.nfv.VNFFG", "{derived_from=tosca.groups.Root, properties={vendor={type=string, required=true, description=name of the vendor who generate this VNFFG}, version={type=string, required=true, description=version of this VNFFG}, number_of_endpoints={type=integer, required=true, description=count of the external endpoints included in this VNFFG}, dependent_virtual_link={type=list, entry_schema={type=string}, required=true, description=Reference to a VLD used in this Forwarding Graph}, connection_point={type=list, entry_schema={type=string}, required=true, description=Reference to Connection Points forming the VNFFG}, constituent_vnfs={type=list, entry_schema={type=string}, required=true, description=Reference to a list of VNFD used in this VNF Forwarding Graph}}}"); - testData.put("tosca.relationships.nfv.VirtualLinksTo", "{derived_from=tosca.relationships.network.LinksTo, valid_target_types=[tosca.capabilities.nfv.VirtualLinkable]}"); - testData.put("tosca.relationships.nfv.VirtualBindsTo", "{derived_from=tosca.relationships.network.BindsTo, 
valid_target_types=[tosca.capabilities.nfv.VirtualBindable]}"); - testData.put("tosca.relationships.nfv.HA", "{derived_from=tosca.relationships.Root, valid_target_types=[tosca.capabilities.nfv.HA]}"); - testData.put("tosca.relationships.nfv.Monitor", "{derived_from=tosca.relationships.ConnectsTo, valid_target_types=[tosca.capabilities.nfv.Metric]}"); - testData.put("tosca.relationships.nfv.ForwardsTo", "{derived_from=tosca.relationships.root, valid_target_types=[tosca.capabilities.nfv.Forwarder]}"); - testData.put("tosca.capabilities.nfv.VirtualLinkable", "{derived_from=tosca.capabilities.network.Linkable}"); - testData.put("tosca.capabilities.nfv.VirtualBindable", "{derived_from=tosca.capabilities.network.Bindable}"); - testData.put("tosca.capabilities.nfv.HA", "{derived_from=tosca.capabilities.Root, valid_source_types=[tosca.nodes.nfv.VDU]}"); - testData.put("tosca.capabilities.nfv.HA.ActiveActive", "{derived_from=tosca.capabilities.nfv.HA}"); - testData.put("tosca.capabilities.nfv.HA.ActivePassive", "{derived_from=tosca.capabilities.nfv.HA}"); - testData.put("tosca.capabilities.nfv.Metric", "{derived_from=tosca.capabilities.Root}"); - testData.put("tosca.capabilities.nfv.Forwarder", "{derived_from=tosca.capabilities.Root}"); + Map testData = new HashMap<>(); + testData.put("tosca.nodes.nfv.VNF", "{derived_from=tosca.nodes.Root, properties={id={type=string, description=ID of this VNF}, vendor={type=string, description=name of the vendor who generate this VNF}, version={type=version, description=version of the software for this VNF}}, requirements=[{virtualLink={capability=tosca.capabilities.nfv.VirtualLinkable, relationship=tosca.relationships.nfv.VirtualLinksTo, node=tosca.nodes.nfv.VL}}]}"); + testData.put("tosca.nodes.nfv.VDU", "{derived_from=tosca.nodes.Compute, capabilities={high_availability={type=tosca.capabilities.nfv.HA}, virtualbinding={type=tosca.capabilities.nfv.VirtualBindable}, monitoring_parameter={type=tosca.capabilities.nfv.Metric}}, 
requirements=[{high_availability={capability=tosca.capabilities.nfv.HA, relationship=tosca.relationships.nfv.HA, node=tosca.nodes.nfv.VDU, occurrences=[0, 1]}}]}"); + testData.put("tosca.nodes.nfv.CP", "{derived_from=tosca.nodes.network.Port, properties={type={type=string, required=false}}, requirements=[{virtualLink={capability=tosca.capabilities.nfv.VirtualLinkable, relationship=tosca.relationships.nfv.VirtualLinksTo, node=tosca.nodes.nfv.VL}}, {virtualBinding={capability=tosca.capabilities.nfv.VirtualBindable, relationship=tosca.relationships.nfv.VirtualBindsTo, node=tosca.nodes.nfv.VDU}}], attributes={address={type=string}}}"); + testData.put("tosca.nodes.nfv.VL", "{derived_from=tosca.nodes.network.Network, properties={vendor={type=string, required=true, description=name of the vendor who generate this VL}}, capabilities={virtual_linkable={type=tosca.capabilities.nfv.VirtualLinkable}}}"); + testData.put("tosca.nodes.nfv.VL.ELine", "{derived_from=tosca.nodes.nfv.VL, capabilities={virtual_linkable={occurrences=2}}}"); + testData.put("tosca.nodes.nfv.VL.ELAN", "{derived_from=tosca.nodes.nfv.VL}"); + testData.put("tosca.nodes.nfv.VL.ETree", "{derived_from=tosca.nodes.nfv.VL}"); + testData.put("tosca.nodes.nfv.FP", "{derived_from=tosca.nodes.Root, properties={policy={type=string, required=false, description=name of the vendor who generate this VL}}, requirements=[{forwarder={capability=tosca.capabilities.nfv.Forwarder, relationship=tosca.relationships.nfv.ForwardsTo}}]}"); + testData.put("tosca.groups.nfv.VNFFG", "{derived_from=tosca.groups.Root, properties={vendor={type=string, required=true, description=name of the vendor who generate this VNFFG}, version={type=string, required=true, description=version of this VNFFG}, number_of_endpoints={type=integer, required=true, description=count of the external endpoints included in this VNFFG}, dependent_virtual_link={type=list, entry_schema={type=string}, required=true, description=Reference to a VLD used in this 
Forwarding Graph}, connection_point={type=list, entry_schema={type=string}, required=true, description=Reference to Connection Points forming the VNFFG}, constituent_vnfs={type=list, entry_schema={type=string}, required=true, description=Reference to a list of VNFD used in this VNF Forwarding Graph}}}"); + testData.put("tosca.relationships.nfv.VirtualLinksTo", "{derived_from=tosca.relationships.network.LinksTo, valid_target_types=[tosca.capabilities.nfv.VirtualLinkable]}"); + testData.put("tosca.relationships.nfv.VirtualBindsTo", "{derived_from=tosca.relationships.network.BindsTo, valid_target_types=[tosca.capabilities.nfv.VirtualBindable]}"); + testData.put("tosca.relationships.nfv.HA", "{derived_from=tosca.relationships.Root, valid_target_types=[tosca.capabilities.nfv.HA]}"); + testData.put("tosca.relationships.nfv.Monitor", "{derived_from=tosca.relationships.ConnectsTo, valid_target_types=[tosca.capabilities.nfv.Metric]}"); + testData.put("tosca.relationships.nfv.ForwardsTo", "{derived_from=tosca.relationships.root, valid_target_types=[tosca.capabilities.nfv.Forwarder]}"); + testData.put("tosca.capabilities.nfv.VirtualLinkable", "{derived_from=tosca.capabilities.network.Linkable}"); + testData.put("tosca.capabilities.nfv.VirtualBindable", "{derived_from=tosca.capabilities.network.Bindable}"); + testData.put("tosca.capabilities.nfv.HA", "{derived_from=tosca.capabilities.Root, valid_source_types=[tosca.nodes.nfv.VDU]}"); + testData.put("tosca.capabilities.nfv.HA.ActiveActive", "{derived_from=tosca.capabilities.nfv.HA}"); + testData.put("tosca.capabilities.nfv.HA.ActivePassive", "{derived_from=tosca.capabilities.nfv.HA}"); + testData.put("tosca.capabilities.nfv.Metric", "{derived_from=tosca.capabilities.Root}"); + testData.put("tosca.capabilities.nfv.Forwarder", "{derived_from=tosca.capabilities.Root}"); - Map expectedDefMap = origMap; - expectedDefMap.putAll(testData); - EntityType.updateDefinitions("tosca_simple_profile_for_nfv_1_0_0"); + Map expectedDefMap = 
origMap; + expectedDefMap.putAll(testData); + EntityType.updateDefinitions("tosca_simple_profile_for_nfv_1_0_0"); - assertEquals(expectedDefMap, EntityType.TOSCA_DEF); + assertEquals(expectedDefMap, EntityType.TOSCA_DEF); - } + } - @After - public void tearDown() throws Exception { - EntityType.TOSCA_DEF = (LinkedHashMap) origMap; - } + @After + public void tearDown() throws Exception { + EntityType.TOSCA_DEF = (LinkedHashMap) origMap; + } } diff --git a/src/test/java/org/onap/sdc/toscaparser/api/functions/GetInputTest.java b/src/test/java/org/onap/sdc/toscaparser/api/functions/GetInputTest.java index 577fb17..98e5102 100644 --- a/src/test/java/org/onap/sdc/toscaparser/api/functions/GetInputTest.java +++ b/src/test/java/org/onap/sdc/toscaparser/api/functions/GetInputTest.java @@ -5,9 +5,9 @@ * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * + * * http://www.apache.org/licenses/LICENSE-2.0 - * + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
@@ -41,10 +41,10 @@ public class GetInputTest { private static final String TEST_PROPERTY_LONGITUDE = "longitude"; private static final String TEST_DEFAULT_VALUE = "dsvpn-hub"; private static final String TEST_DESCRIPTION_VALUE = "This is used for SDWAN only"; - private static final String TEST_INPUT_TYPE="type"; - private static final String TEST_INPUT_SCHEMA_TYPE="tosca.datatypes.siteresource.site"; + private static final String TEST_INPUT_TYPE = "type"; + private static final String TEST_INPUT_SCHEMA_TYPE = "tosca.datatypes.siteresource.site"; private static final String TEST_TOSTRING = "get_input:[sites, 1, longitude]"; - private static final String TEST_INPUT_SITES= "sites"; + private static final String TEST_INPUT_SITES = "sites"; @Test public void validate() throws JToscaException { @@ -54,33 +54,33 @@ public class GetInputTest { NodeTemplate nodeTemplate = toscaTemplate.getNodeTemplates().get(1).getSubMappingToscaTemplate().getNodeTemplates().get(0); ArrayList inputs = toscaTemplate.getNodeTemplates().get(1).getSubMappingToscaTemplate().getInputs(); LinkedHashMap properties = nodeTemplate.getProperties(); - assertThat(properties,notNullValue()); - assertThat(properties.size(),is(14)); + assertThat(properties, notNullValue()); + assertThat(properties.size(), is(14)); Property property = properties.get(TEST_PROPERTY_ROLE); - assertThat(properties,notNullValue()); - assertThat(property.getName(),is(TEST_PROPERTY_ROLE)); - assertThat(property.getType(),is(Schema.STRING)); - assertThat(property.getDefault(),is(TEST_DEFAULT_VALUE)); - assertThat(property.getDescription(),is(TEST_DESCRIPTION_VALUE)); - GetInput getInput= (GetInput)property.getValue(); - assertThat(getInput.getEntrySchema().get(TEST_INPUT_TYPE).toString(),is(TEST_INPUT_SCHEMA_TYPE)); + assertThat(properties, notNullValue()); + assertThat(property.getName(), is(TEST_PROPERTY_ROLE)); + assertThat(property.getType(), is(Schema.STRING)); + assertThat(property.getDefault(), is(TEST_DEFAULT_VALUE)); + 
assertThat(property.getDescription(), is(TEST_DESCRIPTION_VALUE)); + GetInput getInput = (GetInput) property.getValue(); + assertThat(getInput.getEntrySchema().get(TEST_INPUT_TYPE).toString(), is(TEST_INPUT_SCHEMA_TYPE)); property = properties.get(TEST_PROPERTY_LONGITUDE); - assertThat(properties,notNullValue()); + assertThat(properties, notNullValue()); assertThat(property.getName(), is(TEST_PROPERTY_LONGITUDE)); - assertThat(property.getValue().toString(),is(TEST_TOSTRING)); - getInput= (GetInput)property.getValue(); + assertThat(property.getValue().toString(), is(TEST_TOSTRING)); + getInput = (GetInput) property.getValue(); ArrayList getInputArguments = getInput.getArguments(); - assertThat(getInputArguments.size(),is(3)); + assertThat(getInputArguments.size(), is(3)); assertThat(getInputArguments.get(0).toString(), is(TEST_INPUT_SITES)); assertThat(getInputArguments.get(1).toString(), is("1")); assertThat(getInputArguments.get(2).toString(), is(TEST_PROPERTY_LONGITUDE)); Input in = inputs.get(10); assertThat(in.getEntrySchema().get(TEST_INPUT_TYPE), is(TEST_INPUT_SCHEMA_TYPE)); - assertThat(in.getName(),is(TEST_INPUT_SITES)); - assertThat(in.getType(),is(Input.LIST)); + assertThat(in.getName(), is(TEST_INPUT_SITES)); + assertThat(in.getType(), is(Input.LIST)); } @Test @@ -88,9 +88,9 @@ public class GetInputTest { //invalid file String fileStr = JToscaImportTest.class.getClassLoader().getResource(TEST_FILENAME_NG).getFile(); File file = new File(fileStr); - ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null,false); + ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null, false); List issues = ThreadLocalsHolder.getCollector().getValidationIssueReport(); assertTrue(issues.stream().anyMatch(x -> x.contains("JE282"))); } - } +} -- cgit 1.2.3-korg From 527fe8bdac1f1c7c59738c598996dc8c842a22e3 Mon Sep 17 00:00:00 2001 From: "andre.schmid" Date: Wed, 31 Jul 2019 13:40:17 +0000 Subject: Validation 
to ThreadsLocalHolder instead stdout Fix one validation error that was going to stdout instead of normal handling through ThreadsLocalHolder singleton. Issue-ID: SDC-2344 Change-Id: I34827aa1314f2b65eff7b92fce7890eeddbca0e6 Signed-off-by: andre.schmid --- pom.xml | 13 +++++ .../onap/sdc/toscaparser/api/TopologyTemplate.java | 5 +- .../api/common/JToscaValidationIssue.java | 20 +++++++ .../sdc/toscaparser/api/JToscaMetadataParse.java | 58 ++++++++++++++++++--- .../csars/tmpCSAR_Huawei_vSPGW_fixed.csar | Bin 43627 -> 45116 bytes ...pCSAR_Huawei_vSPGW_without_required_inputs.csar | Bin 0 -> 43627 bytes 6 files changed, 89 insertions(+), 7 deletions(-) create mode 100644 src/test/resources/csars/tmpCSAR_Huawei_vSPGW_without_required_inputs.csar diff --git a/pom.xml b/pom.xml index c619031..8e02299 100644 --- a/pom.xml +++ b/pom.xml @@ -23,6 +23,7 @@ + 2.1 @@ -64,6 +65,18 @@ + + org.hamcrest + hamcrest + ${hamcrest.version} + test + + + org.hamcrest + hamcrest-library + ${hamcrest.version} + test + junit junit diff --git a/src/main/java/org/onap/sdc/toscaparser/api/TopologyTemplate.java b/src/main/java/org/onap/sdc/toscaparser/api/TopologyTemplate.java index 2160527..efc6948 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/TopologyTemplate.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/TopologyTemplate.java @@ -119,7 +119,10 @@ public class TopologyTemplate { } if ((parsedParams != null && parsedParams.get(input.getName()) == null || parsedParams == null) && input.isRequired() && input.getDefault() == null) { - System.out.format("Log warning: The required parameter \"%s\" is not provided\n", input.getName()); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE003", + String.format("MissingRequiredFieldError: The required input \"%s\" was not provided" + , input.getName())) + ); } alInputs.add(input); } diff --git a/src/main/java/org/onap/sdc/toscaparser/api/common/JToscaValidationIssue.java 
b/src/main/java/org/onap/sdc/toscaparser/api/common/JToscaValidationIssue.java index 19c9583..cd5cbc5 100644 --- a/src/main/java/org/onap/sdc/toscaparser/api/common/JToscaValidationIssue.java +++ b/src/main/java/org/onap/sdc/toscaparser/api/common/JToscaValidationIssue.java @@ -20,6 +20,8 @@ package org.onap.sdc.toscaparser.api.common; +import java.util.Objects; + public class JToscaValidationIssue { private String code; @@ -52,4 +54,22 @@ public class JToscaValidationIssue { public String toString() { return "JToscaError [code=" + code + ", message=" + message + "]"; } + + @Override + public boolean equals(final Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + final JToscaValidationIssue that = (JToscaValidationIssue) o; + return Objects.equals(code, that.code) && + Objects.equals(message, that.message); + } + + @Override + public int hashCode() { + return Objects.hash(code, message); + } } diff --git a/src/test/java/org/onap/sdc/toscaparser/api/JToscaMetadataParse.java b/src/test/java/org/onap/sdc/toscaparser/api/JToscaMetadataParse.java index 3f5290d..2ec41b2 100644 --- a/src/test/java/org/onap/sdc/toscaparser/api/JToscaMetadataParse.java +++ b/src/test/java/org/onap/sdc/toscaparser/api/JToscaMetadataParse.java @@ -20,15 +20,23 @@ package org.onap.sdc.toscaparser.api; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.hasSize; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import java.io.File; +import java.net.URL; +import java.util.ArrayList; +import java.util.Collection; import java.util.LinkedHashMap; +import java.util.Map; import org.junit.Test; import org.onap.sdc.toscaparser.api.common.JToscaException; +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; import 
org.onap.sdc.toscaparser.api.utils.JToscaErrorCodes; import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; @@ -36,8 +44,7 @@ public class JToscaMetadataParse { @Test public void testMetadataParsedCorrectly() throws JToscaException { - String fileStr = JToscaMetadataParse.class.getClassLoader().getResource("csars/csar_hello_world.csar").getFile(); - File file = new File(fileStr); + final File file = loadCsar("csars/csar_hello_world.csar"); ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); LinkedHashMap metadataProperties = toscaTemplate.getMetaProperties("TOSCA.meta"); assertNotNull(metadataProperties); @@ -48,17 +55,49 @@ public class JToscaMetadataParse { @Test public void noWarningsAfterParse() throws JToscaException { - String fileStr = JToscaMetadataParse.class.getClassLoader().getResource("csars/tmpCSAR_Huawei_vSPGW_fixed.csar").getFile(); - File file = new File(fileStr); + final File file = loadCsar("csars/tmpCSAR_Huawei_vSPGW_fixed.csar"); ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); int validationIssuesCaught = ThreadLocalsHolder.getCollector().validationIssuesCaught(); assertTrue(validationIssuesCaught == 0); } + @Test + public void requiredInputErrorsAfterParse() throws JToscaException { + final File file = loadCsar("csars/tmpCSAR_Huawei_vSPGW_without_required_inputs.csar"); + new ToscaTemplate(file.getAbsolutePath(), null, true, null); + + final Map validationIssues = ThreadLocalsHolder.getCollector() + .getValidationIssues(); + final Collection actualValidationIssueList = validationIssues.values(); + + final Collection expectedValidationIssueList = new ArrayList<>(); + final String errorCode = "JE003"; + final String errorFormat = "MissingRequiredFieldError: The required input \"%s\" was not provided"; + expectedValidationIssueList.add(new JToscaValidationIssue(errorCode + , String.format(errorFormat, "nf_naming_code"))); + expectedValidationIssueList.add(new 
JToscaValidationIssue(errorCode + , String.format(errorFormat, "nf_type"))); + expectedValidationIssueList.add(new JToscaValidationIssue(errorCode + , String.format(errorFormat, "nf_role"))); + expectedValidationIssueList.add(new JToscaValidationIssue(errorCode + , String.format(errorFormat, "min_instances"))); + expectedValidationIssueList.add(new JToscaValidationIssue(errorCode + , String.format(errorFormat, "max_instances"))); + expectedValidationIssueList.add(new JToscaValidationIssue(errorCode + , String.format(errorFormat, "nf_function"))); + + assertThat("The actual and the expected validation issue lists should have the same size" + , actualValidationIssueList, hasSize(expectedValidationIssueList.size()) + ); + + assertThat("The actual and the expected validation issue lists should be the same" + , actualValidationIssueList, containsInAnyOrder(expectedValidationIssueList.toArray(new JToscaValidationIssue[0])) + ); + } + @Test public void testEmptyCsar() throws JToscaException { - String fileStr = JToscaMetadataParse.class.getClassLoader().getResource("csars/emptyCsar.csar").getFile(); - File file = new File(fileStr); + final File file = loadCsar("csars/emptyCsar.csar"); try { ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); } catch (JToscaException e) { @@ -78,4 +117,11 @@ public class JToscaMetadataParse { assertTrue(e.getCode().equals(JToscaErrorCodes.PATH_NOT_VALID.getValue())); } } + + private File loadCsar(final String csarFilePath) { + final URL resourceUrl = JToscaMetadataParse.class.getClassLoader().getResource(csarFilePath); + assertNotNull(String.format("Could not load CSAR file '%s'", csarFilePath), resourceUrl); + + return new File(resourceUrl.getFile()); + } } diff --git a/src/test/resources/csars/tmpCSAR_Huawei_vSPGW_fixed.csar b/src/test/resources/csars/tmpCSAR_Huawei_vSPGW_fixed.csar index 194fabb..9dc29c7 100644 Binary files a/src/test/resources/csars/tmpCSAR_Huawei_vSPGW_fixed.csar and 
b/src/test/resources/csars/tmpCSAR_Huawei_vSPGW_fixed.csar differ diff --git a/src/test/resources/csars/tmpCSAR_Huawei_vSPGW_without_required_inputs.csar b/src/test/resources/csars/tmpCSAR_Huawei_vSPGW_without_required_inputs.csar new file mode 100644 index 0000000..194fabb Binary files /dev/null and b/src/test/resources/csars/tmpCSAR_Huawei_vSPGW_without_required_inputs.csar differ -- cgit 1.2.3-korg From 2a3357829783d850d230d8b85b9e1c15effe1a38 Mon Sep 17 00:00:00 2001 From: Jessica Wagantall Date: Thu, 7 Nov 2019 11:33:01 -0800 Subject: Migrate jtosca contents Issue-ID: CIMAN-33 Signed-off-by: Jessica Wagantall --- .gitreview | 4 - INFO.yaml | 62 - LICENSE.TXT | 20 - README.md | 41 - checkstyle-suppressions.xml | 38 - docs/index.rst | 8 - jtosca/.gitreview | 4 + jtosca/INFO.yaml | 62 + jtosca/LICENSE.TXT | 20 + jtosca/README.md | 41 + jtosca/checkstyle-suppressions.xml | 38 + jtosca/docs/index.rst | 8 + jtosca/pom.xml | 284 +++++ .../sdc/toscaparser/api/CapabilityAssignment.java | 174 +++ .../sdc/toscaparser/api/CapabilityAssignments.java | 72 ++ .../org/onap/sdc/toscaparser/api/DataEntity.java | 457 +++++++ .../onap/sdc/toscaparser/api/EntityTemplate.java | 885 ++++++++++++++ .../java/org/onap/sdc/toscaparser/api/Group.java | 171 +++ .../onap/sdc/toscaparser/api/ImportsLoader.java | 748 ++++++++++++ .../org/onap/sdc/toscaparser/api/NodeTemplate.java | 824 +++++++++++++ .../java/org/onap/sdc/toscaparser/api/Policy.java | 232 ++++ .../org/onap/sdc/toscaparser/api/Property.java | 401 +++++++ .../sdc/toscaparser/api/RelationshipTemplate.java | 227 ++++ .../org/onap/sdc/toscaparser/api/Repository.java | 137 +++ .../sdc/toscaparser/api/RequirementAssignment.java | 111 ++ .../toscaparser/api/RequirementAssignments.java | 59 + .../sdc/toscaparser/api/SubstitutionMappings.java | 539 +++++++++ .../onap/sdc/toscaparser/api/TopologyTemplate.java | 866 +++++++++++++ .../org/onap/sdc/toscaparser/api/ToscaGraph.java | 129 ++ .../onap/sdc/toscaparser/api/ToscaTemplate.java | 
1267 ++++++++++++++++++++ .../org/onap/sdc/toscaparser/api/Triggers.java | 201 ++++ .../onap/sdc/toscaparser/api/UnsupportedType.java | 101 ++ .../toscaparser/api/common/JToscaException.java | 47 + .../api/common/JToscaValidationIssue.java | 75 ++ .../sdc/toscaparser/api/common/TOSCAException.java | 58 + .../api/common/ValidationIssueCollector.java | 57 + .../toscaparser/api/elements/ArtifactTypeDef.java | 121 ++ .../sdc/toscaparser/api/elements/AttributeDef.java | 60 + .../api/elements/CapabilityTypeDef.java | 240 ++++ .../sdc/toscaparser/api/elements/DataType.java | 136 +++ .../sdc/toscaparser/api/elements/EntityType.java | 436 +++++++ .../sdc/toscaparser/api/elements/GroupType.java | 263 ++++ .../toscaparser/api/elements/InterfacesDef.java | 283 +++++ .../sdc/toscaparser/api/elements/Metadata.java | 62 + .../sdc/toscaparser/api/elements/NodeType.java | 549 +++++++++ .../sdc/toscaparser/api/elements/PolicyType.java | 309 +++++ .../sdc/toscaparser/api/elements/PortSpec.java | 177 +++ .../sdc/toscaparser/api/elements/PropertyDef.java | 249 ++++ .../toscaparser/api/elements/RelationshipType.java | 121 ++ .../sdc/toscaparser/api/elements/ScalarUnit.java | 287 +++++ .../api/elements/ScalarUnitFrequency.java | 39 + .../toscaparser/api/elements/ScalarUnitSize.java | 43 + .../toscaparser/api/elements/ScalarUnitTime.java | 37 + .../api/elements/StatefulEntityType.java | 234 ++++ .../toscaparser/api/elements/TypeValidation.java | 173 +++ .../api/elements/constraints/Constraint.java | 309 +++++ .../api/elements/constraints/Equal.java | 77 ++ .../api/elements/constraints/GreaterOrEqual.java | 130 ++ .../api/elements/constraints/GreaterThan.java | 120 ++ .../api/elements/constraints/InRange.java | 186 +++ .../api/elements/constraints/Length.java | 100 ++ .../api/elements/constraints/LessOrEqual.java | 124 ++ .../api/elements/constraints/LessThan.java | 121 ++ .../api/elements/constraints/MaxLength.java | 110 ++ .../api/elements/constraints/MinLength.java | 109 ++ 
.../api/elements/constraints/Pattern.java | 116 ++ .../api/elements/constraints/Schema.java | 309 +++++ .../api/elements/constraints/ValidValues.java | 99 ++ .../toscaparser/api/elements/enums/FileSize.java | 32 + .../api/elements/enums/ToscaElementNames.java | 40 + .../sdc/toscaparser/api/extensions/ExtTools.java | 204 ++++ .../onap/sdc/toscaparser/api/functions/Concat.java | 97 ++ .../sdc/toscaparser/api/functions/Function.java | 259 ++++ .../toscaparser/api/functions/GetAttribute.java | 544 +++++++++ .../sdc/toscaparser/api/functions/GetInput.java | 203 ++++ .../api/functions/GetOperationOutput.java | 243 ++++ .../sdc/toscaparser/api/functions/GetProperty.java | 639 ++++++++++ .../onap/sdc/toscaparser/api/functions/Token.java | 130 ++ .../sdc/toscaparser/api/parameters/Annotation.java | 98 ++ .../onap/sdc/toscaparser/api/parameters/Input.java | 199 +++ .../sdc/toscaparser/api/parameters/Output.java | 129 ++ .../org/onap/sdc/toscaparser/api/prereq/CSAR.java | 790 ++++++++++++ .../onap/sdc/toscaparser/api/utils/CopyUtils.java | 50 + .../onap/sdc/toscaparser/api/utils/DumpUtils.java | 68 ++ .../toscaparser/api/utils/JToscaErrorCodes.java | 52 + .../api/utils/TOSCAVersionProperty.java | 209 ++++ .../toscaparser/api/utils/ThreadLocalsHolder.java | 45 + .../onap/sdc/toscaparser/api/utils/UrlUtils.java | 145 +++ .../sdc/toscaparser/api/utils/ValidateUtils.java | 439 +++++++ .../src/main/resources/TOSCA_definition_1_0.yaml | 971 +++++++++++++++ .../TOSCA_simple_yaml_definition_1_0_0.py | 19 + .../TOSCA_simple_yaml_definition_1_0_0.yaml | 240 ++++ .../extensions/nfv/TOSCA_nfv_definition_1_0.yaml | 240 ++++ jtosca/src/main/resources/extensions/nfv/nfv.py | 19 + .../sdc/toscaparser/api/GetValidationIssues.java | 100 ++ .../onap/sdc/toscaparser/api/JToscaImportTest.java | 309 +++++ .../sdc/toscaparser/api/JToscaMetadataParse.java | 127 ++ .../api/elements/CalculatePropertyByPathTest.java | 167 +++ .../toscaparser/api/elements/EntityTypeTest.java | 75 ++ 
.../toscaparser/api/functions/GetInputTest.java | 96 ++ .../src/test/resources/csars/csar_hello_world.csar | Bin 0 -> 936 bytes .../resources/csars/dataTypes-test-service.csar | Bin 0 -> 46307 bytes jtosca/src/test/resources/csars/emptyCsar.csar | Bin 0 -> 22 bytes jtosca/src/test/resources/csars/listed_input.csar | Bin 0 -> 46229 bytes .../src/test/resources/csars/listed_input_ng.csar | Bin 0 -> 46232 bytes .../resources/csars/resource-Spgw-csar-ZTE.csar | Bin 0 -> 31639 bytes .../test/resources/csars/sdc-onboarding_csar.csar | Bin 0 -> 79654 bytes .../csars/service-AdiodVmxVpeBvService-csar.csar | Bin 0 -> 117439 bytes .../csars/service-JennyVtsbcKarunaSvc-csar.csar | Bin 0 -> 145576 bytes .../service-NetworkCloudVnfServiceMock-csar.csar | Bin 0 -> 60223 bytes .../csars/tmpCSAR_Huawei_vSPGW_fixed.csar | Bin 0 -> 45116 bytes ...pCSAR_Huawei_vSPGW_without_required_inputs.csar | Bin 0 -> 43627 bytes jtosca/version.properties | 13 + pom.xml | 284 ----- .../sdc/toscaparser/api/CapabilityAssignment.java | 174 --- .../sdc/toscaparser/api/CapabilityAssignments.java | 72 -- .../org/onap/sdc/toscaparser/api/DataEntity.java | 457 ------- .../onap/sdc/toscaparser/api/EntityTemplate.java | 885 -------------- .../java/org/onap/sdc/toscaparser/api/Group.java | 171 --- .../onap/sdc/toscaparser/api/ImportsLoader.java | 748 ------------ .../org/onap/sdc/toscaparser/api/NodeTemplate.java | 824 ------------- .../java/org/onap/sdc/toscaparser/api/Policy.java | 232 ---- .../org/onap/sdc/toscaparser/api/Property.java | 401 ------- .../sdc/toscaparser/api/RelationshipTemplate.java | 227 ---- .../org/onap/sdc/toscaparser/api/Repository.java | 137 --- .../sdc/toscaparser/api/RequirementAssignment.java | 111 -- .../toscaparser/api/RequirementAssignments.java | 59 - .../sdc/toscaparser/api/SubstitutionMappings.java | 539 --------- .../onap/sdc/toscaparser/api/TopologyTemplate.java | 866 ------------- .../org/onap/sdc/toscaparser/api/ToscaGraph.java | 129 -- 
.../onap/sdc/toscaparser/api/ToscaTemplate.java | 1267 -------------------- .../org/onap/sdc/toscaparser/api/Triggers.java | 201 ---- .../onap/sdc/toscaparser/api/UnsupportedType.java | 101 -- .../toscaparser/api/common/JToscaException.java | 47 - .../api/common/JToscaValidationIssue.java | 75 -- .../sdc/toscaparser/api/common/TOSCAException.java | 58 - .../api/common/ValidationIssueCollector.java | 57 - .../toscaparser/api/elements/ArtifactTypeDef.java | 121 -- .../sdc/toscaparser/api/elements/AttributeDef.java | 60 - .../api/elements/CapabilityTypeDef.java | 240 ---- .../sdc/toscaparser/api/elements/DataType.java | 136 --- .../sdc/toscaparser/api/elements/EntityType.java | 436 ------- .../sdc/toscaparser/api/elements/GroupType.java | 263 ---- .../toscaparser/api/elements/InterfacesDef.java | 283 ----- .../sdc/toscaparser/api/elements/Metadata.java | 62 - .../sdc/toscaparser/api/elements/NodeType.java | 549 --------- .../sdc/toscaparser/api/elements/PolicyType.java | 309 ----- .../sdc/toscaparser/api/elements/PortSpec.java | 177 --- .../sdc/toscaparser/api/elements/PropertyDef.java | 249 ---- .../toscaparser/api/elements/RelationshipType.java | 121 -- .../sdc/toscaparser/api/elements/ScalarUnit.java | 287 ----- .../api/elements/ScalarUnitFrequency.java | 39 - .../toscaparser/api/elements/ScalarUnitSize.java | 43 - .../toscaparser/api/elements/ScalarUnitTime.java | 37 - .../api/elements/StatefulEntityType.java | 234 ---- .../toscaparser/api/elements/TypeValidation.java | 173 --- .../api/elements/constraints/Constraint.java | 309 ----- .../api/elements/constraints/Equal.java | 77 -- .../api/elements/constraints/GreaterOrEqual.java | 130 -- .../api/elements/constraints/GreaterThan.java | 120 -- .../api/elements/constraints/InRange.java | 186 --- .../api/elements/constraints/Length.java | 100 -- .../api/elements/constraints/LessOrEqual.java | 124 -- .../api/elements/constraints/LessThan.java | 121 -- .../api/elements/constraints/MaxLength.java | 110 -- 
.../api/elements/constraints/MinLength.java | 109 -- .../api/elements/constraints/Pattern.java | 116 -- .../api/elements/constraints/Schema.java | 309 ----- .../api/elements/constraints/ValidValues.java | 99 -- .../toscaparser/api/elements/enums/FileSize.java | 32 - .../api/elements/enums/ToscaElementNames.java | 40 - .../sdc/toscaparser/api/extensions/ExtTools.java | 204 ---- .../onap/sdc/toscaparser/api/functions/Concat.java | 97 -- .../sdc/toscaparser/api/functions/Function.java | 259 ---- .../toscaparser/api/functions/GetAttribute.java | 544 --------- .../sdc/toscaparser/api/functions/GetInput.java | 203 ---- .../api/functions/GetOperationOutput.java | 243 ---- .../sdc/toscaparser/api/functions/GetProperty.java | 639 ---------- .../onap/sdc/toscaparser/api/functions/Token.java | 130 -- .../sdc/toscaparser/api/parameters/Annotation.java | 98 -- .../onap/sdc/toscaparser/api/parameters/Input.java | 199 --- .../sdc/toscaparser/api/parameters/Output.java | 129 -- .../org/onap/sdc/toscaparser/api/prereq/CSAR.java | 790 ------------ .../onap/sdc/toscaparser/api/utils/CopyUtils.java | 50 - .../onap/sdc/toscaparser/api/utils/DumpUtils.java | 68 -- .../toscaparser/api/utils/JToscaErrorCodes.java | 52 - .../api/utils/TOSCAVersionProperty.java | 209 ---- .../toscaparser/api/utils/ThreadLocalsHolder.java | 45 - .../onap/sdc/toscaparser/api/utils/UrlUtils.java | 145 --- .../sdc/toscaparser/api/utils/ValidateUtils.java | 439 ------- src/main/resources/TOSCA_definition_1_0.yaml | 971 --------------- .../TOSCA_simple_yaml_definition_1_0_0.py | 19 - .../TOSCA_simple_yaml_definition_1_0_0.yaml | 240 ---- .../extensions/nfv/TOSCA_nfv_definition_1_0.yaml | 240 ---- src/main/resources/extensions/nfv/nfv.py | 19 - .../sdc/toscaparser/api/GetValidationIssues.java | 100 -- .../onap/sdc/toscaparser/api/JToscaImportTest.java | 309 ----- .../sdc/toscaparser/api/JToscaMetadataParse.java | 127 -- .../api/elements/CalculatePropertyByPathTest.java | 167 --- 
.../toscaparser/api/elements/EntityTypeTest.java | 75 -- .../toscaparser/api/functions/GetInputTest.java | 96 -- src/test/resources/csars/csar_hello_world.csar | Bin 936 -> 0 bytes .../resources/csars/dataTypes-test-service.csar | Bin 46307 -> 0 bytes src/test/resources/csars/emptyCsar.csar | Bin 22 -> 0 bytes src/test/resources/csars/listed_input.csar | Bin 46229 -> 0 bytes src/test/resources/csars/listed_input_ng.csar | Bin 46232 -> 0 bytes .../resources/csars/resource-Spgw-csar-ZTE.csar | Bin 31639 -> 0 bytes src/test/resources/csars/sdc-onboarding_csar.csar | Bin 79654 -> 0 bytes .../csars/service-AdiodVmxVpeBvService-csar.csar | Bin 117439 -> 0 bytes .../csars/service-JennyVtsbcKarunaSvc-csar.csar | Bin 145576 -> 0 bytes .../service-NetworkCloudVnfServiceMock-csar.csar | Bin 60223 -> 0 bytes .../csars/tmpCSAR_Huawei_vSPGW_fixed.csar | Bin 45116 -> 0 bytes ...pCSAR_Huawei_vSPGW_without_required_inputs.csar | Bin 43627 -> 0 bytes version.properties | 13 - 214 files changed, 21015 insertions(+), 21015 deletions(-) delete mode 100644 .gitreview delete mode 100644 INFO.yaml delete mode 100644 LICENSE.TXT delete mode 100644 README.md delete mode 100644 checkstyle-suppressions.xml delete mode 100644 docs/index.rst create mode 100644 jtosca/.gitreview create mode 100644 jtosca/INFO.yaml create mode 100644 jtosca/LICENSE.TXT create mode 100644 jtosca/README.md create mode 100644 jtosca/checkstyle-suppressions.xml create mode 100644 jtosca/docs/index.rst create mode 100644 jtosca/pom.xml create mode 100644 jtosca/src/main/java/org/onap/sdc/toscaparser/api/CapabilityAssignment.java create mode 100644 jtosca/src/main/java/org/onap/sdc/toscaparser/api/CapabilityAssignments.java create mode 100644 jtosca/src/main/java/org/onap/sdc/toscaparser/api/DataEntity.java create mode 100644 jtosca/src/main/java/org/onap/sdc/toscaparser/api/EntityTemplate.java create mode 100644 jtosca/src/main/java/org/onap/sdc/toscaparser/api/Group.java create mode 100644 
jtosca/src/main/java/org/onap/sdc/toscaparser/api/ImportsLoader.java create mode 100644 jtosca/src/main/java/org/onap/sdc/toscaparser/api/NodeTemplate.java create mode 100644 jtosca/src/main/java/org/onap/sdc/toscaparser/api/Policy.java create mode 100644 jtosca/src/main/java/org/onap/sdc/toscaparser/api/Property.java create mode 100644 jtosca/src/main/java/org/onap/sdc/toscaparser/api/RelationshipTemplate.java create mode 100644 jtosca/src/main/java/org/onap/sdc/toscaparser/api/Repository.java create mode 100644 jtosca/src/main/java/org/onap/sdc/toscaparser/api/RequirementAssignment.java create mode 100644 jtosca/src/main/java/org/onap/sdc/toscaparser/api/RequirementAssignments.java create mode 100644 jtosca/src/main/java/org/onap/sdc/toscaparser/api/SubstitutionMappings.java create mode 100644 jtosca/src/main/java/org/onap/sdc/toscaparser/api/TopologyTemplate.java create mode 100644 jtosca/src/main/java/org/onap/sdc/toscaparser/api/ToscaGraph.java create mode 100644 jtosca/src/main/java/org/onap/sdc/toscaparser/api/ToscaTemplate.java create mode 100644 jtosca/src/main/java/org/onap/sdc/toscaparser/api/Triggers.java create mode 100644 jtosca/src/main/java/org/onap/sdc/toscaparser/api/UnsupportedType.java create mode 100644 jtosca/src/main/java/org/onap/sdc/toscaparser/api/common/JToscaException.java create mode 100644 jtosca/src/main/java/org/onap/sdc/toscaparser/api/common/JToscaValidationIssue.java create mode 100644 jtosca/src/main/java/org/onap/sdc/toscaparser/api/common/TOSCAException.java create mode 100644 jtosca/src/main/java/org/onap/sdc/toscaparser/api/common/ValidationIssueCollector.java create mode 100644 jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/ArtifactTypeDef.java create mode 100644 jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/AttributeDef.java create mode 100644 jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/CapabilityTypeDef.java create mode 100644 
jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/DataType.java create mode 100644 jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/EntityType.java create mode 100644 jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/GroupType.java create mode 100644 jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/InterfacesDef.java create mode 100644 jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/Metadata.java create mode 100644 jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/NodeType.java create mode 100644 jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/PolicyType.java create mode 100644 jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/PortSpec.java create mode 100644 jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/PropertyDef.java create mode 100644 jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/RelationshipType.java create mode 100644 jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnit.java create mode 100644 jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitFrequency.java create mode 100644 jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitSize.java create mode 100644 jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitTime.java create mode 100644 jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/StatefulEntityType.java create mode 100644 jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/TypeValidation.java create mode 100644 jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Constraint.java create mode 100644 jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Equal.java create mode 100644 jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/GreaterOrEqual.java create mode 100644 jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/GreaterThan.java create mode 100644 
jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/InRange.java create mode 100644 jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Length.java create mode 100644 jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/LessOrEqual.java create mode 100644 jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/LessThan.java create mode 100644 jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/MaxLength.java create mode 100644 jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/MinLength.java create mode 100644 jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Pattern.java create mode 100644 jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Schema.java create mode 100644 jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/ValidValues.java create mode 100644 jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/enums/FileSize.java create mode 100644 jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/enums/ToscaElementNames.java create mode 100644 jtosca/src/main/java/org/onap/sdc/toscaparser/api/extensions/ExtTools.java create mode 100644 jtosca/src/main/java/org/onap/sdc/toscaparser/api/functions/Concat.java create mode 100644 jtosca/src/main/java/org/onap/sdc/toscaparser/api/functions/Function.java create mode 100644 jtosca/src/main/java/org/onap/sdc/toscaparser/api/functions/GetAttribute.java create mode 100644 jtosca/src/main/java/org/onap/sdc/toscaparser/api/functions/GetInput.java create mode 100644 jtosca/src/main/java/org/onap/sdc/toscaparser/api/functions/GetOperationOutput.java create mode 100644 jtosca/src/main/java/org/onap/sdc/toscaparser/api/functions/GetProperty.java create mode 100644 jtosca/src/main/java/org/onap/sdc/toscaparser/api/functions/Token.java create mode 100644 jtosca/src/main/java/org/onap/sdc/toscaparser/api/parameters/Annotation.java create mode 100644 
jtosca/src/main/java/org/onap/sdc/toscaparser/api/parameters/Input.java create mode 100644 jtosca/src/main/java/org/onap/sdc/toscaparser/api/parameters/Output.java create mode 100644 jtosca/src/main/java/org/onap/sdc/toscaparser/api/prereq/CSAR.java create mode 100644 jtosca/src/main/java/org/onap/sdc/toscaparser/api/utils/CopyUtils.java create mode 100644 jtosca/src/main/java/org/onap/sdc/toscaparser/api/utils/DumpUtils.java create mode 100644 jtosca/src/main/java/org/onap/sdc/toscaparser/api/utils/JToscaErrorCodes.java create mode 100644 jtosca/src/main/java/org/onap/sdc/toscaparser/api/utils/TOSCAVersionProperty.java create mode 100644 jtosca/src/main/java/org/onap/sdc/toscaparser/api/utils/ThreadLocalsHolder.java create mode 100644 jtosca/src/main/java/org/onap/sdc/toscaparser/api/utils/UrlUtils.java create mode 100644 jtosca/src/main/java/org/onap/sdc/toscaparser/api/utils/ValidateUtils.java create mode 100644 jtosca/src/main/resources/TOSCA_definition_1_0.yaml create mode 100644 jtosca/src/main/resources/extensions/TOSCA_simple_yaml_definition_1_0_0/TOSCA_simple_yaml_definition_1_0_0.py create mode 100644 jtosca/src/main/resources/extensions/TOSCA_simple_yaml_definition_1_0_0/TOSCA_simple_yaml_definition_1_0_0.yaml create mode 100644 jtosca/src/main/resources/extensions/nfv/TOSCA_nfv_definition_1_0.yaml create mode 100644 jtosca/src/main/resources/extensions/nfv/nfv.py create mode 100644 jtosca/src/test/java/org/onap/sdc/toscaparser/api/GetValidationIssues.java create mode 100644 jtosca/src/test/java/org/onap/sdc/toscaparser/api/JToscaImportTest.java create mode 100644 jtosca/src/test/java/org/onap/sdc/toscaparser/api/JToscaMetadataParse.java create mode 100644 jtosca/src/test/java/org/onap/sdc/toscaparser/api/elements/CalculatePropertyByPathTest.java create mode 100644 jtosca/src/test/java/org/onap/sdc/toscaparser/api/elements/EntityTypeTest.java create mode 100644 jtosca/src/test/java/org/onap/sdc/toscaparser/api/functions/GetInputTest.java create mode 
100644 jtosca/src/test/resources/csars/csar_hello_world.csar create mode 100644 jtosca/src/test/resources/csars/dataTypes-test-service.csar create mode 100644 jtosca/src/test/resources/csars/emptyCsar.csar create mode 100644 jtosca/src/test/resources/csars/listed_input.csar create mode 100644 jtosca/src/test/resources/csars/listed_input_ng.csar create mode 100644 jtosca/src/test/resources/csars/resource-Spgw-csar-ZTE.csar create mode 100644 jtosca/src/test/resources/csars/sdc-onboarding_csar.csar create mode 100644 jtosca/src/test/resources/csars/service-AdiodVmxVpeBvService-csar.csar create mode 100644 jtosca/src/test/resources/csars/service-JennyVtsbcKarunaSvc-csar.csar create mode 100644 jtosca/src/test/resources/csars/service-NetworkCloudVnfServiceMock-csar.csar create mode 100644 jtosca/src/test/resources/csars/tmpCSAR_Huawei_vSPGW_fixed.csar create mode 100644 jtosca/src/test/resources/csars/tmpCSAR_Huawei_vSPGW_without_required_inputs.csar create mode 100644 jtosca/version.properties delete mode 100644 pom.xml delete mode 100644 src/main/java/org/onap/sdc/toscaparser/api/CapabilityAssignment.java delete mode 100644 src/main/java/org/onap/sdc/toscaparser/api/CapabilityAssignments.java delete mode 100644 src/main/java/org/onap/sdc/toscaparser/api/DataEntity.java delete mode 100644 src/main/java/org/onap/sdc/toscaparser/api/EntityTemplate.java delete mode 100644 src/main/java/org/onap/sdc/toscaparser/api/Group.java delete mode 100644 src/main/java/org/onap/sdc/toscaparser/api/ImportsLoader.java delete mode 100644 src/main/java/org/onap/sdc/toscaparser/api/NodeTemplate.java delete mode 100644 src/main/java/org/onap/sdc/toscaparser/api/Policy.java delete mode 100644 src/main/java/org/onap/sdc/toscaparser/api/Property.java delete mode 100644 src/main/java/org/onap/sdc/toscaparser/api/RelationshipTemplate.java delete mode 100644 src/main/java/org/onap/sdc/toscaparser/api/Repository.java delete mode 100644 
src/main/java/org/onap/sdc/toscaparser/api/RequirementAssignment.java delete mode 100644 src/main/java/org/onap/sdc/toscaparser/api/RequirementAssignments.java delete mode 100644 src/main/java/org/onap/sdc/toscaparser/api/SubstitutionMappings.java delete mode 100644 src/main/java/org/onap/sdc/toscaparser/api/TopologyTemplate.java delete mode 100644 src/main/java/org/onap/sdc/toscaparser/api/ToscaGraph.java delete mode 100644 src/main/java/org/onap/sdc/toscaparser/api/ToscaTemplate.java delete mode 100644 src/main/java/org/onap/sdc/toscaparser/api/Triggers.java delete mode 100644 src/main/java/org/onap/sdc/toscaparser/api/UnsupportedType.java delete mode 100644 src/main/java/org/onap/sdc/toscaparser/api/common/JToscaException.java delete mode 100644 src/main/java/org/onap/sdc/toscaparser/api/common/JToscaValidationIssue.java delete mode 100644 src/main/java/org/onap/sdc/toscaparser/api/common/TOSCAException.java delete mode 100644 src/main/java/org/onap/sdc/toscaparser/api/common/ValidationIssueCollector.java delete mode 100644 src/main/java/org/onap/sdc/toscaparser/api/elements/ArtifactTypeDef.java delete mode 100644 src/main/java/org/onap/sdc/toscaparser/api/elements/AttributeDef.java delete mode 100644 src/main/java/org/onap/sdc/toscaparser/api/elements/CapabilityTypeDef.java delete mode 100644 src/main/java/org/onap/sdc/toscaparser/api/elements/DataType.java delete mode 100644 src/main/java/org/onap/sdc/toscaparser/api/elements/EntityType.java delete mode 100644 src/main/java/org/onap/sdc/toscaparser/api/elements/GroupType.java delete mode 100644 src/main/java/org/onap/sdc/toscaparser/api/elements/InterfacesDef.java delete mode 100644 src/main/java/org/onap/sdc/toscaparser/api/elements/Metadata.java delete mode 100644 src/main/java/org/onap/sdc/toscaparser/api/elements/NodeType.java delete mode 100644 src/main/java/org/onap/sdc/toscaparser/api/elements/PolicyType.java delete mode 100644 src/main/java/org/onap/sdc/toscaparser/api/elements/PortSpec.java delete 
mode 100644 src/main/java/org/onap/sdc/toscaparser/api/elements/PropertyDef.java delete mode 100644 src/main/java/org/onap/sdc/toscaparser/api/elements/RelationshipType.java delete mode 100644 src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnit.java delete mode 100644 src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitFrequency.java delete mode 100644 src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitSize.java delete mode 100644 src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitTime.java delete mode 100644 src/main/java/org/onap/sdc/toscaparser/api/elements/StatefulEntityType.java delete mode 100644 src/main/java/org/onap/sdc/toscaparser/api/elements/TypeValidation.java delete mode 100644 src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Constraint.java delete mode 100644 src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Equal.java delete mode 100644 src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/GreaterOrEqual.java delete mode 100644 src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/GreaterThan.java delete mode 100644 src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/InRange.java delete mode 100644 src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Length.java delete mode 100644 src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/LessOrEqual.java delete mode 100644 src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/LessThan.java delete mode 100644 src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/MaxLength.java delete mode 100644 src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/MinLength.java delete mode 100644 src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Pattern.java delete mode 100644 src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Schema.java delete mode 100644 src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/ValidValues.java 
delete mode 100644 src/main/java/org/onap/sdc/toscaparser/api/elements/enums/FileSize.java delete mode 100644 src/main/java/org/onap/sdc/toscaparser/api/elements/enums/ToscaElementNames.java delete mode 100644 src/main/java/org/onap/sdc/toscaparser/api/extensions/ExtTools.java delete mode 100644 src/main/java/org/onap/sdc/toscaparser/api/functions/Concat.java delete mode 100644 src/main/java/org/onap/sdc/toscaparser/api/functions/Function.java delete mode 100644 src/main/java/org/onap/sdc/toscaparser/api/functions/GetAttribute.java delete mode 100644 src/main/java/org/onap/sdc/toscaparser/api/functions/GetInput.java delete mode 100644 src/main/java/org/onap/sdc/toscaparser/api/functions/GetOperationOutput.java delete mode 100644 src/main/java/org/onap/sdc/toscaparser/api/functions/GetProperty.java delete mode 100644 src/main/java/org/onap/sdc/toscaparser/api/functions/Token.java delete mode 100644 src/main/java/org/onap/sdc/toscaparser/api/parameters/Annotation.java delete mode 100644 src/main/java/org/onap/sdc/toscaparser/api/parameters/Input.java delete mode 100644 src/main/java/org/onap/sdc/toscaparser/api/parameters/Output.java delete mode 100644 src/main/java/org/onap/sdc/toscaparser/api/prereq/CSAR.java delete mode 100644 src/main/java/org/onap/sdc/toscaparser/api/utils/CopyUtils.java delete mode 100644 src/main/java/org/onap/sdc/toscaparser/api/utils/DumpUtils.java delete mode 100644 src/main/java/org/onap/sdc/toscaparser/api/utils/JToscaErrorCodes.java delete mode 100644 src/main/java/org/onap/sdc/toscaparser/api/utils/TOSCAVersionProperty.java delete mode 100644 src/main/java/org/onap/sdc/toscaparser/api/utils/ThreadLocalsHolder.java delete mode 100644 src/main/java/org/onap/sdc/toscaparser/api/utils/UrlUtils.java delete mode 100644 src/main/java/org/onap/sdc/toscaparser/api/utils/ValidateUtils.java delete mode 100644 src/main/resources/TOSCA_definition_1_0.yaml delete mode 100644 
src/main/resources/extensions/TOSCA_simple_yaml_definition_1_0_0/TOSCA_simple_yaml_definition_1_0_0.py delete mode 100644 src/main/resources/extensions/TOSCA_simple_yaml_definition_1_0_0/TOSCA_simple_yaml_definition_1_0_0.yaml delete mode 100644 src/main/resources/extensions/nfv/TOSCA_nfv_definition_1_0.yaml delete mode 100644 src/main/resources/extensions/nfv/nfv.py delete mode 100644 src/test/java/org/onap/sdc/toscaparser/api/GetValidationIssues.java delete mode 100644 src/test/java/org/onap/sdc/toscaparser/api/JToscaImportTest.java delete mode 100644 src/test/java/org/onap/sdc/toscaparser/api/JToscaMetadataParse.java delete mode 100644 src/test/java/org/onap/sdc/toscaparser/api/elements/CalculatePropertyByPathTest.java delete mode 100644 src/test/java/org/onap/sdc/toscaparser/api/elements/EntityTypeTest.java delete mode 100644 src/test/java/org/onap/sdc/toscaparser/api/functions/GetInputTest.java delete mode 100644 src/test/resources/csars/csar_hello_world.csar delete mode 100644 src/test/resources/csars/dataTypes-test-service.csar delete mode 100644 src/test/resources/csars/emptyCsar.csar delete mode 100644 src/test/resources/csars/listed_input.csar delete mode 100644 src/test/resources/csars/listed_input_ng.csar delete mode 100644 src/test/resources/csars/resource-Spgw-csar-ZTE.csar delete mode 100644 src/test/resources/csars/sdc-onboarding_csar.csar delete mode 100644 src/test/resources/csars/service-AdiodVmxVpeBvService-csar.csar delete mode 100644 src/test/resources/csars/service-JennyVtsbcKarunaSvc-csar.csar delete mode 100644 src/test/resources/csars/service-NetworkCloudVnfServiceMock-csar.csar delete mode 100644 src/test/resources/csars/tmpCSAR_Huawei_vSPGW_fixed.csar delete mode 100644 src/test/resources/csars/tmpCSAR_Huawei_vSPGW_without_required_inputs.csar delete mode 100644 version.properties diff --git a/.gitreview b/.gitreview deleted file mode 100644 index 369108b..0000000 --- a/.gitreview +++ /dev/null @@ -1,4 +0,0 @@ -[gerrit] 
-host=gerrit.onap.org -port=29418 -project=sdc/jtosca.git \ No newline at end of file diff --git a/INFO.yaml b/INFO.yaml deleted file mode 100644 index 3d21b87..0000000 --- a/INFO.yaml +++ /dev/null @@ -1,62 +0,0 @@ ---- -project: 'sdc/jtosca' -project_creation_date: '2017-05-26' -lifecycle_state: 'Incubation' -project_lead: &onap_releng_ptl - name: 'Ofir Sonsino' - email: 'ofir.sonsino@intl.att.com' - company: 'ATT' - id: 'os0695' - timezone: 'Israel/Lod' -primary_contact: *onap_releng_ptl -issue_tracking: - type: 'jira' - url: 'https://jira.onap.org/projects/SDC' - key: 'SDC' -meetings: - - type: 'zoom' - agenda: '' - url: 'https://wiki.onap.org/pages/viewpage.action?pageId=6592847' - server: 'n/a' - channel: 'n/a' - repeats: 'weekly' - time: '14:00 UTC' -committers: - - <<: *onap_releng_ptl - - name: 'Idan Amit' - email: 'ia096e@intl.att.com' - company: 'ATT' - id: 'idanamit' - timezone: 'Israel/Aviv' - - name: 'Tal Gitelman' - email: 'tg851x@intl.att.com' - company: 'ATT' - id: 'tgitelman' - timezone: 'Israel/Aviv' - - name: 'Yuli Shlosberg' - email: 'ys9693@att.com' - company: 'ATT' - id: 'ys9693' - timezone: 'Israel/Aviv' - - name: 'ELI LEVY' - email: 'el489u@intl.att.com' - company: 'ATT' - id: 'el489u' - timezone: 'Israel/Lod' -tsc: - approval: 'https://lists.onap.org/pipermail/onap-tsc' - changes: - - type: 'Addition' - name: 'Michael Lando' - name: 'Idan Amit' - name: 'Tal Gitelman' - name: 'Yuli Shlosberg' - name: 'ELI LEVI' - link: 'https://wiki.onap.org/pages/viewpage.action?pageId=25435557' - - type: 'Addition' - name: 'Ofir Sonsino' - link: 'https://wiki.onap.org/pages/viewpage.action?pageId=45305945' - - type: 'Removal' - name: 'Michael Lando' - link: 'https://lists.onap.org/g/onap-tsc/message/4239' - diff --git a/LICENSE.TXT b/LICENSE.TXT deleted file mode 100644 index f479f8a..0000000 --- a/LICENSE.TXT +++ /dev/null @@ -1,20 +0,0 @@ -/* -* ============LICENSE_START========================================== -* 
=================================================================== -* Copyright © 2018 AT&T Intellectual Property. -* Copyright © 2018 Amdocs -* All rights reserved. -* =================================================================== -* Licensed under the Apache License, Version 2.0 (the "License"); -* you may not use this file except in compliance with the License. -* You may obtain a copy of the License at -* -* http://www.apache.org/licenses/LICENSE-2.0 -* -* Unless required by applicable law or agreed to in writing, software -* distributed under the License is distributed on an "AS IS" BASIS, -* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -* See the License for the specific language governing permissions and -* limitations under the License. -* ============LICENSE_END============================================ -*/ \ No newline at end of file diff --git a/README.md b/README.md deleted file mode 100644 index a3a150f..0000000 --- a/README.md +++ /dev/null @@ -1,41 +0,0 @@ -# ONAP JTOSCA - - ---- ---- - -# Introduction - -ONAP JTOSCA is delivered as helper JAR that can be used by clients that work with TOSCA CSAR files. -It parses the CSAR and returns the model object which represents the CSAR contents. -Prior to that, it performs validations on the CSAR to check its TOSCA compliance. - - -# Compiling ONAP JTOSCA - -ONAP JTOSCA can be compiled easily using maven command: `mvn clean install` -The result is JAR file under "target" folder - -# Getting Help - -*** to be completed on release *** - -SDC@lists.onap.org - -SDC Javadoc and Maven site - -*** to be completed on rrelease *** - -# Release notes for versions - -1.1.0-SNAPSHOT - -Initial after separating into separate repo - -------------------------------- - -1.1.1-SNAPSHOT - -Added toString of Function (GetInput, etc.) 
- -Allowed two arguments for GetInput - name of list input and index in list diff --git a/checkstyle-suppressions.xml b/checkstyle-suppressions.xml deleted file mode 100644 index 2920ca2..0000000 --- a/checkstyle-suppressions.xml +++ /dev/null @@ -1,38 +0,0 @@ - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/docs/index.rst b/docs/index.rst deleted file mode 100644 index 833e1aa..0000000 --- a/docs/index.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. This work is licensed under a Creative Commons Attribution 4.0 International License. - -TODO Add files to toctree and delete this header ------------------------------------------------- -.. toctree:: - :maxdepth: 1 - - diff --git a/jtosca/.gitreview b/jtosca/.gitreview new file mode 100644 index 0000000..369108b --- /dev/null +++ b/jtosca/.gitreview @@ -0,0 +1,4 @@ +[gerrit] +host=gerrit.onap.org +port=29418 +project=sdc/jtosca.git \ No newline at end of file diff --git a/jtosca/INFO.yaml b/jtosca/INFO.yaml new file mode 100644 index 0000000..3d21b87 --- /dev/null +++ b/jtosca/INFO.yaml @@ -0,0 +1,62 @@ +--- +project: 'sdc/jtosca' +project_creation_date: '2017-05-26' +lifecycle_state: 'Incubation' +project_lead: &onap_releng_ptl + name: 'Ofir Sonsino' + email: 'ofir.sonsino@intl.att.com' + company: 'ATT' + id: 'os0695' + timezone: 'Israel/Lod' +primary_contact: *onap_releng_ptl +issue_tracking: + type: 'jira' + url: 'https://jira.onap.org/projects/SDC' + key: 'SDC' +meetings: + - type: 'zoom' + agenda: '' + url: 'https://wiki.onap.org/pages/viewpage.action?pageId=6592847' + server: 'n/a' + channel: 'n/a' + repeats: 'weekly' + time: '14:00 UTC' +committers: + - <<: *onap_releng_ptl + - name: 'Idan Amit' + email: 'ia096e@intl.att.com' + company: 'ATT' + id: 'idanamit' + timezone: 'Israel/Aviv' + - name: 'Tal Gitelman' + email: 'tg851x@intl.att.com' + company: 'ATT' + id: 'tgitelman' + timezone: 'Israel/Aviv' + - name: 'Yuli Shlosberg' + email: 'ys9693@att.com' + company: 'ATT' + id: 'ys9693' + 
timezone: 'Israel/Aviv' + - name: 'ELI LEVY' + email: 'el489u@intl.att.com' + company: 'ATT' + id: 'el489u' + timezone: 'Israel/Lod' +tsc: + approval: 'https://lists.onap.org/pipermail/onap-tsc' + changes: + - type: 'Addition' + name: 'Michael Lando' + name: 'Idan Amit' + name: 'Tal Gitelman' + name: 'Yuli Shlosberg' + name: 'ELI LEVI' + link: 'https://wiki.onap.org/pages/viewpage.action?pageId=25435557' + - type: 'Addition' + name: 'Ofir Sonsino' + link: 'https://wiki.onap.org/pages/viewpage.action?pageId=45305945' + - type: 'Removal' + name: 'Michael Lando' + link: 'https://lists.onap.org/g/onap-tsc/message/4239' + diff --git a/jtosca/LICENSE.TXT b/jtosca/LICENSE.TXT new file mode 100644 index 0000000..f479f8a --- /dev/null +++ b/jtosca/LICENSE.TXT @@ -0,0 +1,20 @@ +/* +* ============LICENSE_START========================================== +* =================================================================== +* Copyright © 2018 AT&T Intellectual Property. +* Copyright © 2018 Amdocs +* All rights reserved. +* =================================================================== +* Licensed under the Apache License, Version 2.0 (the "License"); +* you may not use this file except in compliance with the License. +* You may obtain a copy of the License at +* +* http://www.apache.org/licenses/LICENSE-2.0 +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific language governing permissions and +* limitations under the License. 
+* ============LICENSE_END============================================ +*/ \ No newline at end of file diff --git a/jtosca/README.md b/jtosca/README.md new file mode 100644 index 0000000..a3a150f --- /dev/null +++ b/jtosca/README.md @@ -0,0 +1,41 @@ +# ONAP JTOSCA + + +--- +--- + +# Introduction + +ONAP JTOSCA is delivered as helper JAR that can be used by clients that work with TOSCA CSAR files. +It parses the CSAR and returns the model object which represents the CSAR contents. +Prior to that, it performs validations on the CSAR to check its TOSCA compliance. + + +# Compiling ONAP JTOSCA + +ONAP JTOSCA can be compiled easily using maven command: `mvn clean install` +The result is JAR file under "target" folder + +# Getting Help + +*** to be completed on release *** + +SDC@lists.onap.org + +SDC Javadoc and Maven site + +*** to be completed on rrelease *** + +# Release notes for versions + +1.1.0-SNAPSHOT + +Initial after separating into separate repo + +------------------------------- + +1.1.1-SNAPSHOT + +Added toString of Function (GetInput, etc.) + +Allowed two arguments for GetInput - name of list input and index in list diff --git a/jtosca/checkstyle-suppressions.xml b/jtosca/checkstyle-suppressions.xml new file mode 100644 index 0000000..2920ca2 --- /dev/null +++ b/jtosca/checkstyle-suppressions.xml @@ -0,0 +1,38 @@ + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/jtosca/docs/index.rst b/jtosca/docs/index.rst new file mode 100644 index 0000000..833e1aa --- /dev/null +++ b/jtosca/docs/index.rst @@ -0,0 +1,8 @@ +.. This work is licensed under a Creative Commons Attribution 4.0 International License. + +TODO Add files to toctree and delete this header +------------------------------------------------ +.. 
toctree:: + :maxdepth: 1 + + diff --git a/jtosca/pom.xml b/jtosca/pom.xml new file mode 100644 index 0000000..8e02299 --- /dev/null +++ b/jtosca/pom.xml @@ -0,0 +1,284 @@ + + 4.0.0 + + org.onap.sdc.jtosca + jtosca + 1.6.0-SNAPSHOT + sdc-jtosca + + + org.onap.oparent + oparent + 2.0.0 + + + + + + + + + UTF-8 + + + + 2.1 + + + + + + + ${project.basedir}/target/jacoco.exec + https://nexus.onap.org + /content/sites/site/org/onap/sdc/jtosca/${project.version} + snapshots + releases + + ${project.build.sourceEncoding} + true + ${project.basedir} + . + **/scripts/**/* + **/test/**/*,**/tests/**/* + app/**/*.js,server-mock/**/*.js,src/**/*.js,src/main/**/*.java + ${project.version} + + + + + + + org.yaml + snakeyaml + 1.14 + compile + + + + org.slf4j + slf4j-api + 1.7.25 + + + + + + org.hamcrest + hamcrest + ${hamcrest.version} + test + + + org.hamcrest + hamcrest-library + ${hamcrest.version} + test + + + junit + junit + 4.12 + test + + + + com.opencsv + opencsv + 3.10 + test + + + + + org.apache.commons + commons-io + 1.3.2 + + + + org.reflections + reflections + 0.9.11 + + + com.google.guava + guava + + + + + com.google.guava + guava + compile + 25.1-jre + + + + + + + + + org.apache.maven.plugins + maven-javadoc-plugin + 2.10.4 + + false + org.umlgraph.doclet.UmlGraphDoc + + org.umlgraph + umlgraph + 5.6 + + -views + true + + + + + + + + + maven-checkstyle-plugin + 2.17 + + checkstyle-suppressions.xml + checkstyle.suppressions.file + + + + org.apache.maven.plugins + maven-site-plugin + 3.4 + + + org.apache.maven.wagon + wagon-webdav-jackrabbit + 2.10 + + + + + + org.jacoco + jacoco-maven-plugin + 0.7.8 + + + + prepare-agent + + prepare-agent + + + ${sonar.jacoco.reportPath} + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + 2.5.1 + true + + 1.8 + 1.8 + + + + org.apache.maven.plugins + maven-javadoc-plugin + 2.10.3 + + + + + org.apache.maven.plugins + maven-surefire-plugin + 2.19.1 + + + */* + + + + + org.sonarsource.scanner.maven + sonar-maven-plugin + 
3.0.2 + + + com.github.sylvainlaurent.maven + yaml-json-validator-maven-plugin + 1.0.1 + + + validate + validate + + validate + + + + + + src/main/resources/**/*.y*ml + src/test/resources/**/*.y*ml + + + + + src/main/resources/**/*.json + src/test/resources/**/*.json + + + + + + + + + + + + + central + Official Maven repository + http://repo2.maven.org/maven2/ + + + onap-releases + Release Repository + ${nexus.proxy}/content/repositories/releases/ + + + onap-snapshots + Snapshots Repository + ${nexus.proxy}/content/repositories/snapshots/ + + + + + + onap-releases + Release Repository + ${nexus.proxy}/content/repositories/${releases.path}/ + + + onap-snapshots + Snapshot Repository + ${nexus.proxy}/content/repositories/${snapshots.path}/ + + + onap-site + dav:${nexus.proxy}${sitePath} + + + + diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/CapabilityAssignment.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/CapabilityAssignment.java new file mode 100644 index 0000000..bb7b47d --- /dev/null +++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/CapabilityAssignment.java @@ -0,0 +1,174 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + +package org.onap.sdc.toscaparser.api; + +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.Map; + +import org.onap.sdc.toscaparser.api.elements.CapabilityTypeDef; +import org.onap.sdc.toscaparser.api.elements.PropertyDef; + +public class CapabilityAssignment { + + private String name; + private LinkedHashMap _properties; + private CapabilityTypeDef _definition; + private LinkedHashMap _customDef; + + public CapabilityAssignment(String cname, + LinkedHashMap cproperties, + CapabilityTypeDef cdefinition, LinkedHashMap customDef) { + name = cname; + _properties = cproperties; + _definition = cdefinition; + _customDef = customDef; + } + + /** + * Get the properties list for capability + * + * @return list of property objects for capability + */ + public ArrayList getPropertiesObjects() { + // Return a list of property objects + ArrayList properties = new ArrayList(); + LinkedHashMap props = _properties; + if (props != null) { + for (Map.Entry me : props.entrySet()) { + String pname = me.getKey(); + Object pvalue = me.getValue(); + + LinkedHashMap propsDef = _definition.getPropertiesDef(); + if (propsDef != null) { + PropertyDef pd = (PropertyDef) propsDef.get(pname); + if (pd != null) { + properties.add(new Property(pname, pvalue, pd.getSchema(), _customDef)); + } + } + } + } + return properties; + } + + /** + * Get the map of properties + * + * @return map of all properties contains dictionary of property name and property object + */ + public LinkedHashMap getProperties() { + // Return a dictionary of property name-object pairs + LinkedHashMap npps = new LinkedHashMap<>(); + for (Property p : getPropertiesObjects()) { + npps.put(p.getName(), p); + } + return npps; + } + + /** + * Get the property value by name + * + * @param pname - the 
property name for capability + * @return the property value for this name + */ + public Object getPropertyValue(String pname) { + // Return the value of a given property name + LinkedHashMap props = getProperties(); + if (props != null && props.get(pname) != null) { + return props.get(name).getValue(); + } + return null; + } + + /** + * Get the name for capability + * + * @return the name for capability + */ + public String getName() { + return name; + } + + /** + * Get the definition for capability + * + * @return CapabilityTypeDef - contain definition for capability + */ + public CapabilityTypeDef getDefinition() { + return _definition; + } + + /** + * Set the property for capability + * + * @param pname - the property name for capability to set + * @param pvalue - the property valiue for capability to set + */ + public void setProperty(String pname, Object pvalue) { + _properties.put(pname, pvalue); + } + + @Override + public String toString() { + return "CapabilityAssignment{" + + "name='" + name + '\'' + + ", _properties=" + _properties + + ", _definition=" + _definition + + '}'; + } +} + +/*python + +from toscaparser.properties import Property + + +class CapabilityAssignment(object): + '''TOSCA built-in capabilities type.''' + + def __init__(self, name, properties, definition): + self.name = name + self._properties = properties + self.definition = definition + + def get_properties_objects(self): + '''Return a list of property objects.''' + properties = [] + props = self._properties + if props: + for name, value in props.items(): + props_def = self.definition.get_properties_def() + if props_def and name in props_def: + properties.append(Property(name, value, + props_def[name].schema)) + return properties + + def get_properties(self): + '''Return a dictionary of property name-object pairs.''' + return {prop.name: prop + for prop in self.get_properties_objects()} + + def get_property_value(self, name): + '''Return the value of a given property name.''' + props = 
self.get_properties() + if props and name in props: + return props[name].value +*/ diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/CapabilityAssignments.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/CapabilityAssignments.java new file mode 100644 index 0000000..28ada96 --- /dev/null +++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/CapabilityAssignments.java @@ -0,0 +1,72 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + +package org.onap.sdc.toscaparser.api; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +public class CapabilityAssignments { + + private Map capabilityAssignments; + + public CapabilityAssignments(Map capabilityAssignments) { + this.capabilityAssignments = capabilityAssignments != null ? new HashMap<>(capabilityAssignments) : new HashMap<>(); + } + + /** + * Get all capability assignments for node template.
+ * This object can be either the original one, holding all capability assignments for this node template,or a filtered one, holding a filtered subset.
+ * + * @return list of capability assignments for the node template.
+ * If there are no capability assignments, empty list is returned. + */ + public List getAll() { + return new ArrayList<>(capabilityAssignments.values()); + } + + /** + * Filter capability assignments by capability tosca type. + * + * @param type - The tosca type of capability assignments. + * @return CapabilityAssignments object, containing capability assignments of this type.
+ * If no such found, filtering will result in an empty collection. + */ + public CapabilityAssignments getCapabilitiesByType(String type) { + Map capabilityAssignmentsMap = capabilityAssignments.entrySet().stream() + .filter(cap -> cap.getValue().getDefinition().getType().equals(type)).collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + + return new CapabilityAssignments(capabilityAssignmentsMap); + } + + /** + * Get capability assignment by capability name. + * + * @param name - The name of capability assignment + * @return capability assignment with this name, or null if no such capability assignment was found. + */ + public CapabilityAssignment getCapabilityByName(String name) { + return capabilityAssignments.get(name); + } + +} diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/DataEntity.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/DataEntity.java new file mode 100644 index 0000000..e95fe72 --- /dev/null +++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/DataEntity.java @@ -0,0 +1,457 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + +package org.onap.sdc.toscaparser.api; + +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.elements.DataType; +import org.onap.sdc.toscaparser.api.elements.PortSpec; +import org.onap.sdc.toscaparser.api.elements.PropertyDef; +import org.onap.sdc.toscaparser.api.elements.ScalarUnitFrequency; +import org.onap.sdc.toscaparser.api.elements.ScalarUnitSize; +import org.onap.sdc.toscaparser.api.elements.ScalarUnitTime; +import org.onap.sdc.toscaparser.api.elements.constraints.Constraint; +import org.onap.sdc.toscaparser.api.elements.constraints.Schema; +import org.onap.sdc.toscaparser.api.functions.Function; +import org.onap.sdc.toscaparser.api.utils.TOSCAVersionProperty; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; +import org.onap.sdc.toscaparser.api.utils.ValidateUtils; + +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.List; + +public class DataEntity { + // A complex data value entity + + private LinkedHashMap customDef; + private DataType dataType; + private LinkedHashMap schema; + private Object value; + private String propertyName; + + public DataEntity(String _dataTypeName, Object _valueDict, + LinkedHashMap _customDef, String _propName) { + + customDef = _customDef; + dataType = new DataType(_dataTypeName, _customDef); + schema = dataType.getAllProperties(); + value = _valueDict; + propertyName = _propName; + } + + @SuppressWarnings("unchecked") + public Object validate() { + // Validate the value by the definition of the datatype + + // A datatype can not have both 'type' and 'properties' definitions. 
+ // If the datatype has 'type' definition + if (dataType.getValueType() != null) { + value = DataEntity.validateDatatype(dataType.getValueType(), value, null, customDef, null); + Schema schemaCls = new Schema(propertyName, dataType.getDefs()); + for (Constraint constraint : schemaCls.getConstraints()) { + constraint.validate(value); + } + } + // If the datatype has 'properties' definition + else { + if (!(value instanceof LinkedHashMap)) { + //ERROR under investigation + String checkedVal = value != null ? value.toString() : null; + + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE001", String.format( + "TypeMismatchError: \"%s\" is not a map. The type is \"%s\"", + checkedVal, dataType.getType()))); + + if (value instanceof List && ((List) value).size() > 0) { + value = ((List) value).get(0); + } + + if (!(value instanceof LinkedHashMap)) { + return value; + } + } + + + LinkedHashMap valueDict = (LinkedHashMap) value; + ArrayList allowedProps = new ArrayList<>(); + ArrayList requiredProps = new ArrayList<>(); + LinkedHashMap defaultProps = new LinkedHashMap<>(); + if (schema != null) { + allowedProps.addAll(schema.keySet()); + for (String name : schema.keySet()) { + PropertyDef propDef = schema.get(name); + if (propDef.isRequired()) { + requiredProps.add(name); + } + if (propDef.getDefault() != null) { + defaultProps.put(name, propDef.getDefault()); + } + } + } + + // check allowed field + for (String valueKey : valueDict.keySet()) { + //1710 devlop JSON validation + if (!("json").equals(dataType.getType()) && !allowedProps.contains(valueKey)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE100", String.format( + "UnknownFieldError: Data value of type \"%s\" contains unknown field \"%s\"", + dataType.getType(), valueKey))); + } + } + + // check default field + for (String defKey : defaultProps.keySet()) { + Object defValue = defaultProps.get(defKey); + if (valueDict.get(defKey) == 
null) { + valueDict.put(defKey, defValue); + } + + } + + // check missing field + ArrayList missingProp = new ArrayList<>(); + for (String reqKey : requiredProps) { + if (!valueDict.keySet().contains(reqKey)) { + missingProp.add(reqKey); + } + } + if (missingProp.size() > 0) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE003", String.format( + "MissingRequiredFieldError: Data value of type \"%s\" is missing required field(s) \"%s\"", + dataType.getType(), missingProp.toString()))); + } + + // check every field + for (String vname : valueDict.keySet()) { + Object vvalue = valueDict.get(vname); + LinkedHashMap schemaName = _findSchema(vname); + if (schemaName == null) { + continue; + } + Schema propSchema = new Schema(vname, schemaName); + // check if field value meets type defined + DataEntity.validateDatatype(propSchema.getType(), + vvalue, + propSchema.getEntrySchema(), + customDef, + null); + + // check if field value meets constraints defined + if (propSchema.getConstraints() != null) { + for (Constraint constraint : propSchema.getConstraints()) { + if (vvalue instanceof ArrayList) { + for (Object val : (ArrayList) vvalue) { + constraint.validate(val); + } + } else { + constraint.validate(vvalue); + } + } + } + } + } + return value; + } + + private LinkedHashMap _findSchema(String name) { + if (schema != null && schema.get(name) != null) { + return schema.get(name).getSchema(); + } + return null; + } + + public static Object validateDatatype(String type, + Object value, + LinkedHashMap entrySchema, + LinkedHashMap customDef, + String propName) { + // Validate value with given type + + // If type is list or map, validate its entry by entry_schema(if defined) + // If type is a user-defined complex datatype, custom_def is required. 
+ + if (Function.isFunction(value)) { + return value; + } else if (type == null) { + //NOT ANALYZED + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE002", String.format( + "MissingType: Type is missing for value \"%s\"", + value.toString()))); + return value; + } else if (type.equals(Schema.STRING)) { + return ValidateUtils.validateString(value); + } else if (type.equals(Schema.INTEGER)) { + return ValidateUtils.validateInteger(value); + } else if (type.equals(Schema.FLOAT)) { + return ValidateUtils.validateFloat(value); + } else if (type.equals(Schema.NUMBER)) { + return ValidateUtils.validateNumeric(value); + } else if (type.equals(Schema.BOOLEAN)) { + return ValidateUtils.validateBoolean(value); + } else if (type.equals(Schema.RANGE)) { + return ValidateUtils.validateRange(value); + } else if (type.equals(Schema.TIMESTAMP)) { + ValidateUtils.validateTimestamp(value); + return value; + } else if (type.equals(Schema.LIST)) { + ValidateUtils.validateList(value); + if (entrySchema != null) { + DataEntity.validateEntry(value, entrySchema, customDef); + } + return value; + } else if (type.equals(Schema.SCALAR_UNIT_SIZE)) { + return (new ScalarUnitSize(value)).validateScalarUnit(); + } else if (type.equals(Schema.SCALAR_UNIT_FREQUENCY)) { + return (new ScalarUnitFrequency(value)).validateScalarUnit(); + } else if (type.equals(Schema.SCALAR_UNIT_TIME)) { + return (new ScalarUnitTime(value)).validateScalarUnit(); + } else if (type.equals(Schema.VERSION)) { + return (new TOSCAVersionProperty(value.toString())).getVersion(); + } else if (type.equals(Schema.MAP)) { + ValidateUtils.validateMap(value); + if (entrySchema != null) { + DataEntity.validateEntry(value, entrySchema, customDef); + } + return value; + } else if (type.equals(Schema.PORTSPEC)) { + // tODO(TBD) bug 1567063, validate source & target as PortDef type + // as complex types not just as integers + PortSpec.validateAdditionalReq(value, propName, customDef); + } else { + 
DataEntity data = new DataEntity(type, value, customDef, null); + return data.validate(); + } + + return value; + } + + @SuppressWarnings("unchecked") + public static Object validateEntry(Object value, + LinkedHashMap entrySchema, + LinkedHashMap customDef) { + + // Validate entries for map and list + Schema schema = new Schema(null, entrySchema); + Object valueob = value; + ArrayList valueList = null; + if (valueob instanceof LinkedHashMap) { + valueList = new ArrayList(((LinkedHashMap) valueob).values()); + } else if (valueob instanceof ArrayList) { + valueList = (ArrayList) valueob; + } + if (valueList != null) { + for (Object v : valueList) { + DataEntity.validateDatatype(schema.getType(), v, schema.getEntrySchema(), customDef, null); + if (schema.getConstraints() != null) { + for (Constraint constraint : schema.getConstraints()) { + constraint.validate(v); + } + } + } + } + return value; + } + + @Override + public String toString() { + return "DataEntity{" + + "customDef=" + customDef + + ", dataType=" + dataType + + ", schema=" + schema + + ", value=" + value + + ", propertyName='" + propertyName + '\'' + + '}'; + } +} + +/*python + +from toscaparser.common.exception import ValidationIssueCollector +from toscaparser.common.exception import MissingRequiredFieldError +from toscaparser.common.exception import TypeMismatchError +from toscaparser.common.exception import UnknownFieldError +from toscaparser.elements.constraints import Schema +from toscaparser.elements.datatype import DataType +from toscaparser.elements.portspectype import PortSpec +from toscaparser.elements.scalarunit import ScalarUnit_Frequency +from toscaparser.elements.scalarunit import ScalarUnit_Size +from toscaparser.elements.scalarunit import ScalarUnit_Time +from toscaparser.utils.gettextutils import _ +from toscaparser.utils import validateutils + + +class DataEntity(object): + '''A complex data value entity.''' + + def __init__(self, datatypename, value_dict, custom_def=None, + 
prop_name=None): + self.custom_def = custom_def + self.datatype = DataType(datatypename, custom_def) + self.schema = self.datatype.get_all_properties() + self.value = value_dict + self.property_name = prop_name + + def validate(self): + '''Validate the value by the definition of the datatype.''' + + # A datatype can not have both 'type' and 'properties' definitions. + # If the datatype has 'type' definition + if self.datatype.value_type: + self.value = DataEntity.validate_datatype(self.datatype.value_type, + self.value, + None, + self.custom_def) + schema = Schema(self.property_name, self.datatype.defs) + for constraint in schema.constraints: + constraint.validate(self.value) + # If the datatype has 'properties' definition + else: + if not isinstance(self.value, dict): + ValidationIssueCollector.appendException( + TypeMismatchError(what=self.value, + type=self.datatype.type)) + allowed_props = [] + required_props = [] + default_props = {} + if self.schema: + allowed_props = self.schema.keys() + for name, prop_def in self.schema.items(): + if prop_def.required: + required_props.append(name) + if prop_def.default: + default_props[name] = prop_def.default + + # check allowed field + for value_key in list(self.value.keys()): + if value_key not in allowed_props: + ValidationIssueCollector.appendException( + UnknownFieldError(what=(_('Data value of type "%s"') + % self.datatype.type), + field=value_key)) + + # check default field + for def_key, def_value in list(default_props.items()): + if def_key not in list(self.value.keys()): + self.value[def_key] = def_value + + # check missing field + missingprop = [] + for req_key in required_props: + if req_key not in list(self.value.keys()): + missingprop.append(req_key) + if missingprop: + ValidationIssueCollector.appendException( + MissingRequiredFieldError( + what=(_('Data value of type "%s"') + % self.datatype.type), required=missingprop)) + + # check every field + for name, value in list(self.value.items()): + schema_name = 
self._find_schema(name) + if not schema_name: + continue + prop_schema = Schema(name, schema_name) + # check if field value meets type defined + DataEntity.validate_datatype(prop_schema.type, value, + prop_schema.entry_schema, + self.custom_def) + # check if field value meets constraints defined + if prop_schema.constraints: + for constraint in prop_schema.constraints: + if isinstance(value, list): + for val in value: + constraint.validate(val) + else: + constraint.validate(value) + + return self.value + + def _find_schema(self, name): + if self.schema and name in self.schema.keys(): + return self.schema[name].schema + + @staticmethod + def validate_datatype(type, value, entry_schema=None, custom_def=None, + prop_name=None): + '''Validate value with given type. + + If type is list or map, validate its entry by entry_schema(if defined) + If type is a user-defined complex datatype, custom_def is required. + ''' + from toscaparser.functions import is_function + if is_function(value): + return value + if type == Schema.STRING: + return validateutils.validate_string(value) + elif type == Schema.INTEGER: + return validateutils.validate_integer(value) + elif type == Schema.FLOAT: + return validateutils.validate_float(value) + elif type == Schema.NUMBER: + return validateutils.validate_numeric(value) + elif type == Schema.BOOLEAN: + return validateutils.validate_boolean(value) + elif type == Schema.RANGE: + return validateutils.validate_range(value) + elif type == Schema.TIMESTAMP: + validateutils.validate_timestamp(value) + return value + elif type == Schema.LIST: + validateutils.validate_list(value) + if entry_schema: + DataEntity.validate_entry(value, entry_schema, custom_def) + return value + elif type == Schema.SCALAR_UNIT_SIZE: + return ScalarUnit_Size(value).validate_scalar_unit() + elif type == Schema.SCALAR_UNIT_FREQUENCY: + return ScalarUnit_Frequency(value).validate_scalar_unit() + elif type == Schema.SCALAR_UNIT_TIME: + return 
ScalarUnit_Time(value).validate_scalar_unit() + elif type == Schema.VERSION: + return validateutils.TOSCAVersionProperty(value).get_version() + elif type == Schema.MAP: + validateutils.validate_map(value) + if entry_schema: + DataEntity.validate_entry(value, entry_schema, custom_def) + return value + elif type == Schema.PORTSPEC: + # tODO(TBD) bug 1567063, validate source & target as PortDef type + # as complex types not just as integers + PortSpec.validate_additional_req(value, prop_name, custom_def) + else: + data = DataEntity(type, value, custom_def) + return data.validate() + + @staticmethod + def validate_entry(value, entry_schema, custom_def=None): + '''Validate entries for map and list.''' + schema = Schema(None, entry_schema) + valuelist = value + if isinstance(value, dict): + valuelist = list(value.values()) + for v in valuelist: + DataEntity.validate_datatype(schema.type, v, schema.entry_schema, + custom_def) + if schema.constraints: + for constraint in schema.constraints: + constraint.validate(v) + return value +*/ diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/EntityTemplate.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/EntityTemplate.java new file mode 100644 index 0000000..93bfe2b --- /dev/null +++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/EntityTemplate.java @@ -0,0 +1,885 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + +package org.onap.sdc.toscaparser.api; + +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.elements.*; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; + + +import javax.annotation.Nullable; +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.Map; + +public abstract class EntityTemplate { + // Base class for TOSCA templates + + protected static final String DERIVED_FROM = "derived_from"; + protected static final String PROPERTIES = "properties"; + protected static final String REQUIREMENTS = "requirements"; + protected static final String INTERFACES = "interfaces"; + protected static final String CAPABILITIES = "capabilities"; + protected static final String TYPE = "type"; + protected static final String DESCRIPTION = "description"; + protected static final String DIRECTIVES = "directives"; + protected static final String ATTRIBUTES = "attributes"; + protected static final String ARTIFACTS = "artifacts"; + protected static final String NODE_FILTER = "node_filter"; + protected static final String COPY = "copy"; + + protected static final String SECTIONS[] = { + DERIVED_FROM, PROPERTIES, REQUIREMENTS, INTERFACES, + CAPABILITIES, TYPE, DESCRIPTION, DIRECTIVES, + ATTRIBUTES, ARTIFACTS, NODE_FILTER, COPY}; + + private static final String NODE = "node"; + private static final String CAPABILITY = "capability"; + private static final String RELATIONSHIP = "relationship"; 
+ private static final String OCCURRENCES = "occurrences"; + + protected static final String REQUIREMENTS_SECTION[] = { + NODE, CAPABILITY, RELATIONSHIP, OCCURRENCES, NODE_FILTER}; + + //# Special key names + private static final String METADATA = "metadata"; + protected static final String SPECIAL_SECTIONS[] = {METADATA}; + + protected String name; + protected LinkedHashMap entityTpl; + protected LinkedHashMap customDef; + protected StatefulEntityType typeDefinition; + private ArrayList _properties; + private ArrayList _interfaces; + private ArrayList _requirements; + private ArrayList _capabilities; + + @Nullable + private NodeTemplate _parentNodeTemplate; + + // dummy constructor for subclasses that don't want super + public EntityTemplate() { + return; + } + + public EntityTemplate(String _name, + LinkedHashMap _template, + String _entityName, + LinkedHashMap _customDef) { + this(_name, _template, _entityName, _customDef, null); + } + + @SuppressWarnings("unchecked") + public EntityTemplate(String _name, + LinkedHashMap _template, + String _entityName, + LinkedHashMap _customDef, + NodeTemplate parentNodeTemplate) { + name = _name; + entityTpl = _template; + customDef = _customDef; + _validateField(entityTpl); + String type = (String) entityTpl.get("type"); + UnsupportedType.validateType(type); + if (_entityName.equals("node_type")) { + if (type != null) { + typeDefinition = new NodeType(type, customDef); + } else { + typeDefinition = null; + } + } + if (_entityName.equals("relationship_type")) { + Object relationship = _template.get("relationship"); + type = null; + if (relationship != null && relationship instanceof LinkedHashMap) { + type = (String) ((LinkedHashMap) relationship).get("type"); + } else if (relationship instanceof String) { + type = (String) entityTpl.get("relationship"); + } else { + type = (String) entityTpl.get("type"); + } + UnsupportedType.validateType(type); + typeDefinition = new RelationshipType(type, null, customDef); + } + if 
(_entityName.equals("policy_type")) { + if (type == null) { + //msg = (_('Policy definition of "%(pname)s" must have' + // ' a "type" ''attribute.') % dict(pname=name)) + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE140", String.format( + "ValidationError: Policy definition of \"%s\" must have a \"type\" attribute", name))); + } + typeDefinition = new PolicyType(type, customDef); + } + if (_entityName.equals("group_type")) { + if (type != null) { + typeDefinition = new GroupType(type, customDef); + } else { + typeDefinition = null; + } + } + _properties = null; + _interfaces = null; + _requirements = null; + _capabilities = null; + _parentNodeTemplate = parentNodeTemplate; + } + + public NodeTemplate getParentNodeTemplate() { + return _parentNodeTemplate; + } + + public String getType() { + if (typeDefinition != null) { + String clType = typeDefinition.getClass().getSimpleName(); + if (clType.equals("NodeType")) { + return (String) ((NodeType) typeDefinition).getType(); + } else if (clType.equals("PolicyType")) { + return (String) ((PolicyType) typeDefinition).getType(); + } else if (clType.equals("GroupType")) { + return (String) ((GroupType) typeDefinition).getType(); + } else if (clType.equals("RelationshipType")) { + return (String) ((RelationshipType) typeDefinition).getType(); + } + } + return null; + } + + public Object getParentType() { + if (typeDefinition != null) { + String clType = typeDefinition.getClass().getSimpleName(); + if (clType.equals("NodeType")) { + return ((NodeType) typeDefinition).getParentType(); + } else if (clType.equals("PolicyType")) { + return ((PolicyType) typeDefinition).getParentType(); + } else if (clType.equals("GroupType")) { + return ((GroupType) typeDefinition).getParentType(); + } else if (clType.equals("RelationshipType")) { + return ((RelationshipType) typeDefinition).getParentType(); + } + } + return null; + } + + @SuppressWarnings("unchecked") + public RequirementAssignments 
getRequirements() { + if (_requirements == null) { + _requirements = _createRequirements(); + } + return new RequirementAssignments(_requirements); + } + + private ArrayList _createRequirements() { + ArrayList reqs = new ArrayList<>(); + ArrayList> requirements = (ArrayList>) + typeDefinition.getValue(REQUIREMENTS, entityTpl, false); + if (requirements == null) { + requirements = new ArrayList<>(); + } + for (Map req : requirements) { + for (String reqName : req.keySet()) { + Object reqItem = req.get(reqName); + if (reqItem instanceof LinkedHashMap) { + Object rel = ((LinkedHashMap) reqItem).get("relationship"); +// LinkedHashMap relationship = rel instanceof LinkedHashMap ? (LinkedHashMap) rel : null; + String nodeName = ((LinkedHashMap) reqItem).get("node").toString(); + Object capability = ((LinkedHashMap) reqItem).get("capability"); + String capabilityString = capability != null ? capability.toString() : null; + + reqs.add(new RequirementAssignment(reqName, nodeName, capabilityString, rel)); + } else if (reqItem instanceof String) { //short notation + String nodeName = String.valueOf(reqItem); + reqs.add(new RequirementAssignment(reqName, nodeName)); + } + } + } + return reqs; + } + + public ArrayList getPropertiesObjects() { + // Return properties objects for this template + if (_properties == null) { + _properties = _createProperties(); + } + return _properties; + } + + public LinkedHashMap getProperties() { + LinkedHashMap props = new LinkedHashMap<>(); + for (Property po : getPropertiesObjects()) { + props.put(po.getName(), po); + } + return props; + } + + public Object getPropertyValue(String name) { + LinkedHashMap props = getProperties(); + Property p = props.get(name); + return p != null ? 
p.getValue() : null; + } + + public String getPropertyType(String name) { + Property property = getProperties().get(name); + if (property != null) { + return property.getType(); + } + return null; + } + + public ArrayList getInterfaces() { + if (_interfaces == null) { + _interfaces = _createInterfaces(); + } + return _interfaces; + } + + public ArrayList getCapabilitiesObjects() { + // Return capabilities objects for this template + if (_capabilities == null) { + _capabilities = _createCapabilities(); + } + return _capabilities; + + } + + public CapabilityAssignments getCapabilities() { + LinkedHashMap caps = new LinkedHashMap(); + for (CapabilityAssignment cap : getCapabilitiesObjects()) { + caps.put(cap.getName(), cap); + } + return new CapabilityAssignments(caps); + } + + public boolean isDerivedFrom(String typeStr) { + // Returns true if this object is derived from 'type_str'. + // False otherwise + + if (getType() == null) { + return false; + } else if (getType().equals(typeStr)) { + return true; + } else if (getParentType() != null) { + return ((EntityType) getParentType()).isDerivedFrom(typeStr); + } + return false; + } + + @SuppressWarnings("unchecked") + private ArrayList _createCapabilities() { + ArrayList capability = new ArrayList(); + LinkedHashMap caps = (LinkedHashMap) + ((EntityType) typeDefinition).getValue(CAPABILITIES, entityTpl, true); + if (caps != null) { + //?!? getCapabilities defined only for NodeType... 
+ LinkedHashMap capabilities = null; + if (typeDefinition instanceof NodeType) { + capabilities = ((NodeType) typeDefinition).getCapabilities(); + } else if (typeDefinition instanceof GroupType) { + capabilities = ((GroupType) typeDefinition).getCapabilities(); + } + for (Map.Entry me : caps.entrySet()) { + String name = me.getKey(); + LinkedHashMap props = (LinkedHashMap) me.getValue(); + if (capabilities.get(name) != null) { + CapabilityTypeDef c = capabilities.get(name); // a CapabilityTypeDef + LinkedHashMap properties = new LinkedHashMap(); + // first use the definition default value + LinkedHashMap cprops = c.getProperties(); + if (cprops != null) { + for (Map.Entry cpe : cprops.entrySet()) { + String propertyName = cpe.getKey(); + LinkedHashMap propertyDef = (LinkedHashMap) cpe.getValue(); + Object dob = propertyDef.get("default"); + if (dob != null) { + properties.put(propertyName, dob); + + } + } + } + // then update (if available) with the node properties + LinkedHashMap pp = (LinkedHashMap) props.get("properties"); + if (pp != null) { + properties.putAll(pp); + } + CapabilityAssignment cap = new CapabilityAssignment(name, properties, c, customDef); + capability.add(cap); + } + } + } + return capability; + } + + protected void _validateProperties(LinkedHashMap template, StatefulEntityType entityType) { + @SuppressWarnings("unchecked") + LinkedHashMap properties = (LinkedHashMap) entityType.getValue(PROPERTIES, template, false); + _commonValidateProperties(entityType, properties); + } + + protected void _validateCapabilities() { + //BUG??? getCapabilities only defined in NodeType... 
+ LinkedHashMap typeCapabilities = ((NodeType) typeDefinition).getCapabilities(); + ArrayList allowedCaps = new ArrayList(); + if (typeCapabilities != null) { + allowedCaps.addAll(typeCapabilities.keySet()); + } + @SuppressWarnings("unchecked") + LinkedHashMap capabilities = (LinkedHashMap) + ((EntityType) typeDefinition).getValue(CAPABILITIES, entityTpl, false); + if (capabilities != null) { + _commonValidateField(capabilities, allowedCaps, "capabilities"); + _validateCapabilitiesProperties(capabilities); + } + } + + @SuppressWarnings("unchecked") + private void _validateCapabilitiesProperties(LinkedHashMap capabilities) { + for (Map.Entry me : capabilities.entrySet()) { + String cap = me.getKey(); + LinkedHashMap props = (LinkedHashMap) me.getValue(); + CapabilityAssignment capability = getCapability(cap); + if (capability == null) { + continue; + } + CapabilityTypeDef capabilitydef = capability.getDefinition(); + _commonValidateProperties(capabilitydef, (LinkedHashMap) props.get(PROPERTIES)); + + // validating capability properties values + for (Property prop : getCapability(cap).getPropertiesObjects()) { + prop.validate(); + + if (cap.equals("scalable") && prop.getName().equals("default_instances")) { + LinkedHashMap propDict = (LinkedHashMap) props.get(PROPERTIES); + int minInstances = (int) propDict.get("min_instances"); + int maxInstances = (int) propDict.get("max_instances"); + int defaultInstances = (int) propDict.get("default_instances"); + if (defaultInstances < minInstances || defaultInstances > maxInstances) { + //err_msg = ('"properties" of template "%s": ' + // '"default_instances" value is not between ' + // '"min_instances" and "max_instances".' 
% + // self.name) + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE141", String.format( + "ValidationError: \"properties\" of template \"%s\": \"default_instances\" value is not between \"min_instances\" and \"max_instances\"", + name))); + } + } + } + } + } + + private void _commonValidateProperties(StatefulEntityType entityType, LinkedHashMap properties) { + ArrayList allowedProps = new ArrayList(); + ArrayList requiredProps = new ArrayList(); + for (PropertyDef p : entityType.getPropertiesDefObjects()) { + allowedProps.add(p.getName()); + // If property is 'required' and has no 'default' value then record + if (p.isRequired() && p.getDefault() == null) { + requiredProps.add(p.getName()); + } + } + // validate all required properties have values + if (properties != null) { + ArrayList reqPropsNoValueOrDefault = new ArrayList(); + _commonValidateField(properties, allowedProps, "properties"); + // make sure it's not missing any property required by a tosca type + for (String r : requiredProps) { + if (properties.get(r) == null) { + reqPropsNoValueOrDefault.add(r); + } + } + // Required properties found without value or a default value + if (!reqPropsNoValueOrDefault.isEmpty()) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE003", String.format( + "MissingRequiredFieldError: properties of template \"%s\" are missing field(s): %s", + name, reqPropsNoValueOrDefault.toString()))); + } + } else { + // Required properties in schema, but not in template + if (!requiredProps.isEmpty()) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE004", String.format( + "MissingRequiredFieldError2: properties of template \"%s\" are missing field(s): %s", + name, requiredProps.toString()))); + } + } + } + + @SuppressWarnings("unchecked") + private void _validateField(LinkedHashMap template) { + if (!(template instanceof LinkedHashMap)) { + 
ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE142", String.format( + "MissingRequiredFieldError: Template \"%s\" is missing required field \"%s\"", name, TYPE))); + return;//??? + } + boolean bBad = false; + Object relationship = ((LinkedHashMap) template).get("relationship"); + if (relationship != null) { + if (!(relationship instanceof String)) { + bBad = (((LinkedHashMap) relationship).get(TYPE) == null); + } else if (relationship instanceof String) { + bBad = (template.get("relationship") == null); + } + } else { + bBad = (template.get(TYPE) == null); + } + if (bBad) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE143", String.format( + "MissingRequiredFieldError: Template \"%s\" is missing required field \"%s\"", name, TYPE))); + } + } + + protected void _commonValidateField(LinkedHashMap schema, ArrayList allowedList, String section) { + for (String sname : schema.keySet()) { + boolean bFound = false; + for (String allowed : allowedList) { + if (sname.equals(allowed)) { + bFound = true; + break; + } + } + if (!bFound) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE144", String.format( + "UnknownFieldError: Section \"%s\" of template \"%s\" contains unknown field \"%s\"", section, name, sname))); + } + } + + } + + @SuppressWarnings("unchecked") + private ArrayList _createProperties() { + ArrayList props = new ArrayList(); + LinkedHashMap properties = (LinkedHashMap) + ((EntityType) typeDefinition).getValue(PROPERTIES, entityTpl, false); + if (properties == null) { + properties = new LinkedHashMap(); + } + for (Map.Entry me : properties.entrySet()) { + String pname = me.getKey(); + Object pvalue = me.getValue(); + LinkedHashMap propsDef = ((StatefulEntityType) typeDefinition).getPropertiesDef(); + if (propsDef != null && propsDef.get(pname) != null) { + PropertyDef pd = (PropertyDef) propsDef.get(pname); + Property prop = new Property(pname, 
pvalue, pd.getSchema(), customDef); + props.add(prop); + } + } + ArrayList pds = ((StatefulEntityType) typeDefinition).getPropertiesDefObjects(); + for (Object pdo : pds) { + PropertyDef pd = (PropertyDef) pdo; + if (pd.getDefault() != null && properties.get(pd.getName()) == null) { + Property prop = new Property(pd.getName(), pd.getDefault(), pd.getSchema(), customDef); + props.add(prop); + } + } + return props; + } + + @SuppressWarnings("unchecked") + private ArrayList _createInterfaces() { + ArrayList interfaces = new ArrayList<>(); + LinkedHashMap typeInterfaces = new LinkedHashMap(); + if (typeDefinition instanceof RelationshipType) { + if (entityTpl instanceof LinkedHashMap) { + typeInterfaces = (LinkedHashMap) entityTpl.get(INTERFACES); + if (typeInterfaces == null) { + for (String relName : entityTpl.keySet()) { + Object relValue = entityTpl.get(relName); + if (!relName.equals("type")) { + Object relDef = relValue; + LinkedHashMap rel = null; + if (relDef instanceof LinkedHashMap) { + Object relob = ((LinkedHashMap) relDef).get("relationship"); + if (relob instanceof LinkedHashMap) { + rel = (LinkedHashMap) relob; + } + } + if (rel != null) { + if (rel.get(INTERFACES) != null) { + typeInterfaces = (LinkedHashMap) rel.get(INTERFACES); + break; + } + } + } + } + } + } + } else { + typeInterfaces = (LinkedHashMap) + ((EntityType) typeDefinition).getValue(INTERFACES, entityTpl, false); + } + if (typeInterfaces != null) { + for (Map.Entry me : typeInterfaces.entrySet()) { + String interfaceType = me.getKey(); + LinkedHashMap value = (LinkedHashMap) me.getValue(); + for (Map.Entry ve : value.entrySet()) { + String op = ve.getKey(); + Object opDef = ve.getValue(); + InterfacesDef iface = new InterfacesDef((EntityType) typeDefinition, + interfaceType, + this, + op, + opDef); + interfaces.add(iface); + } + + } + } + return interfaces; + } + + public CapabilityAssignment getCapability(String name) { + // Provide named capability + // :param name: name of capability + 
// :return: capability object if found, None otherwise + return getCapabilities().getCapabilityByName(name); + } + + // getter + public String getName() { + return name; + } + + public StatefulEntityType getTypeDefinition() { + return typeDefinition; + } + + public LinkedHashMap getCustomDef() { + return customDef; + } + + @Override + public String toString() { + return "EntityTemplate{" + + "name='" + name + '\'' + + ", entityTpl=" + entityTpl + + ", customDef=" + customDef + + ", typeDefinition=" + typeDefinition + + ", _properties=" + _properties + + ", _interfaces=" + _interfaces + + ", _requirements=" + _requirements + + ", _capabilities=" + _capabilities + + '}'; + } +} + +/*python + +class EntityTemplate(object): + '''Base class for TOSCA templates.''' + + SECTIONS = (DERIVED_FROM, PROPERTIES, REQUIREMENTS, + INTERFACES, CAPABILITIES, TYPE, DESCRIPTION, DIRECTIVES, + ATTRIBUTES, ARTIFACTS, NODE_FILTER, COPY) = \ + ('derived_from', 'properties', 'requirements', 'interfaces', + 'capabilities', 'type', 'description', 'directives', + 'attributes', 'artifacts', 'node_filter', 'copy') + REQUIREMENTS_SECTION = (NODE, CAPABILITY, RELATIONSHIP, OCCURRENCES, NODE_FILTER) = \ + ('node', 'capability', 'relationship', + 'occurrences', 'node_filter') + # Special key names + SPECIAL_SECTIONS = (METADATA) = ('metadata') + + def __init__(self, name, template, entity_name, custom_def=None): + self.name = name + self.entity_tpl = template + self.custom_def = custom_def + self._validate_field(self.entity_tpl) + type = self.entity_tpl.get('type') + UnsupportedType.validate_type(type) + if entity_name == 'node_type': + self.type_definition = NodeType(type, custom_def) \ + if type is not None else None + if entity_name == 'relationship_type': + relationship = template.get('relationship') + type = None + if relationship and isinstance(relationship, dict): + type = relationship.get('type') + elif isinstance(relationship, str): + type = self.entity_tpl['relationship'] + else: + type 
= self.entity_tpl['type'] + UnsupportedType.validate_type(type) + self.type_definition = RelationshipType(type, + None, custom_def) + if entity_name == 'policy_type': + if not type: + msg = (_('Policy definition of "%(pname)s" must have' + ' a "type" ''attribute.') % dict(pname=name)) + ValidationIssueCollector.appendException( + ValidationError(msg)) + + self.type_definition = PolicyType(type, custom_def) + if entity_name == 'group_type': + self.type_definition = GroupType(type, custom_def) \ + if type is not None else None + self._properties = None + self._interfaces = None + self._requirements = None + self._capabilities = None + + @property + def type(self): + if self.type_definition: + return self.type_definition.type + + @property + def parent_type(self): + if self.type_definition: + return self.type_definition.parent_type + + @property + def requirements(self): + if self._requirements is None: + self._requirements = self.type_definition.get_value( + self.REQUIREMENTS, + self.entity_tpl) or [] + return self._requirements + + def get_properties_objects(self): + '''Return properties objects for this template.''' + if self._properties is None: + self._properties = self._create_properties() + return self._properties + + def get_properties(self): + '''Return a dictionary of property name-object pairs.''' + return {prop.name: prop + for prop in self.get_properties_objects()} + + def get_property_value(self, name): + '''Return the value of a given property name.''' + props = self.get_properties() + if props and name in props.keys(): + return props[name].value + + @property + def interfaces(self): + if self._interfaces is None: + self._interfaces = self._create_interfaces() + return self._interfaces + + def get_capabilities_objects(self): + '''Return capabilities objects for this template.''' + if not self._capabilities: + self._capabilities = self._create_capabilities() + return self._capabilities + + def get_capabilities(self): + '''Return a dictionary of 
capability name-object pairs.''' + return {cap.name: cap + for cap in self.get_capabilities_objects()} + + def is_derived_from(self, type_str): + '''Check if object inherits from the given type. + + Returns true if this object is derived from 'type_str'. + False otherwise. + ''' + if not self.type: + return False + elif self.type == type_str: + return True + elif self.parent_type: + return self.parent_type.is_derived_from(type_str) + else: + return False + + def _create_capabilities(self): + capability = [] + caps = self.type_definition.get_value(self.CAPABILITIES, + self.entity_tpl, True) + if caps: + for name, props in caps.items(): + capabilities = self.type_definition.get_capabilities() + if name in capabilities.keys(): + c = capabilities[name] + properties = {} + # first use the definition default value + if c.properties: + for property_name in c.properties.keys(): + prop_def = c.properties[property_name] + if 'default' in prop_def: + properties[property_name] = prop_def['default'] + # then update (if available) with the node properties + if 'properties' in props and props['properties']: + properties.update(props['properties']) + + cap = CapabilityAssignment(name, properties, c) + capability.append(cap) + return capability + + def _validate_properties(self, template, entitytype): + properties = entitytype.get_value(self.PROPERTIES, template) + self._common_validate_properties(entitytype, properties) + + def _validate_capabilities(self): + type_capabilities = self.type_definition.get_capabilities() + allowed_caps = \ + type_capabilities.keys() if type_capabilities else [] + capabilities = self.type_definition.get_value(self.CAPABILITIES, + self.entity_tpl) + if capabilities: + self._common_validate_field(capabilities, allowed_caps, + 'capabilities') + self._validate_capabilities_properties(capabilities) + + def _validate_capabilities_properties(self, capabilities): + for cap, props in capabilities.items(): + capability = self.get_capability(cap) + if not 
capability: + continue + capabilitydef = capability.definition + self._common_validate_properties(capabilitydef, + props[self.PROPERTIES]) + + # validating capability properties values + for prop in self.get_capability(cap).get_properties_objects(): + prop.validate() + + # tODO(srinivas_tadepalli): temporary work around to validate + # default_instances until standardized in specification + if cap == "scalable" and prop.name == "default_instances": + prop_dict = props[self.PROPERTIES] + min_instances = prop_dict.get("min_instances") + max_instances = prop_dict.get("max_instances") + default_instances = prop_dict.get("default_instances") + if not (min_instances <= default_instances + <= max_instances): + err_msg = ('"properties" of template "%s": ' + '"default_instances" value is not between ' + '"min_instances" and "max_instances".' % + self.name) + ValidationIssueCollector.appendException( + ValidationError(message=err_msg)) + + def _common_validate_properties(self, entitytype, properties): + allowed_props = [] + required_props = [] + for p in entitytype.get_properties_def_objects(): + allowed_props.append(p.name) + # If property is 'required' and has no 'default' value then record + if p.required and p.default is None: + required_props.append(p.name) + # validate all required properties have values + if properties: + req_props_no_value_or_default = [] + self._common_validate_field(properties, allowed_props, + 'properties') + # make sure it's not missing any property required by a tosca type + for r in required_props: + if r not in properties.keys(): + req_props_no_value_or_default.append(r) + # Required properties found without value or a default value + if req_props_no_value_or_default: + ValidationIssueCollector.appendException( + MissingRequiredFieldError( + what='"properties" of template "%s"' % self.name, + required=req_props_no_value_or_default)) + else: + # Required properties in schema, but not in template + if required_props: + 
ValidationIssueCollector.appendException( + MissingRequiredFieldError( + what='"properties" of template "%s"' % self.name, + required=required_props)) + + def _validate_field(self, template): + if not isinstance(template, dict): + ValidationIssueCollector.appendException( + MissingRequiredFieldError( + what='Template "%s"' % self.name, required=self.TYPE)) + try: + relationship = template.get('relationship') + if relationship and not isinstance(relationship, str): + relationship[self.TYPE] + elif isinstance(relationship, str): + template['relationship'] + else: + template[self.TYPE] + except KeyError: + ValidationIssueCollector.appendException( + MissingRequiredFieldError( + what='Template "%s"' % self.name, required=self.TYPE)) + + def _common_validate_field(self, schema, allowedlist, section): + for name in schema: + if name not in allowedlist: + ValidationIssueCollector.appendException( + UnknownFieldError( + what=('"%(section)s" of template "%(nodename)s"' + % {'section': section, 'nodename': self.name}), + field=name)) + + def _create_properties(self): + props = [] + properties = self.type_definition.get_value(self.PROPERTIES, + self.entity_tpl) or {} + for name, value in properties.items(): + props_def = self.type_definition.get_properties_def() + if props_def and name in props_def: + prop = Property(name, value, + props_def[name].schema, self.custom_def) + props.append(prop) + for p in self.type_definition.get_properties_def_objects(): + if p.default is not None and p.name not in properties.keys(): + prop = Property(p.name, p.default, p.schema, self.custom_def) + props.append(prop) + return props + + def _create_interfaces(self): + interfaces = [] + type_interfaces = None + if isinstance(self.type_definition, RelationshipType): + if isinstance(self.entity_tpl, dict): + if self.INTERFACES in self.entity_tpl: + type_interfaces = self.entity_tpl[self.INTERFACES] + else: + for rel_def, value in self.entity_tpl.items(): + if rel_def != 'type': + rel_def = 
self.entity_tpl.get(rel_def) + rel = None + if isinstance(rel_def, dict): + rel = rel_def.get('relationship') + if rel: + if self.INTERFACES in rel: + type_interfaces = rel[self.INTERFACES] + break + else: + type_interfaces = self.type_definition.get_value(self.INTERFACES, + self.entity_tpl) + if type_interfaces: + for interface_type, value in type_interfaces.items(): + for op, op_def in value.items(): + iface = InterfacesDef(self.type_definition, + interfacetype=interface_type, + node_template=self, + name=op, + value=op_def) + interfaces.append(iface) + return interfaces + + def get_capability(self, name): + """Provide named capability + + :param name: name of capability + :return: capability object if found, None otherwise + """ + caps = self.get_capabilities() + if caps and name in caps.keys(): + return caps[name] +*/ diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/Group.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/Group.java new file mode 100644 index 0000000..0591d9a --- /dev/null +++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/Group.java @@ -0,0 +1,171 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + +package org.onap.sdc.toscaparser.api; + +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.elements.Metadata; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; +import org.onap.sdc.toscaparser.api.utils.ValidateUtils; + +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.Map; + +public class Group extends EntityTemplate { + + private static final String TYPE = "type"; + private static final String METADATA = "metadata"; + private static final String DESCRIPTION = "description"; + private static final String PROPERTIES = "properties"; + private static final String MEMBERS = "members"; + private static final String INTERFACES = "interfaces"; + private static final String[] SECTIONS = { + TYPE, METADATA, DESCRIPTION, PROPERTIES, MEMBERS, INTERFACES}; + + private String name; + private LinkedHashMap tpl; + private ArrayList memberNodes; + private LinkedHashMap customDef; + private Metadata metaData; + + + public Group(String name, LinkedHashMap templates, + ArrayList memberNodes, + LinkedHashMap customDef) { + this(name, templates, memberNodes, customDef, null); + } + + public Group(String name, LinkedHashMap templates, + ArrayList memberNodes, + LinkedHashMap customDef, NodeTemplate parentNodeTemplate) { + super(name, templates, "group_type", customDef, parentNodeTemplate); + + this.name = name; + tpl = templates; + if (tpl.get(METADATA) != null) { + Object metadataObject = tpl.get(METADATA); + ValidateUtils.validateMap(metadataObject); + metaData = new Metadata((Map) metadataObject); + } + this.memberNodes = memberNodes; + validateKeys(); + getCapabilities(); + } + + public Metadata getMetadata() { + return metaData; + } + + public ArrayList getMembers() { + return (ArrayList) 
entityTpl.get("members"); + } + + public String getDescription() { + return (String) entityTpl.get("description"); + + } + + public ArrayList getMemberNodes() { + return memberNodes; + } + + private void validateKeys() { + for (String key : entityTpl.keySet()) { + boolean bFound = false; + for (String sect : SECTIONS) { + if (key.equals(sect)) { + bFound = true; + break; + } + } + if (!bFound) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE183", String.format( + "UnknownFieldError: Groups \"%s\" contains unknown field \"%s\"", + name, key))); + } + } + } + + @Override + public String toString() { + return "Group{" + + "name='" + name + '\'' + + ", tpl=" + tpl + + ", memberNodes=" + memberNodes + + ", customDef=" + customDef + + ", metaData=" + metaData + + '}'; + } + + public int compareTo(Group other) { + if (this.equals(other)) { + return 0; + } + return this.getName().compareTo(other.getName()) == 0 ? this.getType().compareTo(other.getType()) : this.getName().compareTo(other.getName()); + } +} + +/*python + +from toscaparser.common.exception import ValidationIssueCollector +from toscaparser.common.exception import UnknownFieldError +from toscaparser.entity_template import EntityTemplate +from toscaparser.utils import validateutils + +SECTIONS = (TYPE, METADATA, DESCRIPTION, PROPERTIES, MEMBERS, INTERFACES) = \ + ('type', 'metadata', 'description', + 'properties', 'members', 'interfaces') + + +class Group(EntityTemplate): + + def __init__(self, name, group_templates, member_nodes, custom_defs=None): + super(Group, self).__init__(name, + group_templates, + 'group_type', + custom_defs) + self.name = name + self.tpl = group_templates + self.meta_data = None + if self.METADATA in self.tpl: + self.meta_data = self.tpl.get(self.METADATA) + validateutils.validate_map(self.meta_data) + self.member_nodes = member_nodes + self._validate_keys() + + @property + def members(self): + return self.entity_tpl.get('members') + + @property 
+ def description(self): + return self.entity_tpl.get('description') + + def get_member_nodes(self): + return self.member_nodes + + def _validate_keys(self): + for key in self.entity_tpl.keys(): + if key not in SECTIONS: + ValidationIssueCollector.appendException( + UnknownFieldError(what='Groups "%s"' % self.name, + field=key)) +*/ diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/ImportsLoader.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/ImportsLoader.java new file mode 100644 index 0000000..019adb3 --- /dev/null +++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/ImportsLoader.java @@ -0,0 +1,748 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + +package org.onap.sdc.toscaparser.api; + +import com.google.common.base.Charsets; +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; +import org.onap.sdc.toscaparser.api.utils.UrlUtils; + +import org.onap.sdc.toscaparser.api.elements.TypeValidation; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.yaml.snakeyaml.Yaml; + +import java.io.*; +import java.net.URL; +import java.nio.file.Paths; +import java.util.*; + +public class ImportsLoader { + + private static Logger log = LoggerFactory.getLogger(ImportsLoader.class.getName()); + private static final String FILE = "file"; + private static final String REPOSITORY = "repository"; + private static final String NAMESPACE_URI = "namespace_uri"; + private static final String NAMESPACE_PREFIX = "namespace_prefix"; + private String IMPORTS_SECTION[] = {FILE, REPOSITORY, NAMESPACE_URI, NAMESPACE_PREFIX}; + + private ArrayList importslist; + private String path; + private ArrayList typeDefinitionList; + + private LinkedHashMap customDefs; + private LinkedHashMap allCustomDefs; + private ArrayList> nestedToscaTpls; + private LinkedHashMap repositories; + + @SuppressWarnings("unchecked") + public ImportsLoader(ArrayList _importslist, + String _path, + Object _typeDefinitionList, + LinkedHashMap tpl) { + + this.importslist = _importslist; + customDefs = new LinkedHashMap(); + allCustomDefs = new LinkedHashMap(); + nestedToscaTpls = new ArrayList>(); + if ((_path == null || _path.isEmpty()) && tpl == null) { + //msg = _('Input tosca template is not provided.') + //log.warning(msg) + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE184", "ValidationError: Input tosca template is not provided")); + } + + this.path = _path; + this.repositories = new LinkedHashMap(); + + if (tpl != null && 
tpl.get("repositories") != null) { + this.repositories = (LinkedHashMap) tpl.get("repositories"); + } + this.typeDefinitionList = new ArrayList(); + if (_typeDefinitionList != null) { + if (_typeDefinitionList instanceof ArrayList) { + this.typeDefinitionList = (ArrayList) _typeDefinitionList; + } else { + this.typeDefinitionList.add((String) _typeDefinitionList); + } + } + _validateAndLoadImports(); + } + + public LinkedHashMap getCustomDefs() { + return allCustomDefs; + } + + public ArrayList> getNestedToscaTpls() { + return nestedToscaTpls; + } + + @SuppressWarnings({"unchecked", "unused"}) + public void _validateAndLoadImports() { + Set importNames = new HashSet(); + + if (importslist == null) { + //msg = _('"imports" keyname is defined without including templates.') + //log.error(msg) + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE185", + "ValidationError: \"imports\" keyname is defined without including templates")); + return; + } + + for (Object importDef : importslist) { + String fullFileName = null; + LinkedHashMap customType = null; + if (importDef instanceof LinkedHashMap) { + for (Map.Entry me : ((LinkedHashMap) importDef).entrySet()) { + String importName = me.getKey(); + Object importUri = me.getValue(); + if (importNames.contains(importName)) { + //msg = (_('Duplicate import name "%s" was found.') % import_name) + //log.error(msg) + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE186", String.format( + "ValidationError: Duplicate import name \"%s\" was found", importName))); + } + importNames.add(importName); //??? 
+ + // _loadImportTemplate returns 2 objects + Object ffnct[] = _loadImportTemplate(importName, importUri); + fullFileName = (String) ffnct[0]; + customType = (LinkedHashMap) ffnct[1]; + String namespacePrefix = ""; + if (importUri instanceof LinkedHashMap) { + namespacePrefix = (String) + ((LinkedHashMap) importUri).get(NAMESPACE_PREFIX); + } + + if (customType != null) { + TypeValidation tv = new TypeValidation(customType, importDef); + _updateCustomDefs(customType, namespacePrefix); + } + } + } else { // old style of imports + // _loadImportTemplate returns 2 objects + Object ffnct[] = _loadImportTemplate(null, importDef); + fullFileName = (String) ffnct[0]; + customType = (LinkedHashMap) ffnct[1]; + if (customType != null) { + TypeValidation tv = new TypeValidation(customType, importDef); + _updateCustomDefs(customType, null); + } + } + _updateNestedToscaTpls(fullFileName, customType); + + + } + } + + /** + * This method is used to get consolidated custom definitions by passing custom Types from + * each import. 
The resultant collection is then passed back which contains all import + * definitions + * + * @param customType the custom type + * @param namespacePrefix the namespace prefix + */ + @SuppressWarnings("unchecked") + private void _updateCustomDefs(LinkedHashMap customType, String namespacePrefix) { + LinkedHashMap outerCustomTypes; + for (String typeDef : typeDefinitionList) { + if (typeDef.equals("imports")) { + customDefs.put("imports", customType.get(typeDef)); + if (allCustomDefs.isEmpty() || allCustomDefs.get("imports") == null) { + allCustomDefs.put("imports", customType.get(typeDef)); + } else if (customType.get(typeDef) != null) { + Set allCustomImports = new HashSet<>((ArrayList) allCustomDefs.get("imports")); + allCustomImports.addAll((ArrayList) customType.get(typeDef)); + allCustomDefs.put("imports", new ArrayList<>(allCustomImports)); + } + } else { + outerCustomTypes = (LinkedHashMap) customType.get(typeDef); + if (outerCustomTypes != null) { + if (namespacePrefix != null && !namespacePrefix.isEmpty()) { + LinkedHashMap prefixCustomTypes = new LinkedHashMap(); + for (Map.Entry me : outerCustomTypes.entrySet()) { + String typeDefKey = me.getKey(); + String nameSpacePrefixToKey = namespacePrefix + "." 
+ typeDefKey; + prefixCustomTypes.put(nameSpacePrefixToKey, outerCustomTypes.get(typeDefKey)); + } + customDefs.putAll(prefixCustomTypes); + allCustomDefs.putAll(prefixCustomTypes); + } else { + customDefs.putAll(outerCustomTypes); + allCustomDefs.putAll(outerCustomTypes); + } + } + } + } + } + + private void _updateNestedToscaTpls(String fullFileName, LinkedHashMap customTpl) { + if (fullFileName != null && customTpl != null) { + LinkedHashMap tt = new LinkedHashMap(); + tt.put(fullFileName, customTpl); + nestedToscaTpls.add(tt); + } + } + + private void _validateImportKeys(String importName, LinkedHashMap importUri) { + if (importUri.get(FILE) == null) { + //log.warning(_('Missing keyname "file" in import "%(name)s".') % {'name': import_name}) + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE187", String.format( + "MissingRequiredFieldError: Import of template \"%s\" is missing field %s", importName, FILE))); + } + for (String key : importUri.keySet()) { + boolean bFound = false; + for (String is : IMPORTS_SECTION) { + if (is.equals(key)) { + bFound = true; + break; + } + } + if (!bFound) { + //log.warning(_('Unknown keyname "%(key)s" error in ' + // 'imported definition "%(def)s".') + // % {'key': key, 'def': import_name}) + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE188", String.format( + "UnknownFieldError: Import of template \"%s\" has unknown fiels %s", importName, key))); + } + } + } + + @SuppressWarnings("unchecked") + private Object[] _loadImportTemplate(String importName, Object importUriDef) { + /* + This method loads the custom type definitions referenced in "imports" + section of the TOSCA YAML template by determining whether each import + is specified via a file reference (by relative or absolute path) or a + URL reference. 
+ + Possibilities: + +----------+--------+------------------------------+ + | template | import | comment | + +----------+--------+------------------------------+ + | file | file | OK | + | file | URL | OK | + | preparsed| file | file must be a full path | + | preparsed| URL | OK | + | URL | file | file must be a relative path | + | URL | URL | OK | + +----------+--------+------------------------------+ + */ + Object al[] = new Object[2]; + + boolean shortImportNotation = false; + String fileName; + String repository; + if (importUriDef instanceof LinkedHashMap) { + _validateImportKeys(importName, (LinkedHashMap) importUriDef); + fileName = (String) ((LinkedHashMap) importUriDef).get(FILE); + repository = (String) ((LinkedHashMap) importUriDef).get(REPOSITORY); + if (repository != null) { + if (!repositories.keySet().contains(repository)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE189", String.format( + "InvalidPropertyValueError: Repository \"%s\" not found in \"%s\"", + repository, repositories.keySet().toString()))); + } + } + } else { + fileName = (String) importUriDef; + repository = null; + shortImportNotation = true; + } + + if (fileName == null || fileName.isEmpty()) { + //msg = (_('A template file name is not provided with import ' + // 'definition "%(import_name)s".') + // % {'import_name': import_name}) + //log.error(msg) + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE190", String.format( + "ValidationError: A template file name is not provided with import definition \"%s\"", importName))); + al[0] = al[1] = null; + return al; + } + + if (UrlUtils.validateUrl(fileName)) { + try (InputStream input = new URL(fileName).openStream();) { + al[0] = fileName; + Yaml yaml = new Yaml(); + al[1] = yaml.load(input); + return al; + } catch (IOException e) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE191", String.format( + "ImportError: 
\"%s\" loading YAML import from \"%s\"", e.getClass().getSimpleName(), fileName))); + al[0] = al[1] = null; + return al; + } + } else if (repository == null || repository.isEmpty()) { + boolean aFile = false; + String importTemplate = null; + if (path != null && !path.isEmpty()) { + if (UrlUtils.validateUrl(path)) { + File fp = new File(path); + if (fp.isAbsolute()) { + String msg = String.format( + "ImportError: Absolute file name \"%s\" cannot be used in the URL-based input template \"%s\"", + fileName, path); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE192", msg)); + al[0] = al[1] = null; + return al; + } + importTemplate = UrlUtils.joinUrl(path, fileName); + aFile = false; + } else { + + aFile = true; + File fp = new File(path); + if (fp.isFile()) { + File fn = new File(fileName); + if (fn.isFile()) { + importTemplate = fileName; + } else { + String fullPath = Paths.get(path).toAbsolutePath().getParent().toString() + File.separator + fileName; + File ffp = new File(fullPath); + if (ffp.isFile()) { + importTemplate = fullPath; + } else { + String dirPath = Paths.get(path).toAbsolutePath().getParent().toString(); + String filePath; + if (Paths.get(fileName).getParent() != null) { + filePath = Paths.get(fileName).getParent().toString(); + } else { + filePath = ""; + } + if (!filePath.isEmpty() && dirPath.endsWith(filePath)) { + String sFileName = Paths.get(fileName).getFileName().toString(); + importTemplate = dirPath + File.separator + sFileName; + File fit = new File(importTemplate); + if (!fit.isFile()) { + //msg = (_('"%(import_template)s" is' + // 'not a valid file') + // % {'import_template': + // import_template}) + //log.error(msg) + String msg = String.format( + "ValueError: \"%s\" is not a valid file", importTemplate); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE193", msg)); + log.debug("ImportsLoader - _loadImportTemplate - {}", msg); + } + } + } + } + } + } + } else { 
// template is pre-parsed + File fn = new File(fileName); + if (fn.isAbsolute() && fn.isFile()) { + aFile = true; + importTemplate = fileName; + } else { + String msg = String.format( + "Relative file name \"%s\" cannot be used in a pre-parsed input template", fileName); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE194", "ImportError: " + msg)); + al[0] = al[1] = null; + return al; + } + } + + if (importTemplate == null || importTemplate.isEmpty()) { + //log.error(_('Import "%(name)s" is not valid.') % + // {'name': import_uri_def}) + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE195", String.format( + "ImportError: Import \"%s\" is not valid", importUriDef))); + al[0] = al[1] = null; + return al; + } + + // for now, this must be a file + if (!aFile) { + log.error("ImportsLoader - _loadImportTemplate - Error!! Expected a file. importUriDef = {}, importTemplate = {}", importUriDef, importTemplate); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE196", String.format( + "ImportError: Import \"%s\" is not a file", importName))); + al[0] = al[1] = null; + return al; + } + try (BufferedReader br = new BufferedReader(new FileReader(importTemplate));) { + al[0] = importTemplate; + + Yaml yaml = new Yaml(); + al[1] = yaml.load(br); + return al; + } catch (FileNotFoundException e) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE197", String.format( + "ImportError: Failed to load YAML from \"%s\"" + e, importName))); + al[0] = al[1] = null; + return al; + } catch (Exception e) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE198", String.format( + "ImportError: Exception from SnakeYAML file = \"%s\"" + e, importName))); + al[0] = al[1] = null; + return al; + } + } + + if (shortImportNotation) { + //log.error(_('Import "%(name)s" is not valid.') % import_uri_def) + 
ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE199", String.format( + "ImportError: Import \"%s\" is not valid", importName))); + al[0] = al[1] = null; + return al; + } + + String fullUrl = ""; + String repoUrl = ""; + if (repository != null && !repository.isEmpty()) { + if (repositories != null) { + for (String repoName : repositories.keySet()) { + if (repoName.equals(repository)) { + Object repoDef = repositories.get(repoName); + if (repoDef instanceof String) { + repoUrl = (String) repoDef; + } else if (repoDef instanceof LinkedHashMap) { + repoUrl = (String) ((LinkedHashMap) repoDef).get("url"); + } + // Remove leading, ending spaces and strip + // the last character if "/" + repoUrl = repoUrl.trim(); + if (repoUrl.endsWith("/")) { + repoUrl = repoUrl.substring(0, repoUrl.length() - 1); + } + fullUrl = repoUrl + "/" + fileName; + break; + } + } + } + if (fullUrl.isEmpty()) { + String msg = String.format( + "referenced repository \"%s\" in import definition \"%s\" not found", + repository, importName); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE200", "ImportError: " + msg)); + al[0] = al[1] = null; + return al; + } + } + if (UrlUtils.validateUrl(fullUrl)) { + try (InputStream input = new URL(fullUrl).openStream();) { + al[0] = fullUrl; + Yaml yaml = new Yaml(); + al[1] = yaml.load(input); + return al; + } catch (IOException e) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE201", String.format( + "ImportError: Exception loading YAML import from \"%s\"", fullUrl))); + al[0] = al[1] = null; + return al; + } + } else { + String msg = String.format( + "repository URL \"%s\" in import definition \"%s\" is not valid", + repoUrl, importName); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE202", "ImportError: " + msg)); + } + + // if we got here something is wrong with the flow... 
+ log.error("ImportsLoader - _loadImportTemplate - got to dead end (importName {})", importName); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE203", String.format( + "ImportError: _loadImportTemplate got to dead end (importName %s)\n", importName))); + al[0] = al[1] = null; + return al; + } + + @Override + public String toString() { + return "ImportsLoader{" + + "IMPORTS_SECTION=" + Arrays.toString(IMPORTS_SECTION) + + ", importslist=" + importslist + + ", path='" + path + '\'' + + ", typeDefinitionList=" + typeDefinitionList + + ", customDefs=" + customDefs + + ", nestedToscaTpls=" + nestedToscaTpls + + ", repositories=" + repositories + + '}'; + } +} + +/*python + +import logging +import os + +from toscaparser.common.exception import ValidationIssueCollector +from toscaparser.common.exception import InvalidPropertyValueError +from toscaparser.common.exception import MissingRequiredFieldError +from toscaparser.common.exception import UnknownFieldError +from toscaparser.common.exception import ValidationError +from toscaparser.elements.tosca_type_validation import TypeValidation +from toscaparser.utils.gettextutils import _ +import org.openecomp.sdc.toscaparser.api.utils.urlutils +import org.openecomp.sdc.toscaparser.api.utils.yamlparser + +YAML_LOADER = toscaparser.utils.yamlparser.load_yaml +log = logging.getLogger("tosca") + + +class ImportsLoader(object): + + IMPORTS_SECTION = (FILE, REPOSITORY, NAMESPACE_URI, NAMESPACE_PREFIX) = \ + ('file', 'repository', 'namespace_uri', + 'namespace_prefix') + + def __init__(self, importslist, path, type_definition_list=None, + tpl=None): + self.importslist = importslist + self.custom_defs = {} + if not path and not tpl: + msg = _('Input tosca template is not provided.') + log.warning(msg) + ValidationIssueCollector.appendException(ValidationError(message=msg)) + self.path = path + self.repositories = {} + if tpl and tpl.get('repositories'): + self.repositories = 
tpl.get('repositories') + self.type_definition_list = [] + if type_definition_list: + if isinstance(type_definition_list, list): + self.type_definition_list = type_definition_list + else: + self.type_definition_list.append(type_definition_list) + self._validate_and_load_imports() + + def get_custom_defs(self): + return self.custom_defs + + def _validate_and_load_imports(self): + imports_names = set() + + if not self.importslist: + msg = _('"imports" keyname is defined without including ' + 'templates.') + log.error(msg) + ValidationIssueCollector.appendException(ValidationError(message=msg)) + return + + for import_def in self.importslist: + if isinstance(import_def, dict): + for import_name, import_uri in import_def.items(): + if import_name in imports_names: + msg = (_('Duplicate import name "%s" was found.') % + import_name) + log.error(msg) + ValidationIssueCollector.appendException( + ValidationError(message=msg)) + imports_names.add(import_name) + + custom_type = self._load_import_template(import_name, + import_uri) + namespace_prefix = None + if isinstance(import_uri, dict): + namespace_prefix = import_uri.get( + self.NAMESPACE_PREFIX) + if custom_type: + TypeValidation(custom_type, import_def) + self._update_custom_def(custom_type, namespace_prefix) + else: # old style of imports + custom_type = self._load_import_template(None, + import_def) + if custom_type: + TypeValidation( + custom_type, import_def) + self._update_custom_def(custom_type, None) + + def _update_custom_def(self, custom_type, namespace_prefix): + outer_custom_types = {} + for type_def in self.type_definition_list: + outer_custom_types = custom_type.get(type_def) + if outer_custom_types: + if type_def == "imports": + self.custom_defs.update({'imports': outer_custom_types}) + else: + if namespace_prefix: + prefix_custom_types = {} + for type_def_key in outer_custom_types.keys(): + namespace_prefix_to_key = (namespace_prefix + + "." 
+ type_def_key) + prefix_custom_types[namespace_prefix_to_key] = \ + outer_custom_types[type_def_key] + self.custom_defs.update(prefix_custom_types) + else: + self.custom_defs.update(outer_custom_types) + + def _validate_import_keys(self, import_name, import_uri_def): + if self.FILE not in import_uri_def.keys(): + log.warning(_('Missing keyname "file" in import "%(name)s".') + % {'name': import_name}) + ValidationIssueCollector.appendException( + MissingRequiredFieldError( + what='Import of template "%s"' % import_name, + required=self.FILE)) + for key in import_uri_def.keys(): + if key not in self.IMPORTS_SECTION: + log.warning(_('Unknown keyname "%(key)s" error in ' + 'imported definition "%(def)s".') + % {'key': key, 'def': import_name}) + ValidationIssueCollector.appendException( + UnknownFieldError( + what='Import of template "%s"' % import_name, + field=key)) + + def _load_import_template(self, import_name, import_uri_def): + """Handle custom types defined in imported template files + + This method loads the custom type definitions referenced in "imports" + section of the TOSCA YAML template by determining whether each import + is specified via a file reference (by relative or absolute path) or a + URL reference. 
+ + Possibilities: + +----------+--------+------------------------------+ + | template | import | comment | + +----------+--------+------------------------------+ + | file | file | OK | + | file | URL | OK | + | preparsed| file | file must be a full path | + | preparsed| URL | OK | + | URL | file | file must be a relative path | + | URL | URL | OK | + +----------+--------+------------------------------+ + """ + short_import_notation = False + if isinstance(import_uri_def, dict): + self._validate_import_keys(import_name, import_uri_def) + file_name = import_uri_def.get(self.FILE) + repository = import_uri_def.get(self.REPOSITORY) + repos = self.repositories.keys() + if repository is not None: + if repository not in repos: + ValidationIssueCollector.appendException( + InvalidPropertyValueError( + what=_('Repository is not found in "%s"') % repos)) + else: + file_name = import_uri_def + repository = None + short_import_notation = True + + if not file_name: + msg = (_('A template file name is not provided with import ' + 'definition "%(import_name)s".') + % {'import_name': import_name}) + log.error(msg) + ValidationIssueCollector.appendException(ValidationError(message=msg)) + return + + if toscaparser.utils.urlutils.UrlUtils.validate_url(file_name): + return YAML_LOADER(file_name, False) + elif not repository: + import_template = None + if self.path: + if toscaparser.utils.urlutils.UrlUtils.validate_url(self.path): + if os.path.isabs(file_name): + msg = (_('Absolute file name "%(name)s" cannot be ' + 'used in a URL-based input template ' + '"%(template)s".') + % {'name': file_name, 'template': self.path}) + log.error(msg) + ValidationIssueCollector.appendException(ImportError(msg)) + return + import_template = toscaparser.utils.urlutils.UrlUtils.\ + join_url(self.path, file_name) + a_file = False + else: + a_file = True + main_a_file = os.path.isfile(self.path) + + if main_a_file: + if os.path.isfile(file_name): + import_template = file_name + else: + full_path = 
os.path.join( + os.path.dirname(os.path.abspath(self.path)), + file_name) + if os.path.isfile(full_path): + import_template = full_path + else: + file_path = file_name.rpartition("/") + dir_path = os.path.dirname(os.path.abspath( + self.path)) + if file_path[0] != '' and dir_path.endswith( + file_path[0]): + import_template = dir_path + "/" +\ + file_path[2] + if not os.path.isfile(import_template): + msg = (_('"%(import_template)s" is' + 'not a valid file') + % {'import_template': + import_template}) + log.error(msg) + ValidationIssueCollector.appendException + (ValueError(msg)) + else: # template is pre-parsed + if os.path.isabs(file_name) and os.path.isfile(file_name): + a_file = True + import_template = file_name + else: + msg = (_('Relative file name "%(name)s" cannot be used ' + 'in a pre-parsed input template.') + % {'name': file_name}) + log.error(msg) + ValidationIssueCollector.appendException(ImportError(msg)) + return + + if not import_template: + log.error(_('Import "%(name)s" is not valid.') % + {'name': import_uri_def}) + ValidationIssueCollector.appendException( + ImportError(_('Import "%s" is not valid.') % + import_uri_def)) + return + return YAML_LOADER(import_template, a_file) + + if short_import_notation: + log.error(_('Import "%(name)s" is not valid.') % import_uri_def) + ValidationIssueCollector.appendException( + ImportError(_('Import "%s" is not valid.') % import_uri_def)) + return + + full_url = "" + if repository: + if self.repositories: + for repo_name, repo_def in self.repositories.items(): + if repo_name == repository: + # Remove leading, ending spaces and strip + # the last character if "/" + repo_url = ((repo_def['url']).strip()).rstrip("//") + full_url = repo_url + "/" + file_name + + if not full_url: + msg = (_('referenced repository "%(n_uri)s" in import ' + 'definition "%(tpl)s" not found.') + % {'n_uri': repository, 'tpl': import_name}) + log.error(msg) + ValidationIssueCollector.appendException(ImportError(msg)) + return + + if 
toscaparser.utils.urlutils.UrlUtils.validate_url(full_url): + return YAML_LOADER(full_url, False) + else: + msg = (_('repository url "%(n_uri)s" is not valid in import ' + 'definition "%(tpl)s".') + % {'n_uri': repo_url, 'tpl': import_name}) + log.error(msg) + ValidationIssueCollector.appendException(ImportError(msg)) +*/ diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/NodeTemplate.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/NodeTemplate.java new file mode 100644 index 0000000..4fabe38 --- /dev/null +++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/NodeTemplate.java @@ -0,0 +1,824 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + +package org.onap.sdc.toscaparser.api; + +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.elements.EntityType; +import org.onap.sdc.toscaparser.api.elements.InterfacesDef; +import org.onap.sdc.toscaparser.api.elements.Metadata; +import org.onap.sdc.toscaparser.api.elements.NodeType; +import org.onap.sdc.toscaparser.api.elements.RelationshipType; +import org.onap.sdc.toscaparser.api.utils.CopyUtils; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; + +import static org.onap.sdc.toscaparser.api.elements.EntityType.TOSCA_DEF; + +public class NodeTemplate extends EntityTemplate { + + private LinkedHashMap templates; + private LinkedHashMap customDef; + private ArrayList availableRelTpls; + private LinkedHashMap availableRelTypes; + private LinkedHashMap related; + private ArrayList relationshipTpl; + private LinkedHashMap _relationships; + private SubstitutionMappings subMappingToscaTemplate; + private TopologyTemplate originComponentTemplate; + private Metadata metadata; + + private static final String METADATA = "metadata"; + + public NodeTemplate(String name, + LinkedHashMap ntnodeTemplates, + LinkedHashMap ntcustomDef, + ArrayList ntavailableRelTpls, + LinkedHashMap ntavailableRelTypes) { + this(name, ntnodeTemplates, ntcustomDef, ntavailableRelTpls, + ntavailableRelTypes, null); + } + + @SuppressWarnings("unchecked") + public NodeTemplate(String name, + LinkedHashMap ntnodeTemplates, + LinkedHashMap ntcustomDef, + ArrayList ntavailableRelTpls, + LinkedHashMap ntavailableRelTypes, + NodeTemplate parentNodeTemplate) { + + super(name, (LinkedHashMap) ntnodeTemplates.get(name), + "node_type", ntcustomDef, parentNodeTemplate); + + templates = ntnodeTemplates; + _validateFields((LinkedHashMap) 
templates.get(name)); + customDef = ntcustomDef; + related = new LinkedHashMap(); + relationshipTpl = new ArrayList(); + availableRelTpls = ntavailableRelTpls; + availableRelTypes = ntavailableRelTypes; + _relationships = new LinkedHashMap(); + subMappingToscaTemplate = null; + metadata = _metaData(); + } + + @SuppressWarnings("unchecked") + public LinkedHashMap getRelationships() { + if (_relationships.isEmpty()) { + List requires = getRequirements().getAll(); + if (requires != null && requires instanceof List) { + for (RequirementAssignment r : requires) { + LinkedHashMap explicit = _getExplicitRelationship(r); + if (explicit != null) { + // _relationships.putAll(explicit)... + for (Map.Entry ee : explicit.entrySet()) { + _relationships.put(ee.getKey(), ee.getValue()); + } + } + } + } + } + return _relationships; + } + + @SuppressWarnings("unchecked") + private LinkedHashMap _getExplicitRelationship(RequirementAssignment req) { + // Handle explicit relationship + + // For example, + // - req: + // node: DBMS + // relationship: tosca.relationships.HostedOn + + LinkedHashMap explicitRelation = new LinkedHashMap(); + String node = req.getNodeTemplateName(); + + if (node != null && !node.isEmpty()) { + //msg = _('Lookup by TOSCA types is not supported. ' + // 'Requirement for "%s" can not be full-filled.') % self.name + boolean bFound = false; + for (String k : EntityType.TOSCA_DEF.keySet()) { + if (k.equals(node)) { + bFound = true; + break; + } + } + if (bFound || customDef.get(node) != null) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE205", String.format( + "NotImplementedError: Lookup by TOSCA types is not supported. 
Requirement for \"%s\" can not be full-filled", + getName()))); + return null; + } + if (templates.get(node) == null) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE206", String.format( + "KeyError: Node template \"%s\" was not found", node))); + return null; + } + NodeTemplate relatedTpl = new NodeTemplate(node, templates, customDef, null, null); + Object relationship = req.getRelationship(); + String relationshipString = null; +// // here relationship can be a string or a LHM with 'type': + + // check if its type has relationship defined + if (relationship == null) { + ArrayList parentReqs = ((NodeType) typeDefinition).getAllRequirements(); + if (parentReqs == null) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE207", "ValidationError: parent_req is null")); + } else { +// for(String key: req.keySet()) { +// boolean bFoundRel = false; + for (Object rdo : parentReqs) { + LinkedHashMap reqDict = (LinkedHashMap) rdo; + LinkedHashMap relDict = (LinkedHashMap) reqDict.get(req.getName()); + if (relDict != null) { + relationship = relDict.get("relationship"); + //BUG-python??? need to break twice? 
+// bFoundRel = true; + break; + } + } +// if(bFoundRel) { +// break; +// } +// } + } + } + + if (relationship != null) { + // here relationship can be a string or a LHM with 'type': + if (relationship instanceof String) { + relationshipString = (String) relationship; + } else if (relationship instanceof LinkedHashMap) { + relationshipString = (String) ((LinkedHashMap) relationship).get("type"); + } + + boolean foundRelationshipTpl = false; + // apply available relationship templates if found + if (availableRelTpls != null) { + for (RelationshipTemplate tpl : availableRelTpls) { + if (tpl.getName().equals(relationshipString)) { + RelationshipType rtype = new RelationshipType(tpl.getType(), null, customDef); + explicitRelation.put(rtype, relatedTpl); + tpl.setTarget(relatedTpl); + tpl.setSource(this); + relationshipTpl.add(tpl); + foundRelationshipTpl = true; + } + } + } + // create relationship template object. + String relPrfx = EntityType.RELATIONSHIP_PREFIX; + if (!foundRelationshipTpl) { + if (relationship instanceof LinkedHashMap) { + relationshipString = (String) ((LinkedHashMap) relationship).get("type"); + if (relationshipString != null) { + if (availableRelTypes != null && !availableRelTypes.isEmpty() && + availableRelTypes.get(relationshipString) != null) { + ; + } else if (!(relationshipString).startsWith(relPrfx)) { + relationshipString = relPrfx + relationshipString; + } + } else { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE208", String.format( + "MissingRequiredFieldError: \"relationship\" used in template \"%s\" is missing required field \"type\"", + relatedTpl.getName()))); + } + } + for (RelationshipType rtype : ((NodeType) typeDefinition).getRelationship().keySet()) { + if (rtype.getType().equals(relationshipString)) { + explicitRelation.put(rtype, relatedTpl); + relatedTpl._addRelationshipTemplate(req, rtype.getType(), this); + } else if (availableRelTypes != null && !availableRelTypes.isEmpty()) { + 
LinkedHashMap relTypeDef = (LinkedHashMap) availableRelTypes.get(relationshipString); + if (relTypeDef != null) { + String superType = (String) relTypeDef.get("derived_from"); + if (superType != null) { + if (!superType.startsWith(relPrfx)) { + superType = relPrfx + superType; + } + if (rtype.getType().equals(superType)) { + explicitRelation.put(rtype, relatedTpl); + relatedTpl._addRelationshipTemplate(req, rtype.getType(), this); + } + } + } + } + } + } + } + } + return explicitRelation; + } + + @SuppressWarnings("unchecked") + private void _addRelationshipTemplate(RequirementAssignment requirement, String rtype, NodeTemplate source) { + LinkedHashMap req = new LinkedHashMap<>(); + req.put("relationship", CopyUtils.copyLhmOrAl(requirement.getRelationship())); + req.put("type", rtype); + RelationshipTemplate tpl = new RelationshipTemplate(req, rtype, customDef, this, source, getParentNodeTemplate()); + relationshipTpl.add(tpl); + } + + public ArrayList getRelationshipTemplate() { + return relationshipTpl; + } + + void _addNext(NodeTemplate nodetpl, RelationshipType relationship) { + related.put(nodetpl, relationship); + } + + public ArrayList getRelatedNodes() { + if (related.isEmpty()) { + for (Map.Entry me : ((NodeType) typeDefinition).getRelationship().entrySet()) { + RelationshipType relation = me.getKey(); + NodeType node = me.getValue(); + for (String tpl : templates.keySet()) { + if (tpl.equals(node.getType())) { + //BUG.. python has + // self.related[NodeTemplate(tpl)] = relation + // but NodeTemplate doesn't have a constructor with just name... + //???? 
+ related.put(new NodeTemplate(tpl, null, null, null, null), relation); + } + } + } + } + return new ArrayList(related.keySet()); + } + + public void validate(/*tosca_tpl=none is not used...*/) { + _validateCapabilities(); + _validateRequirements(); + _validateProperties(entityTpl, (NodeType) typeDefinition); + _validateInterfaces(); + for (Property prop : getPropertiesObjects()) { + prop.validate(); + } + } + + public Object getPropertyValueFromTemplatesByName(String propertyName) { + LinkedHashMap nodeObject = (LinkedHashMap) templates.get(name); + if (nodeObject != null) { + LinkedHashMap properties = (LinkedHashMap) nodeObject.get(PROPERTIES); + if (properties != null) { + return properties.get(propertyName); + } + } + return null; + } + + private Metadata _metaData() { + if (entityTpl.get(METADATA) != null) { + return new Metadata((Map) entityTpl.get(METADATA)); + } else { + return null; + } + } + + @SuppressWarnings("unchecked") + private void _validateRequirements() { + ArrayList typeRequires = ((NodeType) typeDefinition).getAllRequirements(); + ArrayList allowedReqs = new ArrayList<>(); + allowedReqs.add("template"); + if (typeRequires != null) { + for (Object to : typeRequires) { + LinkedHashMap treq = (LinkedHashMap) to; + for (Map.Entry me : treq.entrySet()) { + String key = me.getKey(); + Object value = me.getValue(); + allowedReqs.add(key); + if (value instanceof LinkedHashMap) { + allowedReqs.addAll(((LinkedHashMap) value).keySet()); + } + } + + } + } + + ArrayList requires = (ArrayList) ((NodeType) typeDefinition).getValue(REQUIREMENTS, entityTpl, false); + if (requires != null) { + if (!(requires instanceof ArrayList)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE209", String.format( + "TypeMismatchError: \"requirements\" of template \"%s\" are not of type \"list\"", name))); + } else { + for (Object ro : requires) { + LinkedHashMap req = (LinkedHashMap) ro; + for (Map.Entry me : req.entrySet()) { + String 
rl = me.getKey(); + Object vo = me.getValue(); + if (vo instanceof LinkedHashMap) { + LinkedHashMap value = (LinkedHashMap) vo; + _validateRequirementsKeys(value); + _validateRequirementsProperties(value); + allowedReqs.add(rl); + } + } + _commonValidateField(req, allowedReqs, "requirements"); + } + } + } + } + + @SuppressWarnings("unchecked") + private void _validateRequirementsProperties(LinkedHashMap reqs) { + // TO-DO(anyone): Only occurrences property of the requirements is + // validated here. Validation of other requirement properties are being + // validated in different files. Better to keep all the requirements + // properties validation here. + for (Map.Entry me : reqs.entrySet()) { + if (me.getKey().equals("occurrences")) { + ArrayList val = (ArrayList) me.getValue(); + _validateOccurrences(val); + } + + } + } + + private void _validateOccurrences(ArrayList occurrences) { + DataEntity.validateDatatype("list", occurrences, null, null, null); + for (Object val : occurrences) { + DataEntity.validateDatatype("Integer", val, null, null, null); + } + if (occurrences.size() != 2 || + !(0 <= (int) occurrences.get(0) && (int) occurrences.get(0) <= (int) occurrences.get(1)) || + (int) occurrences.get(1) == 0) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE210", String.format( + "InvalidPropertyValueError: property has invalid value %s", occurrences.toString()))); + } + } + + private void _validateRequirementsKeys(LinkedHashMap reqs) { + for (String key : reqs.keySet()) { + boolean bFound = false; + for (int i = 0; i < REQUIREMENTS_SECTION.length; i++) { + if (key.equals(REQUIREMENTS_SECTION[i])) { + bFound = true; + break; + } + } + if (!bFound) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE211", String.format( + "UnknownFieldError: \"requirements\" of template \"%s\" contains unknown field \"%s\"", name, key))); + } + } + } + + @SuppressWarnings("unchecked") + private void 
_validateInterfaces() { + LinkedHashMap ifaces = (LinkedHashMap) + ((NodeType) typeDefinition).getValue(INTERFACES, entityTpl, false); + if (ifaces != null) { + for (Map.Entry me : ifaces.entrySet()) { + String iname = me.getKey(); + LinkedHashMap value = (LinkedHashMap) me.getValue(); + if (iname.equals(InterfacesDef.LIFECYCLE) || iname.equals(InterfacesDef.LIFECYCLE_SHORTNAME)) { + // maybe we should convert [] to arraylist??? + ArrayList inlo = new ArrayList<>(); + for (int i = 0; i < InterfacesDef.INTERFACE_NODE_LIFECYCLE_OPERATIONS.length; i++) { + inlo.add(InterfacesDef.INTERFACE_NODE_LIFECYCLE_OPERATIONS[i]); + } + _commonValidateField(value, inlo, "interfaces"); + } else if (iname.equals(InterfacesDef.CONFIGURE) || iname.equals(InterfacesDef.CONFIGURE_SHORTNAME)) { + // maybe we should convert [] to arraylist??? + ArrayList irco = new ArrayList<>(); + for (int i = 0; i < InterfacesDef.INTERFACE_RELATIONSHIP_CONFIGURE_OPERATIONS.length; i++) { + irco.add(InterfacesDef.INTERFACE_RELATIONSHIP_CONFIGURE_OPERATIONS[i]); + } + _commonValidateField(value, irco, "interfaces"); + } else if (((NodeType) typeDefinition).getInterfaces().keySet().contains(iname)) { + _commonValidateField(value, _collectCustomIfaceOperations(iname), "interfaces"); + } else { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE212", String.format( + "UnknownFieldError: \"interfaces\" of template \"%s\" contains unknown field %s", name, iname))); + } + } + } + } + + @SuppressWarnings("unchecked") + private ArrayList _collectCustomIfaceOperations(String iname) { + ArrayList allowedOperations = new ArrayList<>(); + LinkedHashMap nodetypeIfaceDef = (LinkedHashMap) ((NodeType) + typeDefinition).getInterfaces().get(iname); + allowedOperations.addAll(nodetypeIfaceDef.keySet()); + String ifaceType = (String) nodetypeIfaceDef.get("type"); + if (ifaceType != null) { + LinkedHashMap ifaceTypeDef = null; + if (((NodeType) typeDefinition).customDef != null) { + 
ifaceTypeDef = (LinkedHashMap) ((NodeType) typeDefinition).customDef.get(ifaceType); + } + if (ifaceTypeDef == null) { + ifaceTypeDef = (LinkedHashMap) EntityType.TOSCA_DEF.get(ifaceType); + } + allowedOperations.addAll(ifaceTypeDef.keySet()); + } + // maybe we should convert [] to arraylist??? + ArrayList idrw = new ArrayList<>(); + for (int i = 0; i < InterfacesDef.INTERFACE_DEF_RESERVED_WORDS.length; i++) { + idrw.add(InterfacesDef.INTERFACE_DEF_RESERVED_WORDS[i]); + } + allowedOperations.removeAll(idrw); + return allowedOperations; + } + + /** + * Get all interface details for given node template.
+ * + * @return Map that contains the list of all interfaces and their definitions. + * If none found, an empty map will be returned. + */ + public Map> getAllInterfaceDetailsForNodeType() { + Map> interfaceMap = new LinkedHashMap<>(); + + // Get custom interface details + Map customInterfacesDetails = ((NodeType) typeDefinition).getInterfaces(); + // Get native interface details from tosca definitions + Object nativeInterfaceDetails = TOSCA_DEF.get(InterfacesDef.LIFECYCLE); + Map allInterfaceDetails = new LinkedHashMap<>(); + allInterfaceDetails.putAll(customInterfacesDetails); + if (nativeInterfaceDetails != null) { + allInterfaceDetails.put(InterfacesDef.LIFECYCLE, nativeInterfaceDetails); + } + + // Process all interface details from combined collection and return an interface Map with + // interface names and their definitions + for (Map.Entry me : allInterfaceDetails.entrySet()) { + ArrayList interfaces = new ArrayList<>(); + String interfaceType = me.getKey(); + Map interfaceValue = (Map) me.getValue(); + if (interfaceValue.containsKey("type")) { + interfaceType = (String) interfaceValue.get("type"); + } + + for (Map.Entry ve : interfaceValue.entrySet()) { + // Filter type as this is a reserved key and not an operation + if (!ve.getKey().equals("type")) { + InterfacesDef iface = new InterfacesDef(typeDefinition, interfaceType, this, ve.getKey(), ve.getValue()); + interfaces.add(iface); + } + } + interfaceMap.put(interfaceType, interfaces); + } + return interfaceMap; + } + + private void _validateFields(LinkedHashMap nodetemplate) { + for (String ntname : nodetemplate.keySet()) { + boolean bFound = false; + for (int i = 0; i < SECTIONS.length; i++) { + if (ntname.equals(SECTIONS[i])) { + bFound = true; + break; + } + } + if (!bFound) { + for (int i = 0; i < SPECIAL_SECTIONS.length; i++) { + if (ntname.equals(SPECIAL_SECTIONS[i])) { + bFound = true; + break; + } + } + + } + if (!bFound) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new 
JToscaValidationIssue("JE213", String.format( + "UnknownFieldError: Node template \"%s\" has unknown field \"%s\"", name, ntname))); + } + } + } + + // getter/setter + + // multilevel nesting + public SubstitutionMappings getSubMappingToscaTemplate() { + return subMappingToscaTemplate; + } + + public void setSubMappingToscaTemplate(SubstitutionMappings sm) { + subMappingToscaTemplate = sm; + } + + public Metadata getMetaData() { + return metadata; + } + + public void setMetaData(Metadata metadata) { + this.metadata = metadata; + } + + @Override + public String toString() { + return getName(); + } + + public TopologyTemplate getOriginComponentTemplate() { + return originComponentTemplate; + } + + public void setOriginComponentTemplate(TopologyTemplate originComponentTemplate) { + this.originComponentTemplate = originComponentTemplate; + } + +} + +/*python + +from toscaparser.common.exception import ValidationIssueCollector +from toscaparser.common.exception import InvalidPropertyValueError +from toscaparser.common.exception import MissingRequiredFieldError +from toscaparser.common.exception import TypeMismatchError +from toscaparser.common.exception import UnknownFieldError +from toscaparser.common.exception import ValidationError +from toscaparser.dataentity import DataEntity +from toscaparser.elements.interfaces import CONFIGURE +from toscaparser.elements.interfaces import CONFIGURE_SHORTNAME +from toscaparser.elements.interfaces import INTERFACE_DEF_RESERVED_WORDS +from toscaparser.elements.interfaces import InterfacesDef +from toscaparser.elements.interfaces import LIFECYCLE +from toscaparser.elements.interfaces import LIFECYCLE_SHORTNAME +from toscaparser.elements.relationshiptype import RelationshipType +from toscaparser.entity_template import EntityTemplate +from toscaparser.relationship_template import RelationshipTemplate +from toscaparser.utils.gettextutils import _ + +log = logging.getLogger('tosca') + + +class NodeTemplate(EntityTemplate): + '''Node 
template from a Tosca profile.''' + def __init__(self, name, node_templates, custom_def=None, + available_rel_tpls=None, available_rel_types=None): + super(NodeTemplate, self).__init__(name, node_templates[name], + 'node_type', + custom_def) + self.templates = node_templates + self._validate_fields(node_templates[name]) + self.custom_def = custom_def + self.related = {} + self.relationship_tpl = [] + self.available_rel_tpls = available_rel_tpls + self.available_rel_types = available_rel_types + self._relationships = {} + self.sub_mapping_tosca_template = None + + @property + def relationships(self): + if not self._relationships: + requires = self.requirements + if requires and isinstance(requires, list): + for r in requires: + for r1, value in r.items(): + explicit = self._get_explicit_relationship(r, value) + if explicit: + for key, value in explicit.items(): + self._relationships[key] = value + return self._relationships + + def _get_explicit_relationship(self, req, value): + """Handle explicit relationship + + For example, + - req: + node: DBMS + relationship: tosca.relationships.HostedOn + """ + explicit_relation = {} + node = value.get('node') if isinstance(value, dict) else value + + if node: + # TO-DO(spzala) implement look up once Glance meta data is available + # to find a matching TOSCA node using the TOSCA types + msg = _('Lookup by TOSCA types is not supported. 
' + 'Requirement for "%s" can not be full-filled.') % self.name + if (node in list(self.type_definition.TOSCA_DEF.keys()) + or node in self.custom_def): + ValidationIssueCollector.appendException(NotImplementedError(msg)) + return + + if node not in self.templates: + ValidationIssueCollector.appendException( + KeyError(_('Node template "%s" was not found.') % node)) + return + + related_tpl = NodeTemplate(node, self.templates, self.custom_def) + relationship = value.get('relationship') \ + if isinstance(value, dict) else None + # check if it's type has relationship defined + if not relationship: + parent_reqs = self.type_definition.get_all_requirements() + if parent_reqs is None: + ValidationIssueCollector.appendException( + ValidationError(message='parent_req is ' + + str(parent_reqs))) + else: + for key in req.keys(): + for req_dict in parent_reqs: + if key in req_dict.keys(): + relationship = (req_dict.get(key). + get('relationship')) + break + if relationship: + found_relationship_tpl = False + # apply available relationship templates if found + if self.available_rel_tpls: + for tpl in self.available_rel_tpls: + if tpl.name == relationship: + rtype = RelationshipType(tpl.type, None, + self.custom_def) + explicit_relation[rtype] = related_tpl + tpl.target = related_tpl + tpl.source = self + self.relationship_tpl.append(tpl) + found_relationship_tpl = True + # create relationship template object. 
+ rel_prfx = self.type_definition.RELATIONSHIP_PREFIX + if not found_relationship_tpl: + if isinstance(relationship, dict): + relationship = relationship.get('type') + if relationship: + if self.available_rel_types and \ + relationship in self.available_rel_types.keys(): + pass + elif not relationship.startswith(rel_prfx): + relationship = rel_prfx + relationship + else: + ValidationIssueCollector.appendException( + MissingRequiredFieldError( + what=_('"relationship" used in template ' + '"%s"') % related_tpl.name, + required=self.TYPE)) + for rtype in self.type_definition.relationship.keys(): + if rtype.type == relationship: + explicit_relation[rtype] = related_tpl + related_tpl._add_relationship_template(req, + rtype.type, + self) + elif self.available_rel_types: + if relationship in self.available_rel_types.keys(): + rel_type_def = self.available_rel_types.\ + get(relationship) + if 'derived_from' in rel_type_def: + super_type = \ + rel_type_def.get('derived_from') + if not super_type.startswith(rel_prfx): + super_type = rel_prfx + super_type + if rtype.type == super_type: + explicit_relation[rtype] = related_tpl + related_tpl.\ + _add_relationship_template( + req, rtype.type, self) + return explicit_relation + + def _add_relationship_template(self, requirement, rtype, source): + req = requirement.copy() + req['type'] = rtype + tpl = RelationshipTemplate(req, rtype, self.custom_def, self, source) + self.relationship_tpl.append(tpl) + + def get_relationship_template(self): + return self.relationship_tpl + + def _add_next(self, nodetpl, relationship): + self.related[nodetpl] = relationship + + @property + def related_nodes(self): + if not self.related: + for relation, node in self.type_definition.relationship.items(): + for tpl in self.templates: + if tpl == node.type: + self.related[NodeTemplate(tpl)] = relation + return self.related.keys() + + def validate(self, tosca_tpl=None): + self._validate_capabilities() + self._validate_requirements() + 
self._validate_properties(self.entity_tpl, self.type_definition) + self._validate_interfaces() + for prop in self.get_properties_objects(): + prop.validate() + + def _validate_requirements(self): + type_requires = self.type_definition.get_all_requirements() + allowed_reqs = ["template"] + if type_requires: + for treq in type_requires: + for key, value in treq.items(): + allowed_reqs.append(key) + if isinstance(value, dict): + for key in value: + allowed_reqs.append(key) + + requires = self.type_definition.get_value(self.REQUIREMENTS, + self.entity_tpl) + if requires: + if not isinstance(requires, list): + ValidationIssueCollector.appendException( + TypeMismatchError( + what='"requirements" of template "%s"' % self.name, + type='list')) + else: + for req in requires: + for r1, value in req.items(): + if isinstance(value, dict): + self._validate_requirements_keys(value) + self._validate_requirements_properties(value) + allowed_reqs.append(r1) + self._common_validate_field(req, allowed_reqs, + 'requirements') + + def _validate_requirements_properties(self, requirements): + # TO-DO(anyone): Only occurrences property of the requirements is + # validated here. Validation of other requirement properties are being + # validated in different files. Better to keep all the requirements + # properties validation here. 
+ for key, value in requirements.items(): + if key == 'occurrences': + self._validate_occurrences(value) + break + + def _validate_occurrences(self, occurrences): + DataEntity.validate_datatype('list', occurrences) + for value in occurrences: + DataEntity.validate_datatype('integer', value) + if len(occurrences) != 2 or not (0 <= occurrences[0] <= occurrences[1]) \ + or occurrences[1] == 0: + ValidationIssueCollector.appendException( + InvalidPropertyValueError(what=(occurrences))) + + def _validate_requirements_keys(self, requirement): + for key in requirement.keys(): + if key not in self.REQUIREMENTS_SECTION: + ValidationIssueCollector.appendException( + UnknownFieldError( + what='"requirements" of template "%s"' % self.name, + field=key)) + + def _validate_interfaces(self): + ifaces = self.type_definition.get_value(self.INTERFACES, + self.entity_tpl) + if ifaces: + for name, value in ifaces.items(): + if name in (LIFECYCLE, LIFECYCLE_SHORTNAME): + self._common_validate_field( + value, InterfacesDef. + interfaces_node_lifecycle_operations, + 'interfaces') + elif name in (CONFIGURE, CONFIGURE_SHORTNAME): + self._common_validate_field( + value, InterfacesDef. 
+ interfaces_relationship_configure_operations, + 'interfaces') + elif name in self.type_definition.interfaces.keys(): + self._common_validate_field( + value, + self._collect_custom_iface_operations(name), + 'interfaces') + else: + ValidationIssueCollector.appendException( + UnknownFieldError( + what='"interfaces" of template "%s"' % + self.name, field=name)) + + def _collect_custom_iface_operations(self, name): + allowed_operations = [] + nodetype_iface_def = self.type_definition.interfaces[name] + allowed_operations.extend(nodetype_iface_def.keys()) + if 'type' in nodetype_iface_def: + iface_type = nodetype_iface_def['type'] + if iface_type in self.type_definition.custom_def: + iface_type_def = self.type_definition.custom_def[iface_type] + else: + iface_type_def = self.type_definition.TOSCA_DEF[iface_type] + allowed_operations.extend(iface_type_def.keys()) + allowed_operations = [op for op in allowed_operations if + op not in INTERFACE_DEF_RESERVED_WORDS] + return allowed_operations + + def _validate_fields(self, nodetemplate): + for name in nodetemplate.keys(): + if name not in self.SECTIONS and name not in self.SPECIAL_SECTIONS: + ValidationIssueCollector.appendException( + UnknownFieldError(what='Node template "%s"' % self.name, + field=name))*/ diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/Policy.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/Policy.java new file mode 100644 index 0000000..ca8ac55 --- /dev/null +++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/Policy.java @@ -0,0 +1,232 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. 
package org.onap.sdc.toscaparser.api;

import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue;

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.Map;

import org.onap.sdc.toscaparser.api.elements.Metadata;
import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder;
import org.onap.sdc.toscaparser.api.utils.ValidateUtils;

/**
 * A policy defined in a TOSCA topology template.
 *
 * <p>Wraps the raw policy definition map parsed from the template and exposes
 * its metadata, targets, triggers and properties. The targets list holds
 * either NodeTemplate or Group objects; {@link #getTargetsType()} says which
 * kind the caller resolved.</p>
 */
public class Policy extends EntityTemplate {

    static final String TYPE = "type";
    static final String METADATA = "metadata";
    static final String DESCRIPTION = "description";
    static final String PROPERTIES = "properties";
    static final String TARGETS = "targets";
    private static final String TRIGGERS = "triggers";
    // The only keys a policy definition may contain; anything else is flagged
    // by _validateKeys() as JE219.
    private static final String[] SECTIONS = {
            TYPE, METADATA, DESCRIPTION, PROPERTIES, TARGETS, TRIGGERS};

    Metadata metaDataObject;
    LinkedHashMap<String, Object> metaData = null;
    ArrayList targetsList; // a list of NodeTemplate OR a list of Group
    String targetsType;    // kind of targetsList, as supplied by the caller
    ArrayList<Triggers> triggers;
    LinkedHashMap<String, Object> properties;

    public Policy(String _name,
                  LinkedHashMap<String, Object> _policy,
                  ArrayList targetObjects,
                  String _targetsType,
                  LinkedHashMap<String, Object> _customDef) {
        this(_name, _policy, targetObjects, _targetsType, _customDef, null);
    }

    /**
     * Builds a Policy from its parsed template definition.
     *
     * @param _name              policy name as it appears in the template
     * @param _policy            raw policy definition map
     * @param targetObjects      resolved target objects (NodeTemplate or Group)
     * @param _targetsType       kind of the objects in {@code targetObjects}
     * @param _customDef         custom type definitions in scope
     * @param parentNodeTemplate enclosing node template, or null
     */
    @SuppressWarnings("unchecked")
    public Policy(String _name,
                  LinkedHashMap<String, Object> _policy,
                  ArrayList targetObjects,
                  String _targetsType,
                  LinkedHashMap<String, Object> _customDef,
                  NodeTemplate parentNodeTemplate) {
        super(_name, _policy, "policy_type", _customDef, parentNodeTemplate);

        if (_policy.get(METADATA) != null) {
            metaData = (LinkedHashMap<String, Object>) _policy.get(METADATA);
            ValidateUtils.validateMap(metaData);
            metaDataObject = new Metadata(metaData);
        }

        targetsList = targetObjects;
        targetsType = _targetsType;
        triggers = _triggers((LinkedHashMap<String, Object>) _policy.get(TRIGGERS));
        properties = null;
        // Use the declared constant instead of the literal "properties".
        if (_policy.get(PROPERTIES) != null) {
            properties = (LinkedHashMap<String, Object>) _policy.get(PROPERTIES);
        }
        _validateKeys();
    }

    /** @return the raw "targets" entry of the policy definition. */
    public ArrayList getTargets() {
        return (ArrayList) entityTpl.get(TARGETS);
    }

    // NOTE(review): "description" is normally a plain string in TOSCA, so this
    // cast looks suspect; the ArrayList return type is kept for interface
    // compatibility — confirm against callers before changing.
    public ArrayList getDescription() {
        return (ArrayList) entityTpl.get(DESCRIPTION);
    }

    // NOTE(review): same concern as getDescription(); metadata is usually a
    // map. Signature (and non-standard name) kept for compatibility.
    public ArrayList getmetadata() {
        return (ArrayList) entityTpl.get(METADATA);
    }

    /** @return the kind of objects held in the targets list. */
    public String getTargetsType() {
        return targetsType;
    }

    public Metadata getMetaDataObj() {
        return metaDataObject;
    }

    public LinkedHashMap<String, Object> getMetaData() {
        return metaData;
    }

    /** @return the resolved target objects (NodeTemplate or Group instances). */
    public ArrayList getTargetsList() {
        return targetsList;
    }

    /**
     * Accessor for the locally stored properties map. EntityTemplate already
     * defines a different getProperties(), hence the distinct name.
     */
    public LinkedHashMap<String, Object> getPolicyProperties() {
        return properties;
    }

    // Builds Triggers objects from the raw "triggers" map (may be null).
    @SuppressWarnings("unchecked")
    private ArrayList<Triggers> _triggers(LinkedHashMap<String, Object> triggers) {
        ArrayList<Triggers> triggerObjs = new ArrayList<>();
        if (triggers != null) {
            for (Map.Entry<String, Object> me : triggers.entrySet()) {
                String tname = me.getKey();
                LinkedHashMap<String, Object> ttriggerTpl =
                        (LinkedHashMap<String, Object>) me.getValue();
                triggerObjs.add(new Triggers(tname, ttriggerTpl));
            }
        }
        return triggerObjs;
    }

    // Flags any key of the policy definition that is not a known SECTION.
    private void _validateKeys() {
        for (String key : entityTpl.keySet()) {
            boolean known = false;
            for (String section : SECTIONS) {
                if (key.equals(section)) {
                    known = true;
                    break;
                }
            }
            if (!known) {
                ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE219", String.format(
                        "UnknownFieldError: Policy \"%s\" contains unknown field \"%s\"",
                        name, key)));
            }
        }
    }

    @Override
    public String toString() {
        return "Policy{"
                + "metaData=" + metaData
                + ", targetsList=" + targetsList
                + ", targetsType='" + targetsType + '\''
                + ", triggers=" + triggers
                + ", properties=" + properties
                + '}';
    }

    /**
     * Orders policies by name, then by type. Equal objects compare as 0.
     */
    public int compareTo(Policy other) {
        if (this.equals(other)) {
            return 0;
        }
        int byName = this.getName().compareTo(other.getName());
        return byName == 0 ? this.getType().compareTo(other.getType()) : byName;
    }
}
package org.onap.sdc.toscaparser.api;

import org.onap.sdc.toscaparser.api.elements.constraints.Constraint;
import org.onap.sdc.toscaparser.api.elements.constraints.Schema;
import org.onap.sdc.toscaparser.api.functions.Function;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.stream.Collectors;

/**
 * A TOSCA property instance: a name/value pair together with the
 * {@link Schema} that describes its type, entry schema and constraints.
 */
public class Property {

    private static final Logger LOGGER = LoggerFactory.getLogger(Property.class.getName());

    private static final String TYPE = "type";
    private static final String REQUIRED = "required";
    private static final String DESCRIPTION = "description";
    private static final String DEFAULT = "default";
    private static final String CONSTRAINTS = "constraints";
    // Made static final: these are constants, not mutable class state.
    private static final String ENTRY_SCHEMA = "entry_schema";
    // Substring identifying custom data-type names (e.g. "...datatypes...").
    private static final String DATA_TYPE = "datatypes";

    private static final String[] PROPERTY_KEYS = {
            TYPE, REQUIRED, DESCRIPTION, DEFAULT, CONSTRAINTS};

    private static final String ENTRYTYPE = "type";
    private static final String ENTRYPROPERTIES = "properties";
    // Delimiter used in property paths such as "prop#field#subfield".
    private static final String PATH_DELIMITER = "#";
    private static final String[] ENTRY_SCHEMA_KEYS = {
            ENTRYTYPE, ENTRYPROPERTIES};

    private String name;
    private Object value;
    private Schema schema;
    private LinkedHashMap<String, Object> customDef;

    /** Builds a schema-less property from a raw map entry (name -> value). */
    public Property(Map.Entry<String, Object> propertyEntry) {
        name = propertyEntry.getKey();
        value = propertyEntry.getValue();
    }

    /**
     * Builds a fully described property.
     *
     * @param propname       property name
     * @param propvalue      raw value from the template
     * @param propschemaDict raw schema definition map for this property
     * @param propcustomDef  custom (data) type definitions in scope
     */
    public Property(String propname,
                    Object propvalue,
                    LinkedHashMap<String, Object> propschemaDict,
                    LinkedHashMap<String, Object> propcustomDef) {
        name = propname;
        value = propvalue;
        customDef = propcustomDef;
        schema = new Schema(propname, propschemaDict);
    }

    public String getType() {
        return schema.getType();
    }

    public boolean isRequired() {
        return schema.isRequired();
    }

    public String getDescription() {
        return schema.getDescription();
    }

    public Object getDefault() {
        return schema.getDefault();
    }

    public ArrayList<Constraint> getConstraints() {
        return schema.getConstraints();
    }

    public LinkedHashMap<String, Object> getEntrySchema() {
        return schema.getEntrySchema();
    }

    public String getName() {
        return name;
    }

    public Object getValue() {
        return value;
    }

    /** Replaces the property value and returns it. */
    public Object setValue(Object vob) {
        value = vob;
        return value;
    }

    /**
     * Validates the value against its declared type and constraints.
     * Function references (get_input etc.) are left untouched.
     */
    public void validate() {
        if (!Function.isFunction(value)) {
            if (getType().equals(Schema.STRING)) {
                value = value.toString();
            }
            value = DataEntity.validateDatatype(getType(), value,
                    getEntrySchema(),
                    customDef,
                    name);
            validateConstraints();
        }
    }

    private void validateConstraints() {
        if (getConstraints() != null) {
            for (Constraint constraint : getConstraints()) {
                constraint.validate(value);
            }
        }
    }

    @Override
    public String toString() {
        return "Property{"
                + "name='" + name + '\''
                + ", value=" + value
                + ", schema=" + schema
                + ", customDef=" + customDef
                + '}';
    }

    /**
     * Retrieves the property value as a list of strings if
     * - the value is simple
     * - the value is a list of simple values
     * - the provided path refers to a simple property inside a data type
     *
     * @param propertyPath valid name of property for search.
     *                     If a name refers to a simple field inside a datatype,
     *                     the property name should be defined with the # delimiter.
     * @return list of property values; empty if not found. If the value is a
     *         list (of simple fields, or of simple fields inside a datatype),
     *         all values from the list are returned.
     */
    public List<String> getLeafPropertyValue(String propertyPath) {
        List<String> propertyValueList = Collections.emptyList();

        if (LOGGER.isDebugEnabled()) {
            LOGGER.debug("getLeafPropertyValue=> A new request: propertyPath: {}, value: {}", propertyPath, getValue());
        }
        if (propertyPath == null || getValue() == null
                // if entry_schema is present, it is a map of simple types,
                // not a data type - should be ignored
                || isValueMapOfSimpleTypes()) {
            LOGGER.error("It is a wrong request - ignoring! propertyPath: {}, value: {}", propertyPath, getValue());
            return propertyValueList;
        }
        String[] path = propertyPath.split(PATH_DELIMITER);

        if (Schema.isRequestedTypeSimple(getPropertyTypeByPath(path))) {
            // the property type addressed by the path is simple (or a list of
            // simple types)
            if (isValueInsideDataType()) {
                if (LOGGER.isDebugEnabled()) {
                    LOGGER.debug("The requested is an internal simple property inside of a data type");
                }
                propertyValueList = getSimplePropertyValueForComplexType(path);
            } else {
                if (LOGGER.isDebugEnabled()) {
                    LOGGER.debug("The requested property has simple type or list of simple types");
                }
                propertyValueList = getSimplePropertyValueForSimpleType();
            }
        }
        return propertyValueList;
    }

    private boolean isValueMapOfSimpleTypes() {
        if (getValue() instanceof Map && getEntrySchema() != null) {
            LOGGER.warn("This property value is a map of simple types");
            return true;
        }
        return false;
    }

    // True when the value is a data type, or a list of values of a data type.
    private boolean isValueInsideDataType() {
        return (Schema.LIST.equals(getType()) && isDataTypeInEntrySchema())
                || (getEntrySchema() == null && getType().contains(DATA_TYPE));
    }

    /**
     * Walks {@code path} down through nested maps; a list along the way is
     * descended via its first element. Returns null when the path cannot be
     * followed.
     */
    private Object getSimpleValueFromComplexObject(Object current, String[] path) {
        for (int i = 0; current != null && i < path.length; i++) {
            if (current instanceof Map) {
                current = ((Map) current).get(path[i]);
            } else if (current instanceof List) {
                // Descend into the first element and retry the same path part.
                // Guard against an empty list (previously threw IndexOutOfBounds).
                List listValue = (List) current;
                if (listValue.isEmpty()) {
                    return null;
                }
                current = listValue.get(0);
                i--;
            } else {
                return null;
            }
        }
        return current;
    }

    private List<String> getSimplePropertyValueForSimpleType() {
        if (getValue() instanceof List || getValue() instanceof Map) {
            return getSimplePropertyValueForComplexType(null);
        }
        List<String> single = new ArrayList<>();
        single.add(String.valueOf(value));
        return single;
    }

    @SuppressWarnings("unchecked")
    private List<String> getSimplePropertyValueForComplexType(String[] path) {
        if (getValue() instanceof List) {
            return ((List<Object>) getValue()).stream()
                    .map(v -> path != null ? getSimpleValueFromComplexObject(v, path) : v)
                    // it might be null when get_input can't be resolved
                    // e.g.:
                    // - get_input has two parameters: 1. list and 2. index in this list
                    //   and the list has no value
                    // - neither value nor default is defined for get_input
                    .filter(Objects::nonNull)
                    .map(String::valueOf)
                    .collect(Collectors.toList());
        }
        // it is a data type: extract the addressed field.
        // BUG FIX: the null check must happen on the extracted object BEFORE
        // String.valueOf() - previously String.valueOf(null) produced the
        // literal string "null", which was then always added to the result.
        List<String> valueList = new ArrayList<>();
        Object extracted = getSimpleValueFromComplexObject(getValue(), path);
        if (extracted != null) {
            valueList.add(String.valueOf(extracted));
        }
        return valueList;
    }

    private String getPropertyTypeByPath(String[] path) {
        String propertyType = calculatePropertyType();

        if (path.length > 0 && !path[0].isEmpty()) {
            return getInternalPropertyType(propertyType, path, 0);
        }
        return propertyType;
    }

    private String calculatePropertyType() {
        String propertyType = getType();
        if (Schema.LIST.equals(propertyType)) {
            // for a list, the element type comes from the entry schema
            return (String) getEntrySchema().get(ENTRYTYPE);
        }
        return propertyType;
    }

    private String calculatePropertyType(LinkedHashMap<String, Object> property) {
        String type = (String) property.get(TYPE);
        if (Schema.LIST.equals(type)) {
            // it might be a data type
            return getEntrySchemaType(property);
        }
        return type;
    }

    // Resolves the type of path[index] inside the custom data type dataTypeName.
    @SuppressWarnings("unchecked")
    private String getInternalPropertyType(String dataTypeName, String[] path, int index) {
        if (path.length > index) {
            LinkedHashMap<String, Object> complexProperty = (LinkedHashMap<String, Object>) customDef.get(dataTypeName);
            if (complexProperty != null) {
                LinkedHashMap<String, Object> dataTypeProperties =
                        (LinkedHashMap<String, Object>) complexProperty.get(ENTRYPROPERTIES);
                return getPropertyTypeFromCustomDefDeeply(path, index, dataTypeProperties);
            }
        }
        // stop searching - the path is exhausted but the type was not found
        return null;
    }

    @SuppressWarnings("unchecked")
    private String getEntrySchemaType(LinkedHashMap<String, Object> property) {
        LinkedHashMap<String, Object> schemaDef =
                (LinkedHashMap<String, Object>) property.get(ENTRY_SCHEMA);
        if (schemaDef != null) {
            return (String) schemaDef.get(TYPE);
        }
        return null;
    }

    @SuppressWarnings("unchecked")
    private String getPropertyTypeFromCustomDefDeeply(String[] path, int index, LinkedHashMap<String, Object> properties) {
        if (properties != null) {
            LinkedHashMap<String, Object> foundProperty =
                    (LinkedHashMap<String, Object>) properties.get(path[index]);
            if (foundProperty != null) {
                String propertyType = calculatePropertyType(foundProperty);
                if (propertyType == null || index == path.length - 1) {
                    return propertyType;
                }
                return getInternalPropertyType(propertyType, path, index + 1);
            }
        }
        return null;
    }

    private boolean isDataTypeInEntrySchema() {
        String entrySchemaType = (String) getEntrySchema().get(ENTRYTYPE);
        return entrySchemaType != null && entrySchemaType.contains(DATA_TYPE);
    }
}
package org.onap.sdc.toscaparser.api;

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.Map;

import org.onap.sdc.toscaparser.api.elements.PropertyDef;
import org.onap.sdc.toscaparser.api.elements.StatefulEntityType;
import org.onap.sdc.toscaparser.api.elements.EntityType;

/**
 * A relationship template: the relationship between a source node template
 * and a target node template, with the properties resolved from the
 * relationship type definition and the template itself.
 */
public class RelationshipTemplate extends EntityTemplate {

    private static final String DERIVED_FROM = "derived_from";
    private static final String PROPERTIES = "properties";
    private static final String REQUIREMENTS = "requirements";
    private static final String INTERFACES = "interfaces";
    private static final String CAPABILITIES = "capabilities";
    private static final String TYPE = "type";
    @SuppressWarnings("unused")
    private static final String[] SECTIONS = {
            DERIVED_FROM, PROPERTIES, REQUIREMENTS, INTERFACES, CAPABILITIES, TYPE};

    private String name;
    private NodeTemplate target;
    private NodeTemplate source;
    private ArrayList<Property> _properties; // lazily built by getPropertiesObjects()

    public RelationshipTemplate(LinkedHashMap<String, Object> rtrelationshipTemplate,
                                String rtname,
                                LinkedHashMap<String, Object> rtcustomDef,
                                NodeTemplate rttarget,
                                NodeTemplate rtsource) {
        this(rtrelationshipTemplate, rtname, rtcustomDef, rttarget, rtsource, null);
    }

    /**
     * Builds a relationship template.
     *
     * @param rtrelationshipTemplate raw relationship template definition map
     * @param rtname                 template name
     * @param rtcustomDef            custom type definitions in scope
     * @param rttarget               target node template (may be null)
     * @param rtsource               source node template (may be null)
     * @param parentNodeTemplate     enclosing node template, or null
     */
    public RelationshipTemplate(LinkedHashMap<String, Object> rtrelationshipTemplate,
                                String rtname,
                                LinkedHashMap<String, Object> rtcustomDef,
                                NodeTemplate rttarget,
                                NodeTemplate rtsource,
                                NodeTemplate parentNodeTemplate) {
        super(rtname, rtrelationshipTemplate, "relationship_type", rtcustomDef, parentNodeTemplate);

        name = rtname;
        target = rttarget;
        source = rtsource;
        _properties = null;
    }

    /** Returns (building on first use) the Property objects of this template. */
    public ArrayList<Property> getPropertiesObjects() {
        if (_properties == null) {
            _properties = _createRelationshipProperties();
        }
        return _properties;
    }

    /**
     * Builds Property objects from the relationship definition, falling back
     * to the template's own "properties" section, and adds defaults declared
     * by the type definition for properties not explicitly set.
     */
    @SuppressWarnings("unchecked")
    public ArrayList<Property> _createRelationshipProperties() {
        ArrayList<Property> props = new ArrayList<>();
        LinkedHashMap<String, Object> properties = null;

        // Locate the "relationship" map: either a direct entry or nested one
        // level down inside another map value.
        LinkedHashMap<String, Object> relationship =
                (LinkedHashMap<String, Object>) entityTpl.get("relationship");
        if (relationship == null) {
            for (Object val : entityTpl.values()) {
                if (val instanceof LinkedHashMap) {
                    relationship = (LinkedHashMap<String, Object>)
                            ((LinkedHashMap<String, Object>) val).get("relationship");
                    break;
                }
            }
        }

        if (relationship != null) {
            properties = (LinkedHashMap<String, Object>)
                    ((EntityType) typeDefinition).getValue(PROPERTIES, relationship, false);
        }
        // BUG FIX: the original substituted an empty map *before* this
        // fallback, making the template-level lookup unreachable. The Python
        // original chains with "or {}": fall back when null or empty.
        if (properties == null || properties.isEmpty()) {
            properties = (LinkedHashMap<String, Object>) entityTpl.get(PROPERTIES);
        }
        if (properties == null) {
            properties = new LinkedHashMap<>();
        }

        LinkedHashMap<String, PropertyDef> propsDef =
                ((StatefulEntityType) typeDefinition).getPropertiesDef();
        for (Map.Entry<String, Object> me : properties.entrySet()) {
            String pname = me.getKey();
            // BUG FIX: was "properties.get(name)" - the template's own name
            // field - instead of the loop key; the entry's value is correct.
            Object pvalue = me.getValue();
            if (propsDef != null && propsDef.get(pname) != null) {
                PropertyDef pd = propsDef.get(pname);
                props.add(new Property(pname, pvalue, pd.getSchema(), customDef));
            }
        }

        // Add type-level defaults for properties not set on the template.
        ArrayList<PropertyDef> pds =
                ((StatefulEntityType) typeDefinition).getPropertiesDefObjects();
        for (PropertyDef p : pds) {
            if (p.getDefault() != null && properties.get(p.getName()) == null) {
                props.add(new Property(p.getName(), p.getDefault(), p.getSchema(), customDef));
            }
        }
        return props;
    }

    /** Validates the template's properties against the type definition. */
    public void validate() {
        _validateProperties(entityTpl, (StatefulEntityType) typeDefinition);
    }

    // getters/setters
    public NodeTemplate getTarget() {
        return target;
    }

    public NodeTemplate getSource() {
        return source;
    }

    public void setSource(NodeTemplate nt) {
        source = nt;
    }

    public void setTarget(NodeTemplate nt) {
        target = nt;
    }

    @Override
    public String toString() {
        // Null-safe: source/target may legitimately be unset.
        return "RelationshipTemplate{"
                + "name='" + name + '\''
                + ", target=" + (target != null ? target.getName() : null)
                + ", source=" + (source != null ? source.getName() : null)
                + ", _properties=" + _properties
                + '}';
    }
}
package org.onap.sdc.toscaparser.api;

import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue;
import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder;
import org.onap.sdc.toscaparser.api.utils.UrlUtils;

import java.util.LinkedHashMap;

/**
 * A repository definition from the "repositories" section of a TOSCA
 * template. Validates on construction that the mandatory "url" field is
 * present, that only known keys are used, and that the URL is well formed;
 * problems are reported as validation issues (JE229/JE230/JE231).
 */
public class Repository {

    private static final String DESCRIPTION = "description";
    private static final String URL = "url";
    private static final String CREDENTIAL = "credential";
    // The only keys a repository definition may contain.
    private static final String[] SECTIONS = {DESCRIPTION, URL, CREDENTIAL};

    private String name;
    private Object reposit; // raw definition: normally a map
    private String url;

    /**
     * @param repName  repository name as it appears in the template
     * @param repValue raw repository definition (a map in the normal case)
     */
    @SuppressWarnings("unchecked")
    public Repository(String repName, Object repValue) {
        name = repName;
        reposit = repValue;
        if (reposit instanceof LinkedHashMap) {
            url = (String) ((LinkedHashMap<String, Object>) reposit).get(URL);
            if (url == null) {
                ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE229", String.format(
                        "MissingRequiredFieldError: Repository \"%s\" is missing required field \"url\"",
                        name)));
            }
        }
        loadAndValidate(name, reposit);
    }

    // Checks for unknown keys and validates the URL, reporting issues.
    @SuppressWarnings("unchecked")
    private void loadAndValidate(String val, Object repositDef) {
        String keyname = val;
        if (repositDef instanceof LinkedHashMap) {
            // BUG FIX: iterate the repositDef parameter, not the reposit
            // field (they are the same object today, but the old code
            // silently ignored its argument).
            for (String key : ((LinkedHashMap<String, Object>) repositDef).keySet()) {
                boolean known = false;
                for (String sect : SECTIONS) {
                    if (key.equals(sect)) {
                        known = true;
                        break;
                    }
                }
                if (!known) {
                    ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE230", String.format(
                            "UnknownFieldError: repositories \"%s\" contains unknown field \"%s\"",
                            keyname, key)));
                }
            }

            String repositUrl = (String) ((LinkedHashMap<String, Object>) repositDef).get(URL);
            if (repositUrl != null && !UrlUtils.validateUrl(repositUrl)) {
                // BUG FIX: message previously read "repsositories".
                ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE231", String.format(
                        "URLException: repositories \"%s\" Invalid Url", keyname)));
            }
        }
    }

    @Override
    public String toString() {
        return "Repository{"
                + "name='" + name + '\''
                + ", reposit=" + reposit
                + ", url='" + url + '\''
                + '}';
    }
}
/**
 * A single requirement assignment of a node template: the requirement name
 * plus the node it is assigned to, and optionally the capability and the
 * relationship used for the assignment.
 */
public class RequirementAssignment {

    private String name;
    private String nodeName;
    private String capabilityName;
    private Object relationship;

    public RequirementAssignment(String reqName, String nodeName) {
        this(reqName, nodeName, null, null);
    }

    public RequirementAssignment(String reqName, String nodeName, String capabilityName) {
        this(reqName, nodeName, capabilityName, null);
    }

    /**
     * Full constructor; the two shorter overloads delegate here.
     *
     * @param reqName        requirement name
     * @param nodeName       name of the assigned node template
     * @param capabilityName name of the targeted capability, or null
     * @param relationship   relationship definition object, or null
     */
    public RequirementAssignment(String reqName, String nodeName, String capabilityName, Object relationship) {
        this.name = reqName;
        this.nodeName = nodeName;
        this.capabilityName = capabilityName;
        this.relationship = relationship;
    }

    /**
     * Get the name for requirement assignment.
     *
     * @return the name for requirement assignment.
     */
    public String getName() {
        return name;
    }

    /**
     * Set the name for requirement.
     *
     * @param name - the name for requirement to set
     */
    public void setName(String name) {
        this.name = name;
    }

    /**
     * Get the node name for requirement assignment.
     *
     * @return the node name for requirement
     */
    public String getNodeTemplateName() {
        return nodeName;
    }

    /**
     * Set the node name for requirement.
     *
     * @param nodeName - the node name for requirement to set
     */
    public void setNodeTemplateName(String nodeName) {
        this.nodeName = nodeName;
    }

    /**
     * Get the capability name for requirement assignment.
     *
     * @return the capability name for requirement
     */
    public String getCapabilityName() {
        return capabilityName;
    }

    /**
     * Set the capability name for requirement assignment.
     *
     * @param capabilityName - the capability name for requirement to set
     */
    public void setCapabilityName(String capabilityName) {
        this.capabilityName = capabilityName;
    }

    /**
     * Get the relationship object for requirement.
     *
     * @return the relationship object for requirement
     */
    public Object getRelationship() {
        return relationship;
    }
}
+ * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + +package org.onap.sdc.toscaparser.api; + +import java.util.ArrayList; +import java.util.List; +import java.util.stream.Collectors; + +public class RequirementAssignments { + + private List requirementAssignmentList; + + public RequirementAssignments(List requirementAssignments) { + this.requirementAssignmentList = requirementAssignments != null ? new ArrayList<>(requirementAssignments) : new ArrayList<>(); + } + + /** + * Get all requirement assignments for Node Template.
+ * This object can be either the original one, holding all requirement assignments for this node template,or a filtered one, holding a filtered subset.
+ * + * @return list of requirement assignments for the node template.
+ * If there are no requirement assignments, empty list is returned. + */ + public List getAll() { + return new ArrayList<>(requirementAssignmentList); + } + + /** + * Filter requirement assignments by requirement name. + * + * @param reqName - The name of requirement + * @return RequirementAssignments object, containing requirement assignments of this type.
+ * If no such found, filtering will result in an empty collection. + */ + public RequirementAssignments getRequirementsByName(String reqName) { + List requirementAssignments = requirementAssignmentList.stream() + .filter(req -> req.getName().equals(reqName)).collect(Collectors.toList()); + + return new RequirementAssignments(requirementAssignments); + } +} diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/SubstitutionMappings.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/SubstitutionMappings.java new file mode 100644 index 0000000..a622a9a --- /dev/null +++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/SubstitutionMappings.java @@ -0,0 +1,539 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + +package org.onap.sdc.toscaparser.api; + +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.elements.NodeType; +import org.onap.sdc.toscaparser.api.elements.PropertyDef; +import org.onap.sdc.toscaparser.api.parameters.Input; +import org.onap.sdc.toscaparser.api.parameters.Output; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +import java.util.ArrayList; +import java.util.HashSet; +import java.util.LinkedHashMap; +import java.util.List; + + +public class SubstitutionMappings { + // SubstitutionMappings class declaration + + // SubstitutionMappings exports the topology template as an + // implementation of a Node type. + + private static final String NODE_TYPE = "node_type"; + private static final String REQUIREMENTS = "requirements"; + private static final String CAPABILITIES = "capabilities"; + + private static final String SECTIONS[] = {NODE_TYPE, REQUIREMENTS, CAPABILITIES}; + + private static final String OPTIONAL_OUTPUTS[] = {"tosca_id", "tosca_name", "state"}; + + private LinkedHashMap subMappingDef; + private ArrayList nodetemplates; + private ArrayList inputs; + private ArrayList outputs; + private ArrayList groups; + private NodeTemplate subMappedNodeTemplate; + private LinkedHashMap customDefs; + private LinkedHashMap _capabilities; + private LinkedHashMap _requirements; + + public SubstitutionMappings(LinkedHashMap smsubMappingDef, + ArrayList smnodetemplates, + ArrayList sminputs, + ArrayList smoutputs, + ArrayList smgroups, + NodeTemplate smsubMappedNodeTemplate, + LinkedHashMap smcustomDefs) { + + subMappingDef = smsubMappingDef; + nodetemplates = smnodetemplates; + inputs = sminputs != null ? sminputs : new ArrayList(); + outputs = smoutputs != null ? smoutputs : new ArrayList(); + groups = smgroups != null ? 
smgroups : new ArrayList(); + subMappedNodeTemplate = smsubMappedNodeTemplate; + customDefs = smcustomDefs != null ? smcustomDefs : new LinkedHashMap(); + _validate(); + + _capabilities = null; + _requirements = null; + } + + public String getType() { + if (subMappingDef != null) { + return (String) subMappingDef.get(NODE_TYPE); + } + return null; + } + + public ArrayList getNodeTemplates() { + return nodetemplates; + } + + /* + @classmethod + def get_node_type(cls, sub_mapping_def): + if isinstance(sub_mapping_def, dict): + return sub_mapping_def.get(cls.NODE_TYPE) + */ + + public static String stGetNodeType(LinkedHashMap _subMappingDef) { + if (_subMappingDef instanceof LinkedHashMap) { + return (String) _subMappingDef.get(NODE_TYPE); + } + return null; + } + + public String getNodeType() { + return (String) subMappingDef.get(NODE_TYPE); + } + + public ArrayList getInputs() { + return inputs; + } + + public ArrayList getGroups() { + return groups; + } + + public LinkedHashMap getCapabilities() { + return (LinkedHashMap) subMappingDef.get(CAPABILITIES); + } + + public LinkedHashMap getRequirements() { + return (LinkedHashMap) subMappingDef.get(REQUIREMENTS); + } + + public NodeType getNodeDefinition() { + return new NodeType(getNodeType(), customDefs); + } + + private void _validate() { + // Basic validation + _validateKeys(); + _validateType(); + + // SubstitutionMapping class syntax validation + _validateInputs(); + _validateCapabilities(); + _validateRequirements(); + _validateOutputs(); + } + + private void _validateKeys() { + // validate the keys of substitution mappings + for (String key : subMappingDef.keySet()) { + boolean bFound = false; + for (String s : SECTIONS) { + if (s.equals(key)) { + bFound = true; + break; + } + } + if (!bFound) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE232", String.format( + "UnknownFieldError: SubstitutionMappings contain unknown field \"%s\"", + key))); + } + } + } + + private 
void _validateType() { + // validate the node_type of substitution mappings + String nodeType = (String) subMappingDef.get(NODE_TYPE); + if (nodeType == null) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE233", String.format( + "MissingRequiredFieldError: SubstitutionMappings used in topology_template is missing required field \"%s\"", + NODE_TYPE))); + } + Object nodeTypeDef = customDefs.get(nodeType); + if (nodeTypeDef == null) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE234", String.format( + "InvalidNodeTypeError: \"%s\" is invalid", nodeType))); + } + } + + private void _validateInputs() { + // validate the inputs of substitution mappings. + + // The inputs defined by the topology template have to match the + // properties of the node type or the substituted node. If there are + // more inputs than the substituted node has properties, default values + //must be defined for those inputs. + + HashSet allInputs = new HashSet<>(); + for (Input inp : inputs) { + allInputs.add(inp.getName()); + } + HashSet requiredProperties = new HashSet<>(); + for (PropertyDef pd : getNodeDefinition().getPropertiesDefObjects()) { + if (pd.isRequired() && pd.getDefault() == null) { + requiredProperties.add(pd.getName()); + } + } + // Must provide inputs for required properties of node type. + for (String property : requiredProperties) { + // Check property which is 'required' and has no 'default' value + if (!allInputs.contains(property)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE235", String.format( + "MissingRequiredInputError: SubstitutionMappings with node_type \"%s\" is missing required input \"%s\"", + getNodeType(), property))); + } + } + // If the optional properties of node type need to be customized by + // substituted node, it also is necessary to define inputs for them, + // otherwise they are not mandatory to be defined. 
+ HashSet customizedParameters = new HashSet<>(); + if (subMappedNodeTemplate != null) { + customizedParameters.addAll(subMappedNodeTemplate.getProperties().keySet()); + } + HashSet allProperties = new HashSet( + getNodeDefinition().getPropertiesDef().keySet()); + HashSet diffset = customizedParameters; + diffset.removeAll(allInputs); + for (String parameter : diffset) { + if (allProperties.contains(parameter)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE236", String.format( + "MissingRequiredInputError: SubstitutionMappings with node_type \"%s\" is missing required input \"%s\"", + getNodeType(), parameter))); + } + } + // Additional inputs are not in the properties of node type must + // provide default values. Currently the scenario may not happen + // because of parameters validation in nodetemplate, here is a + // guarantee. + for (Input inp : inputs) { + diffset = allInputs; + diffset.removeAll(allProperties); + if (diffset.contains(inp.getName()) && inp.getDefault() == null) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE237", String.format( + "MissingRequiredInputError: SubstitutionMappings with node_type \"%s\" is missing rquired input \"%s\"", + getNodeType(), inp.getName()))); + } + } + } + + private void _validateCapabilities() { + // validate the capabilities of substitution mappings + + // The capabilities must be in node template which be mapped. 
+ LinkedHashMap tplsCapabilities = + (LinkedHashMap) subMappingDef.get(CAPABILITIES); + List nodeCapabilities = null; + if (subMappedNodeTemplate != null) { + nodeCapabilities = subMappedNodeTemplate.getCapabilities().getAll(); + } + if (nodeCapabilities != null) { + for (CapabilityAssignment cap : nodeCapabilities) { + if (tplsCapabilities != null && tplsCapabilities.get(cap.getName()) == null) { + ; //pass + // ValidationIssueCollector.appendException( + // UnknownFieldError(what='SubstitutionMappings', + // field=cap)) + } + } + } + } + + private void _validateRequirements() { + // validate the requirements of substitution mappings + //***************************************************** + //TO-DO - Different from Python code!! one is a bug... + //***************************************************** + // The requirements must be in node template which be mapped. + LinkedHashMap tplsRequirements = + (LinkedHashMap) subMappingDef.get(REQUIREMENTS); + List nodeRequirements = null; + if (subMappedNodeTemplate != null) { + nodeRequirements = subMappedNodeTemplate.getRequirements().getAll(); + } + if (nodeRequirements != null) { + for (RequirementAssignment ro : nodeRequirements) { + String cap = ro.getName(); + if (tplsRequirements != null && tplsRequirements.get(cap) == null) { + ; //pass + // ValidationIssueCollector.appendException( + // UnknownFieldError(what='SubstitutionMappings', + // field=cap)) + } + } + } + } + + private void _validateOutputs() { + // validate the outputs of substitution mappings. + + // The outputs defined by the topology template have to match the + // attributes of the node type or the substituted node template, + // and the observable attributes of the substituted node template + // have to be defined as attributes of the node type or outputs in + // the topology template. 
+ + // The outputs defined by the topology template have to match the + // attributes of the node type according to the specification, but + // it's reasonable that there are more inputs than the node type + // has properties, the specification will be amended? + + for (Output output : outputs) { + Object ado = getNodeDefinition().getAttributesDef(); + if (ado != null && ((LinkedHashMap) ado).get(output.getName()) == null) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE238", String.format( + "UnknownOutputError: Unknown output \"%s\" in SubstitutionMappings with node_type \"%s\"", + output.getName(), getNodeType()))); + } + } + } + + @Override + public String toString() { + return "SubstitutionMappings{" + +// "subMappingDef=" + subMappingDef + +// ", nodetemplates=" + nodetemplates + +// ", inputs=" + inputs + +// ", outputs=" + outputs + +// ", groups=" + groups + + ", subMappedNodeTemplate=" + (subMappedNodeTemplate == null ? "" : subMappedNodeTemplate.getName()) + +// ", customDefs=" + customDefs + +// ", _capabilities=" + _capabilities + +// ", _requirements=" + _requirements + + '}'; + } + + @Deprecated + public String toLimitedString() { + return "SubstitutionMappings{" + + "subMappingDef=" + subMappingDef + + ", nodetemplates=" + nodetemplates + + ", inputs=" + inputs + + ", outputs=" + outputs + + ", groups=" + groups + + ", subMappedNodeTemplate=" + (subMappedNodeTemplate == null ? 
"" : subMappedNodeTemplate.getName()) + + ", customDefs=" + customDefs + + ", _capabilities=" + _capabilities + + ", _requirements=" + _requirements + + '}'; + } +} + + +/*python + +from toscaparser.common.exception import ValidationIssueCollector +from toscaparser.common.exception import InvalidNodeTypeError +from toscaparser.common.exception import MissingDefaultValueError +from toscaparser.common.exception import MissingRequiredFieldError +from toscaparser.common.exception import MissingRequiredInputError +from toscaparser.common.exception import UnknownFieldError +from toscaparser.common.exception import UnknownOutputError +from toscaparser.elements.nodetype import NodeType +from toscaparser.utils.gettextutils import _ + +log = logging.getLogger('tosca') + + +class SubstitutionMappings(object): + '''SubstitutionMappings class declaration + + SubstitutionMappings exports the topology template as an + implementation of a Node type. + ''' + + SECTIONS = (NODE_TYPE, REQUIREMENTS, CAPABILITIES) = \ + ('node_type', 'requirements', 'capabilities') + + OPTIONAL_OUTPUTS = ['tosca_id', 'tosca_name', 'state'] + + def __init__(self, sub_mapping_def, nodetemplates, inputs, outputs, + sub_mapped_node_template, custom_defs): + self.nodetemplates = nodetemplates + self.sub_mapping_def = sub_mapping_def + self.inputs = inputs or [] + self.outputs = outputs or [] + self.sub_mapped_node_template = sub_mapped_node_template + self.custom_defs = custom_defs or {} + self._validate() + + self._capabilities = None + self._requirements = None + + @property + def type(self): + if self.sub_mapping_def: + return self.sub_mapping_def.get(self.NODE_TYPE) + + @classmethod + def get_node_type(cls, sub_mapping_def): + if isinstance(sub_mapping_def, dict): + return sub_mapping_def.get(cls.NODE_TYPE) + + @property + def node_type(self): + return self.sub_mapping_def.get(self.NODE_TYPE) + + @property + def capabilities(self): + return self.sub_mapping_def.get(self.CAPABILITIES) + + @property + def 
requirements(self): + return self.sub_mapping_def.get(self.REQUIREMENTS) + + @property + def node_definition(self): + return NodeType(self.node_type, self.custom_defs) + + def _validate(self): + # Basic validation + self._validate_keys() + self._validate_type() + + # SubstitutionMapping class syntax validation + self._validate_inputs() + self._validate_capabilities() + self._validate_requirements() + self._validate_outputs() + + def _validate_keys(self): + """validate the keys of substitution mappings.""" + for key in self.sub_mapping_def.keys(): + if key not in self.SECTIONS: + ValidationIssueCollector.appendException( + UnknownFieldError(what=_('SubstitutionMappings'), + field=key)) + + def _validate_type(self): + """validate the node_type of substitution mappings.""" + node_type = self.sub_mapping_def.get(self.NODE_TYPE) + if not node_type: + ValidationIssueCollector.appendException( + MissingRequiredFieldError( + what=_('SubstitutionMappings used in topology_template'), + required=self.NODE_TYPE)) + + node_type_def = self.custom_defs.get(node_type) + if not node_type_def: + ValidationIssueCollector.appendException( + InvalidNodeTypeError(what=node_type)) + + def _validate_inputs(self): + """validate the inputs of substitution mappings. + + The inputs defined by the topology template have to match the + properties of the node type or the substituted node. If there are + more inputs than the substituted node has properties, default values + must be defined for those inputs. + """ + + all_inputs = set([input.name for input in self.inputs]) + required_properties = set([p.name for p in + self.node_definition. + get_properties_def_objects() + if p.required and p.default is None]) + # Must provide inputs for required properties of node type. 
+ for property in required_properties: + # Check property which is 'required' and has no 'default' value + if property not in all_inputs: + ValidationIssueCollector.appendException( + MissingRequiredInputError( + what=_('SubstitutionMappings with node_type ') + + self.node_type, + input_name=property)) + + # If the optional properties of node type need to be customized by + # substituted node, it also is necessary to define inputs for them, + # otherwise they are not mandatory to be defined. + customized_parameters = set(self.sub_mapped_node_template + .get_properties().keys() + if self.sub_mapped_node_template else []) + all_properties = set(self.node_definition.get_properties_def()) + for parameter in customized_parameters - all_inputs: + if parameter in all_properties: + ValidationIssueCollector.appendException( + MissingRequiredInputError( + what=_('SubstitutionMappings with node_type ') + + self.node_type, + input_name=parameter)) + + # Additional inputs are not in the properties of node type must + # provide default values. Currently the scenario may not happen + # because of parameters validation in nodetemplate, here is a + # guarantee. + for input in self.inputs: + if input.name in all_inputs - all_properties \ + and input.default is None: + ValidationIssueCollector.appendException( + MissingDefaultValueError( + what=_('SubstitutionMappings with node_type ') + + self.node_type, + input_name=input.name)) + + def _validate_capabilities(self): + """validate the capabilities of substitution mappings.""" + + # The capabilites must be in node template wchich be mapped. 
+ tpls_capabilities = self.sub_mapping_def.get(self.CAPABILITIES) + node_capabiliteys = self.sub_mapped_node_template.get_capabilities() \ + if self.sub_mapped_node_template else None + for cap in node_capabiliteys.keys() if node_capabiliteys else []: + if (tpls_capabilities and + cap not in list(tpls_capabilities.keys())): + pass + # ValidationIssueCollector.appendException( + # UnknownFieldError(what='SubstitutionMappings', + # field=cap)) + + def _validate_requirements(self): + """validate the requirements of substitution mappings.""" + + # The requirements must be in node template wchich be mapped. + tpls_requirements = self.sub_mapping_def.get(self.REQUIREMENTS) + node_requirements = self.sub_mapped_node_template.requirements \ + if self.sub_mapped_node_template else None + for req in node_requirements if node_requirements else []: + if (tpls_requirements and + req not in list(tpls_requirements.keys())): + pass + # ValidationIssueCollector.appendException( + # UnknownFieldError(what='SubstitutionMappings', + # field=req)) + + def _validate_outputs(self): + """validate the outputs of substitution mappings. + + The outputs defined by the topology template have to match the + attributes of the node type or the substituted node template, + and the observable attributes of the substituted node template + have to be defined as attributes of the node type or outputs in + the topology template. + """ + + # The outputs defined by the topology template have to match the + # attributes of the node type according to the specification, but + # it's reasonable that there are more inputs than the node type + # has properties, the specification will be amended? 
+ for output in self.outputs: + if output.name not in self.node_definition.get_attributes_def(): + ValidationIssueCollector.appendException( + UnknownOutputError( + where=_('SubstitutionMappings with node_type ') + + self.node_type, + output_name=output.name))*/ diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/TopologyTemplate.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/TopologyTemplate.java new file mode 100644 index 0000000..efc6948 --- /dev/null +++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/TopologyTemplate.java @@ -0,0 +1,866 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + +package org.onap.sdc.toscaparser.api; + +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.elements.InterfacesDef; +import org.onap.sdc.toscaparser.api.elements.NodeType; +import org.onap.sdc.toscaparser.api.elements.RelationshipType; +import org.onap.sdc.toscaparser.api.functions.Function; +import org.onap.sdc.toscaparser.api.functions.GetAttribute; +import org.onap.sdc.toscaparser.api.functions.GetInput; +import org.onap.sdc.toscaparser.api.parameters.Input; +import org.onap.sdc.toscaparser.api.parameters.Output; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +import java.util.ArrayList; +import java.util.HashSet; +import java.util.LinkedHashMap; +import java.util.Map; + +public class TopologyTemplate { + + private static final String DESCRIPTION = "description"; + private static final String INPUTS = "inputs"; + private static final String NODE_TEMPLATES = "node_templates"; + private static final String RELATIONSHIP_TEMPLATES = "relationship_templates"; + private static final String OUTPUTS = "outputs"; + private static final String GROUPS = "groups"; + private static final String SUBSTITUTION_MAPPINGS = "substitution_mappings"; + private static final String POLICIES = "policies"; + private static final String METADATA = "metadata"; + + private static String[] SECTIONS = { + DESCRIPTION, INPUTS, NODE_TEMPLATES, RELATIONSHIP_TEMPLATES, + OUTPUTS, GROUPS, SUBSTITUTION_MAPPINGS, POLICIES, METADATA + }; + + private LinkedHashMap tpl; + LinkedHashMap metaData; + private ArrayList inputs; + private ArrayList outputs; + private ArrayList relationshipTemplates; + private ArrayList nodeTemplates; + private LinkedHashMap customDefs; + private LinkedHashMap relTypes;//TYPE + private NodeTemplate subMappedNodeTemplate; + private ArrayList groups; + private ArrayList policies; + private LinkedHashMap parsedParams 
= null;//TYPE + private String description; + private ToscaGraph graph; + private SubstitutionMappings substitutionMappings; + private boolean resolveGetInput; + + public TopologyTemplate( + LinkedHashMap _template, + LinkedHashMap _customDefs, + LinkedHashMap _relTypes,//TYPE + LinkedHashMap _parsedParams, + NodeTemplate _subMappedNodeTemplate, + boolean _resolveGetInput) { + + tpl = _template; + if (tpl != null) { + subMappedNodeTemplate = _subMappedNodeTemplate; + metaData = _metaData(); + customDefs = _customDefs; + relTypes = _relTypes; + parsedParams = _parsedParams; + resolveGetInput = _resolveGetInput; + _validateField(); + description = _tplDescription(); + inputs = _inputs(); + relationshipTemplates = _relationshipTemplates(); + //todo: pass subMappedNodeTemplate to ET constractor + nodeTemplates = _nodeTemplates(); + outputs = _outputs(); + if (nodeTemplates != null) { + graph = new ToscaGraph(nodeTemplates); + } + groups = _groups(); + policies = _policies(); + _processIntrinsicFunctions(); + substitutionMappings = _substitutionMappings(); + } + } + + @SuppressWarnings("unchecked") + private ArrayList _inputs() { + ArrayList alInputs = new ArrayList<>(); + for (String name : _tplInputs().keySet()) { + Object attrs = _tplInputs().get(name); + Input input = new Input(name, (LinkedHashMap) attrs, customDefs); + if (parsedParams != null && parsedParams.get(name) != null) { + input.validate(parsedParams.get(name)); + } else { + Object _default = input.getDefault(); + if (_default != null) { + input.validate(_default); + } + } + if ((parsedParams != null && parsedParams.get(input.getName()) == null || parsedParams == null) + && input.isRequired() && input.getDefault() == null) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE003", + String.format("MissingRequiredFieldError: The required input \"%s\" was not provided" + , input.getName())) + ); + } + alInputs.add(input); + } + return alInputs; + + } + + private 
LinkedHashMap _metaData() { + if (tpl.get(METADATA) != null) { + return (LinkedHashMap) tpl.get(METADATA); + } else { + return new LinkedHashMap(); + } + + } + + private ArrayList _nodeTemplates() { + ArrayList alNodeTemplates = new ArrayList<>(); + LinkedHashMap tpls = _tplNodeTemplates(); + if (tpls != null) { + for (String name : tpls.keySet()) { + NodeTemplate tpl = new NodeTemplate(name, + tpls, + customDefs, + relationshipTemplates, + relTypes, + subMappedNodeTemplate); + if (tpl.getTypeDefinition() != null) { + boolean b = NodeType.TOSCA_DEF.get(tpl.getType()) != null; + if (b || (tpl.getCustomDef() != null && !tpl.getCustomDef().isEmpty())) { + tpl.validate(); + alNodeTemplates.add(tpl); + } + } + } + } + return alNodeTemplates; + } + + @SuppressWarnings("unchecked") + private ArrayList _relationshipTemplates() { + ArrayList alRelationshipTemplates = new ArrayList<>(); + LinkedHashMap tpls = _tplRelationshipTemplates(); + if (tpls != null) { + for (String name : tpls.keySet()) { + RelationshipTemplate tpl = new RelationshipTemplate( + (LinkedHashMap) tpls.get(name), name, customDefs, null, null, subMappedNodeTemplate); + + alRelationshipTemplates.add(tpl); + } + } + return alRelationshipTemplates; + } + + private ArrayList _outputs() { + ArrayList alOutputs = new ArrayList<>(); + for (Map.Entry me : _tplOutputs().entrySet()) { + String oname = me.getKey(); + LinkedHashMap oattrs = (LinkedHashMap) me.getValue(); + Output o = new Output(oname, oattrs); + o.validate(); + alOutputs.add(o); + } + return alOutputs; + } + + private SubstitutionMappings _substitutionMappings() { + LinkedHashMap tplSubstitutionMapping = (LinkedHashMap) _tplSubstitutionMappings(); + + //*** the commenting-out below and the weaker condition are in the Python source + // #if tpl_substitution_mapping and self.sub_mapped_node_template: + if (tplSubstitutionMapping != null && tplSubstitutionMapping.size() > 0) { + return new SubstitutionMappings(tplSubstitutionMapping, + nodeTemplates, + 
inputs, + outputs, + groups, + subMappedNodeTemplate, + customDefs); + } + return null; + + } + + @SuppressWarnings("unchecked") + private ArrayList _policies() { + ArrayList alPolicies = new ArrayList<>(); + for (Map.Entry me : _tplPolicies().entrySet()) { + String policyName = me.getKey(); + LinkedHashMap policyTpl = (LinkedHashMap) me.getValue(); + ArrayList targetList = (ArrayList) policyTpl.get("targets"); + ArrayList targetNodes = new ArrayList<>(); + ArrayList targetObjects = new ArrayList<>(); + ArrayList targetGroups = new ArrayList<>(); + String targetsType = "groups"; + if (targetList != null && targetList.size() >= 1) { + targetGroups = _getPolicyGroups(targetList); + if (targetGroups == null || targetGroups.isEmpty()) { + targetsType = "node_templates"; + targetNodes = _getGroupMembers(targetList); + for (NodeTemplate nt : targetNodes) { + targetObjects.add(nt); + } + } else { + for (Group gr : targetGroups) { + targetObjects.add(gr); + } + } + } + Policy policyObj = new Policy(policyName, + policyTpl, + targetObjects, + targetsType, + customDefs, + subMappedNodeTemplate); + alPolicies.add(policyObj); + } + return alPolicies; + } + + private ArrayList _groups() { + ArrayList groups = new ArrayList<>(); + ArrayList memberNodes = null; + for (Map.Entry me : _tplGroups().entrySet()) { + String groupName = me.getKey(); + LinkedHashMap groupTpl = (LinkedHashMap) me.getValue(); + ArrayList memberNames = (ArrayList) groupTpl.get("members"); + if (memberNames != null) { + DataEntity.validateDatatype("list", memberNames, null, null, null); + if (memberNames.size() < 1 || + (new HashSet(memberNames)).size() != memberNames.size()) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE005", String.format( + "InvalidGroupTargetException: Member nodes \"%s\" should be >= 1 and not repeated", + memberNames.toString()))); + } else { + memberNodes = _getGroupMembers(memberNames); + } + } + Group group = new Group(groupName, + 
groupTpl, + memberNodes, + customDefs, subMappedNodeTemplate); + groups.add(group); + } + return groups; + } + + private ArrayList _getGroupMembers(ArrayList memberNames) { + ArrayList memberNodes = new ArrayList<>(); + _validateGroupMembers(memberNames); + for (String member : memberNames) { + for (NodeTemplate node : nodeTemplates) { + if (member.equals(node.getName())) { + memberNodes.add(node); + } + } + } + return memberNodes; + } + + private ArrayList _getPolicyGroups(ArrayList memberNames) { + ArrayList memberGroups = new ArrayList<>(); + for (String member : memberNames) { + for (Group group : groups) { + if (member.equals(group.getName())) { + memberGroups.add(group); + } + } + } + return memberGroups; + } + + private void _validateGroupMembers(ArrayList members) { + ArrayList nodeNames = new ArrayList<>(); + for (NodeTemplate node : nodeTemplates) { + nodeNames.add(node.getName()); + } + for (String member : members) { + if (!nodeNames.contains(member)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE239", String.format( + "InvalidGroupTargetException: Target member \"%s\" is not found in \"nodeTemplates\"", member))); + } + } + } + + // topology template can act like node template + // it is exposed by substitution_mappings. 
+ + public String nodetype() { + return substitutionMappings.getNodeType(); + } + + public LinkedHashMap capabilities() { + return substitutionMappings.getCapabilities(); + } + + public LinkedHashMap requirements() { + return substitutionMappings.getRequirements(); + } + + private String _tplDescription() { + return (String) tpl.get(DESCRIPTION); + //if description: + // return description.rstrip() + } + + @SuppressWarnings("unchecked") + private LinkedHashMap _tplInputs() { + if (tpl.get(INPUTS) != null) { + return (LinkedHashMap) tpl.get(INPUTS); + } + return new LinkedHashMap(); + } + + @SuppressWarnings("unchecked") + private LinkedHashMap _tplNodeTemplates() { + return (LinkedHashMap) tpl.get(NODE_TEMPLATES); + } + + @SuppressWarnings("unchecked") + private LinkedHashMap _tplRelationshipTemplates() { + if (tpl.get(RELATIONSHIP_TEMPLATES) != null) { + return (LinkedHashMap) tpl.get(RELATIONSHIP_TEMPLATES); + } + return new LinkedHashMap(); + } + + @SuppressWarnings("unchecked") + private LinkedHashMap _tplOutputs() { + if (tpl.get(OUTPUTS) != null) { + return (LinkedHashMap) tpl.get(OUTPUTS); + } + return new LinkedHashMap(); + } + + @SuppressWarnings("unchecked") + private LinkedHashMap _tplSubstitutionMappings() { + if (tpl.get(SUBSTITUTION_MAPPINGS) != null) { + return (LinkedHashMap) tpl.get(SUBSTITUTION_MAPPINGS); + } + return new LinkedHashMap(); + } + + @SuppressWarnings("unchecked") + private LinkedHashMap _tplGroups() { + if (tpl.get(GROUPS) != null) { + return (LinkedHashMap) tpl.get(GROUPS); + } + return new LinkedHashMap(); + } + + @SuppressWarnings("unchecked") + private LinkedHashMap _tplPolicies() { + if (tpl.get(POLICIES) != null) { + return (LinkedHashMap) tpl.get(POLICIES); + } + return new LinkedHashMap<>(); + } + + private void _validateField() { + for (String name : tpl.keySet()) { + boolean bFound = false; + for (String section : SECTIONS) { + if (name.equals(section)) { + bFound = true; + break; + } + } + if (!bFound) { + 
ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE240", String.format( + "UnknownFieldError: TopologyTemplate contains unknown field \"%s\"", name))); + } + } + } + + @SuppressWarnings("unchecked") + private void _processIntrinsicFunctions() { + // Process intrinsic functions + + // Current implementation processes functions within node template + // properties, requirements, interfaces inputs and template outputs. + + if (nodeTemplates != null) { + for (NodeTemplate nt : nodeTemplates) { + for (Property prop : nt.getPropertiesObjects()) { + prop.setValue(Function.getFunction(this, nt, prop.getValue(), resolveGetInput)); + } + for (InterfacesDef ifd : nt.getInterfaces()) { + LinkedHashMap ifin = ifd.getInputs(); + if (ifin != null) { + for (Map.Entry me : ifin.entrySet()) { + String name = me.getKey(); + Object value = Function.getFunction(this, nt, me.getValue(), resolveGetInput); + ifd.setInput(name, value); + } + } + } + if (nt.getRequirements() != null) { + for (RequirementAssignment req : nt.getRequirements().getAll()) { + LinkedHashMap rel; + Object t = req.getRelationship(); + // it can be a string or a LHM... 
+ if (t instanceof LinkedHashMap) { + rel = (LinkedHashMap) t; + } else { + // we set it to null to fail the next test + // and avoid the get("proprties") + rel = null; + } + + if (rel != null && rel.get("properties") != null) { + LinkedHashMap relprops = + (LinkedHashMap) rel.get("properties"); + for (String key : relprops.keySet()) { + Object value = relprops.get(key); + Object func = Function.getFunction(this, req, value, resolveGetInput); + relprops.put(key, func); + } + } + } + } + if (nt.getCapabilitiesObjects() != null) { + for (CapabilityAssignment cap : nt.getCapabilitiesObjects()) { + if (cap.getPropertiesObjects() != null) { + for (Property prop : cap.getPropertiesObjects()) { + Object propvalue = Function.getFunction(this, nt, prop.getValue(), resolveGetInput); + if (propvalue instanceof GetInput) { + propvalue = ((GetInput) propvalue).result(); + for (String p : cap.getProperties().keySet()) { + //Object v = cap.getProperties().get(p); + if (p.equals(prop.getName())) { + cap.setProperty(p, propvalue); + } + } + } + } + } + } + } + for (RelationshipType rel : nt.getRelationships().keySet()) { + NodeTemplate node = nt.getRelationships().get(rel); + ArrayList relTpls = node.getRelationshipTemplate(); + if (relTpls != null) { + for (RelationshipTemplate relTpl : relTpls) { + // TT 5 + for (InterfacesDef iface : relTpl.getInterfaces()) { + if (iface.getInputs() != null) { + for (String name : iface.getInputs().keySet()) { + Object value = iface.getInputs().get(name); + Object func = Function.getFunction( + this, + relTpl, + value, + resolveGetInput); + iface.setInput(name, func); + } + } + } + } + } + } + } + } + for (Output output : outputs) { + Object func = Function.getFunction(this, outputs, output.getValue(), resolveGetInput); + if (func instanceof GetAttribute) { + output.setAttr(Output.VALUE, func); + } + } + } + + public static String getSubMappingNodeType(LinkedHashMap topologyTpl) { + if (topologyTpl != null && topologyTpl instanceof 
LinkedHashMap) { + Object submapTpl = topologyTpl.get(SUBSTITUTION_MAPPINGS); + return SubstitutionMappings.stGetNodeType((LinkedHashMap) submapTpl); + } + return null; + } + + // getters + + public LinkedHashMap getTpl() { + return tpl; + } + + public LinkedHashMap getMetadata() { + return metaData; + } + + public ArrayList getInputs() { + return inputs; + } + + public ArrayList getOutputs() { + return outputs; + } + + public ArrayList getPolicies() { + return policies; + } + + public ArrayList getRelationshipTemplates() { + return relationshipTemplates; + } + + public ArrayList getNodeTemplates() { + return nodeTemplates; + } + + public ArrayList getGroups() { + return groups; + } + + public SubstitutionMappings getSubstitutionMappings() { + return substitutionMappings; + } + + public LinkedHashMap getParsedParams() { + return parsedParams; + } + + public boolean getResolveGetInput() { + return resolveGetInput; + } + + public LinkedHashMap getCustomDefs() { + return customDefs; + } +} + +/*python + +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ + +import logging + +from toscaparser.common import exception +from toscaparser.dataentity import DataEntity +from toscaparser import functions +from toscaparser.groups import Group +from toscaparser.nodetemplate import NodeTemplate +from toscaparser.parameters import Input +from toscaparser.parameters import Output +from toscaparser.policy import Policy +from toscaparser.relationship_template import RelationshipTemplate +from toscaparser.substitution_mappings import SubstitutionMappings +from toscaparser.tpl_relationship_graph import ToscaGraph +from toscaparser.utils.gettextutils import _ + + +# Topology template key names +SECTIONS = (DESCRIPTION, INPUTS, NODE_TEMPLATES, + RELATIONSHIP_TEMPLATES, OUTPUTS, GROUPS, + SUBSTITUION_MAPPINGS, POLICIES) = \ + ('description', 'inputs', 'node_templates', + 'relationship_templates', 'outputs', 'groups', + 'substitution_mappings', 'policies') + +log = logging.getLogger("tosca.model") + + +class TopologyTemplate(object): + + '''Load the template data.''' + def __init__(self, template, custom_defs, + rel_types=None, parsed_params=None, + sub_mapped_node_template=None): + self.tpl = template + self.sub_mapped_node_template = sub_mapped_node_template + if self.tpl: + self.custom_defs = custom_defs + self.rel_types = rel_types + self.parsed_params = parsed_params + self._validate_field() + self.description = self._tpl_description() + self.inputs = self._inputs() + self.relationship_templates = self._relationship_templates() + self.nodetemplates = self._nodetemplates() + self.outputs = self._outputs() + if hasattr(self, 'nodetemplates'): + self.graph = ToscaGraph(self.nodetemplates) + self.groups = self._groups() + self.policies = self._policies() + self._process_intrinsic_functions() + self.substitution_mappings = self._substitution_mappings() + + def _inputs(self): + inputs = [] + for name, attrs in self._tpl_inputs().items(): + input = Input(name, attrs) + if self.parsed_params and name in self.parsed_params: + 
input.validate(self.parsed_params[name]) + else: + default = input.default + if default: + input.validate(default) + if (self.parsed_params and input.name not in self.parsed_params + or self.parsed_params is None) and input.required \ + and input.default is None: + log.warning(_('The required parameter %s ' + 'is not provided') % input.name) + + inputs.append(input) + return inputs + + def _nodetemplates(self): + nodetemplates = [] + tpls = self._tpl_nodetemplates() + if tpls: + for name in tpls: + tpl = NodeTemplate(name, tpls, self.custom_defs, + self.relationship_templates, + self.rel_types) + if (tpl.type_definition and + (tpl.type in tpl.type_definition.TOSCA_DEF or + (tpl.type not in tpl.type_definition.TOSCA_DEF and + bool(tpl.custom_def)))): + tpl.validate(self) + nodetemplates.append(tpl) + return nodetemplates + + def _relationship_templates(self): + rel_templates = [] + tpls = self._tpl_relationship_templates() + for name in tpls: + tpl = RelationshipTemplate(tpls[name], name, self.custom_defs) + rel_templates.append(tpl) + return rel_templates + + def _outputs(self): + outputs = [] + for name, attrs in self._tpl_outputs().items(): + output = Output(name, attrs) + output.validate() + outputs.append(output) + return outputs + + def _substitution_mappings(self): + tpl_substitution_mapping = self._tpl_substitution_mappings() + # if tpl_substitution_mapping and self.sub_mapped_node_template: + if tpl_substitution_mapping: + return SubstitutionMappings(tpl_substitution_mapping, + self.nodetemplates, + self.inputs, + self.outputs, + self.sub_mapped_node_template, + self.custom_defs) + + def _policies(self): + policies = [] + for policy in self._tpl_policies(): + for policy_name, policy_tpl in policy.items(): + target_list = policy_tpl.get('targets') + if target_list and len(target_list) >= 1: + target_objects = [] + targets_type = "groups" + target_objects = self._get_policy_groups(target_list) + if not target_objects: + targets_type = "node_templates" + 
target_objects = self._get_group_members(target_list) + policyObj = Policy(policy_name, policy_tpl, + target_objects, targets_type, + self.custom_defs) + policies.append(policyObj) + return policies + + def _groups(self): + groups = [] + member_nodes = None + for group_name, group_tpl in self._tpl_groups().items(): + member_names = group_tpl.get('members') + if member_names is not None: + DataEntity.validate_datatype('list', member_names) + if len(member_names) < 1 or \ + len(member_names) != len(set(member_names)): + exception.ValidationIssueCollector.appendException( + exception.InvalidGroupTargetException( + message=_('Member nodes "%s" should be >= 1 ' + 'and not repeated') % member_names)) + else: + member_nodes = self._get_group_members(member_names) + group = Group(group_name, group_tpl, + member_nodes, + self.custom_defs) + groups.append(group) + return groups + + def _get_group_members(self, member_names): + member_nodes = [] + self._validate_group_members(member_names) + for member in member_names: + for node in self.nodetemplates: + if node.name == member: + member_nodes.append(node) + return member_nodes + + def _get_policy_groups(self, member_names): + member_groups = [] + for member in member_names: + for group in self.groups: + if group.name == member: + member_groups.append(group) + return member_groups + + def _validate_group_members(self, members): + node_names = [] + for node in self.nodetemplates: + node_names.append(node.name) + for member in members: + if member not in node_names: + exception.ValidationIssueCollector.appendException( + exception.InvalidGroupTargetException( + message=_('Target member "%s" is not found in ' + 'node_templates') % member)) + + # topology template can act like node template + # it is exposed by substitution_mappings. 
+ def nodetype(self): + return self.substitution_mappings.node_type \ + if self.substitution_mappings else None + + def capabilities(self): + return self.substitution_mappings.capabilities \ + if self.substitution_mappings else None + + def requirements(self): + return self.substitution_mappings.requirements \ + if self.substitution_mappings else None + + def _tpl_description(self): + description = self.tpl.get(DESCRIPTION) + if description: + return description.rstrip() + + def _tpl_inputs(self): + return self.tpl.get(INPUTS) or {} + + def _tpl_nodetemplates(self): + return self.tpl.get(NODE_TEMPLATES) + + def _tpl_relationship_templates(self): + return self.tpl.get(RELATIONSHIP_TEMPLATES) or {} + + def _tpl_outputs(self): + return self.tpl.get(OUTPUTS) or {} + + def _tpl_substitution_mappings(self): + return self.tpl.get(SUBSTITUION_MAPPINGS) or {} + + def _tpl_groups(self): + return self.tpl.get(GROUPS) or {} + + def _tpl_policies(self): + return self.tpl.get(POLICIES) or {} + + def _validate_field(self): + for name in self.tpl: + if name not in SECTIONS: + exception.ValidationIssueCollector.appendException( + exception.UnknownFieldError(what='Template', field=name)) + + def _process_intrinsic_functions(self): + """Process intrinsic functions + + Current implementation processes functions within node template + properties, requirements, interfaces inputs and template outputs. 
+ """ + if hasattr(self, 'nodetemplates'): + for node_template in self.nodetemplates: + for prop in node_template.get_properties_objects(): + prop.value = functions.get_function(self, + node_template, + prop.value) + for interface in node_template.interfaces: + if interface.inputs: + for name, value in interface.inputs.items(): + interface.inputs[name] = functions.get_function( + self, + node_template, + value) + if node_template.requirements and \ + isinstance(node_template.requirements, list): + for req in node_template.requirements: + rel = req + for req_name, req_item in req.items(): + if isinstance(req_item, dict): + rel = req_item.get('relationship') + break + if rel and 'properties' in rel: + for key, value in rel['properties'].items(): + rel['properties'][key] = \ + functions.get_function(self, + req, + value) + if node_template.get_capabilities_objects(): + for cap in node_template.get_capabilities_objects(): + if cap.get_properties_objects(): + for prop in cap.get_properties_objects(): + propvalue = functions.get_function( + self, + node_template, + prop.value) + if isinstance(propvalue, functions.GetInput): + propvalue = propvalue.result() + for p, v in cap._properties.items(): + if p == prop.name: + cap._properties[p] = propvalue + for rel, node in node_template.relationships.items(): + rel_tpls = node.relationship_tpl + if rel_tpls: + for rel_tpl in rel_tpls: + for interface in rel_tpl.interfaces: + if interface.inputs: + for name, value in \ + interface.inputs.items(): + interface.inputs[name] = \ + functions.get_function(self, + rel_tpl, + value) + for output in self.outputs: + func = functions.get_function(self, self.outputs, output.value) + if isinstance(func, functions.GetAttribute): + output.attrs[output.VALUE] = func + + @classmethod + def get_sub_mapping_node_type(cls, topology_tpl): + if topology_tpl and isinstance(topology_tpl, dict): + submap_tpl = topology_tpl.get(SUBSTITUION_MAPPINGS) + return SubstitutionMappings.get_node_type(submap_tpl) 
+*/ diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/ToscaGraph.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/ToscaGraph.java new file mode 100644 index 0000000..1706cdc --- /dev/null +++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/ToscaGraph.java @@ -0,0 +1,129 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + +package org.onap.sdc.toscaparser.api; + +import org.onap.sdc.toscaparser.api.elements.RelationshipType; + +import java.util.ArrayList; +import java.util.LinkedHashMap; + +//import java.util.Iterator; + +public class ToscaGraph { + // Graph of Tosca Node Templates + + private ArrayList nodeTemplates; + private LinkedHashMap vertices; + + public ToscaGraph(ArrayList inodeTemplates) { + nodeTemplates = inodeTemplates; + vertices = new LinkedHashMap(); + create(); + } + + private void createVertex(NodeTemplate node) { + if (vertices.get(node.getName()) == null) { + vertices.put(node.getName(), node); + } + } + + private void createEdge(NodeTemplate node1, + NodeTemplate node2, + RelationshipType relation) { + if (vertices.get(node1.getName()) == null) { + createVertex(node1); + vertices.get(node1.name)._addNext(node2, relation); + } + } + + public NodeTemplate vertex(String name) { + if (vertices.get(name) != null) { + return vertices.get(name); + } + return null; + } + +// public Iterator getIter() { +// return vertices.values().iterator(); +// } + + private void create() { + for (NodeTemplate node : nodeTemplates) { + LinkedHashMap relation = node.getRelationships(); + if (relation != null) { + for (RelationshipType rel : relation.keySet()) { + NodeTemplate nodeTpls = relation.get(rel); + for (NodeTemplate tpl : nodeTemplates) { + if (tpl.getName().equals(nodeTpls.getName())) { + createEdge(node, tpl, rel); + } + } + } + } + createVertex(node); + } + } + + @Override + public String toString() { + return "ToscaGraph{" + + "nodeTemplates=" + nodeTemplates + + ", vertices=" + vertices + + '}'; + } +} + +/*python + +class ToscaGraph(object): + '''Graph of Tosca Node Templates.''' + def __init__(self, nodetemplates): + self.nodetemplates = nodetemplates + self.vertices = {} + self._create() + + def _create_vertex(self, node): + if node not in self.vertices: + 
self.vertices[node.name] = node + + def _create_edge(self, node1, node2, relationship): + if node1 not in self.vertices: + self._create_vertex(node1) + self.vertices[node1.name]._add_next(node2, + relationship) + + def vertex(self, node): + if node in self.vertices: + return self.vertices[node] + + def __iter__(self): + return iter(self.vertices.values()) + + def _create(self): + for node in self.nodetemplates: + relation = node.relationships + if relation: + for rel, nodetpls in relation.items(): + for tpl in self.nodetemplates: + if tpl.name == nodetpls.name: + self._create_edge(node, tpl, rel) + self._create_vertex(node) +*/ diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/ToscaTemplate.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/ToscaTemplate.java new file mode 100644 index 0000000..ddb8ddb --- /dev/null +++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/ToscaTemplate.java @@ -0,0 +1,1267 @@ +/*- + * ============LICENSE_START======================================================= + * Copyright (c) 2017 AT&T Intellectual Property. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * Modifications copyright (c) 2019 Fujitsu Limited. 
+ * ================================================================================ + */ +package org.onap.sdc.toscaparser.api; + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.io.InputStream; +import java.nio.file.Files; +import java.nio.file.Paths; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; +import java.util.function.Predicate; + +import org.onap.sdc.toscaparser.api.common.JToscaException; +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.common.ValidationIssueCollector; +import org.onap.sdc.toscaparser.api.elements.EntityType; +import org.onap.sdc.toscaparser.api.elements.DataType; +import org.onap.sdc.toscaparser.api.elements.Metadata; +import org.onap.sdc.toscaparser.api.extensions.ExtTools; +import org.onap.sdc.toscaparser.api.parameters.Input; +import org.onap.sdc.toscaparser.api.parameters.Output; +import org.onap.sdc.toscaparser.api.prereq.CSAR; +import org.onap.sdc.toscaparser.api.utils.JToscaErrorCodes; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.yaml.snakeyaml.Yaml; + +public class ToscaTemplate extends Object { + + public static final int MAX_LEVELS = 20; + private static Logger log = LoggerFactory.getLogger(ToscaTemplate.class.getName()); + + // TOSCA template key names + private static final String DEFINITION_VERSION = "tosca_definitions_version"; + private static final String DEFAULT_NAMESPACE = "tosca_default_namespace"; + private static final String TEMPLATE_NAME = "template_name"; + private static final String TOPOLOGY_TEMPLATE = "topology_template"; + private static final String TEMPLATE_AUTHOR = 
"template_author"; + private static final String TEMPLATE_VERSION = "template_version"; + private static final String DESCRIPTION = "description"; + private static final String IMPORTS = "imports"; + private static final String DSL_DEFINITIONS = "dsl_definitions"; + private static final String NODE_TYPES = "node_types"; + private static final String RELATIONSHIP_TYPES = "relationship_types"; + private static final String RELATIONSHIP_TEMPLATES = "relationship_templates"; + private static final String CAPABILITY_TYPES = "capability_types"; + private static final String ARTIFACT_TYPES = "artifact_types"; + private static final String DATA_TYPES = "data_types"; + private static final String INTERFACE_TYPES = "interface_types"; + private static final String POLICY_TYPES = "policy_types"; + private static final String GROUP_TYPES = "group_types"; + private static final String REPOSITORIES = "repositories"; + + private static String SECTIONS[] = { + DEFINITION_VERSION, DEFAULT_NAMESPACE, TEMPLATE_NAME, + TOPOLOGY_TEMPLATE, TEMPLATE_AUTHOR, TEMPLATE_VERSION, + DESCRIPTION, IMPORTS, DSL_DEFINITIONS, NODE_TYPES, + RELATIONSHIP_TYPES, RELATIONSHIP_TEMPLATES, + CAPABILITY_TYPES, ARTIFACT_TYPES, DATA_TYPES, + INTERFACE_TYPES, POLICY_TYPES, GROUP_TYPES, REPOSITORIES + }; + + // Sections that are specific to individual template definitions + private static final String METADATA = "metadata"; + private static ArrayList SPECIAL_SECTIONS; + + private ExtTools exttools = new ExtTools(); + + private ArrayList VALID_TEMPLATE_VERSIONS; + private LinkedHashMap> ADDITIONAL_SECTIONS; + + private boolean isFile; + private String path; + private String inputPath; + private String rootPath; + private LinkedHashMap parsedParams; + private boolean resolveGetInput; + private LinkedHashMap tpl; + private String version; + private ArrayList imports; + private LinkedHashMap relationshipTypes; + private Metadata metaData; + private String description; + private TopologyTemplate topologyTemplate; + 
private ArrayList repositories; + private ArrayList inputs; + private ArrayList relationshipTemplates; + private ArrayList nodeTemplates; + private ArrayList outputs; + private ArrayList policies; + private ArrayList groups; + private ConcurrentHashMap nestedToscaTplsWithTopology; + private ArrayList nestedToscaTemplatesWithTopology; + private ToscaGraph graph; + private String csarTempDir; + private int nestingLoopCounter; + private LinkedHashMap> metaProperties; + private Set processedImports; + private LinkedHashMap customDefsFinal = new LinkedHashMap<>(); + private HashSet dataTypes; + + public ToscaTemplate(String _path, + LinkedHashMap _parsedParams, + boolean aFile, + LinkedHashMap yamlDictTpl) throws JToscaException { + init(_path, _parsedParams, aFile, yamlDictTpl, true); + } + + public ToscaTemplate(String _path, + LinkedHashMap _parsedParams, + boolean aFile, + LinkedHashMap yamlDictTpl, boolean resolveGetInput) throws JToscaException { + init(_path, _parsedParams, aFile, yamlDictTpl, resolveGetInput); + } + + @SuppressWarnings("unchecked") + private void init(String _path, + LinkedHashMap _parsedParams, + boolean aFile, + LinkedHashMap yamlDictTpl, boolean _resolveGetInput) throws JToscaException { + + ThreadLocalsHolder.setCollector(new ValidationIssueCollector()); + + VALID_TEMPLATE_VERSIONS = new ArrayList<>(); + VALID_TEMPLATE_VERSIONS.add("tosca_simple_yaml_1_0"); + VALID_TEMPLATE_VERSIONS.add("tosca_simple_yaml_1_1"); + VALID_TEMPLATE_VERSIONS.addAll(exttools.getVersions()); + ADDITIONAL_SECTIONS = new LinkedHashMap<>(); + SPECIAL_SECTIONS = new ArrayList<>(); + SPECIAL_SECTIONS.add(METADATA); + ADDITIONAL_SECTIONS.put("tosca_simple_yaml_1_0", SPECIAL_SECTIONS); + ADDITIONAL_SECTIONS.put("tosca_simple_yaml_1_1", SPECIAL_SECTIONS); + ADDITIONAL_SECTIONS.putAll(exttools.getSections()); + + //long startTime = System.nanoTime(); + + + isFile = aFile; + inputPath = null; + path = null; + tpl = null; + csarTempDir = null; + nestedToscaTplsWithTopology = 
new ConcurrentHashMap<>(); + nestedToscaTemplatesWithTopology = new ArrayList(); + resolveGetInput = _resolveGetInput; + metaProperties = new LinkedHashMap<>(); + + if (_path != null && !_path.isEmpty()) { + // save the original input path + inputPath = _path; + // get the actual path (will change with CSAR) + path = _getPath(_path); + // load the YAML template + if (path != null && !path.isEmpty()) { + try (InputStream input = new FileInputStream(new File(path));) { + //System.out.println("Loading YAML file " + path); + log.debug("ToscaTemplate Loading YAMEL file {}", path); + Yaml yaml = new Yaml(); + Object data = yaml.load(input); + this.tpl = (LinkedHashMap) data; + } catch (FileNotFoundException e) { + log.error("ToscaTemplate - Exception loading yaml: {}", e.getMessage()); + log.error("Exception", e); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE275", + "ToscaTemplate - Exception loading yaml: -> " + e.getMessage())); + return; + } catch (Exception e) { + log.error("ToscaTemplate - Error loading yaml, aborting -> ", e.getMessage()); + log.error("Exception", e); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE275", + "ToscaTemplate - Error loading yaml, aborting -> " + e.getMessage())); + return; + } + + if (yamlDictTpl != null) { + //msg = (_('Both path and yaml_dict_tpl arguments were ' + // 'provided. Using path and ignoring yaml_dict_tpl.')) + //log.info(msg) + log.debug("ToscaTemplate - Both path and yaml_dict_tpl arguments were provided. Using path and ignoring yaml_dict_tpl"); + } + } else { + // no input to process... + _abort(); + } + } else { + if (yamlDictTpl != null) { + tpl = yamlDictTpl; + } else { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE244", + "ValueError: No path or yaml_dict_tpl was provided. There is nothing to parse")); + log.debug("ToscaTemplate ValueError: No path or yaml_dict_tpl was provided. 
There is nothing to parse"); + + } + } + + if (tpl != null) { + parsedParams = _parsedParams; + _validateField(); + this.rootPath = path; + this.processedImports = new HashSet(); + this.imports = _tplImports(); + this.version = _tplVersion(); + this.metaData = _tplMetaData(); + this.relationshipTypes = _tplRelationshipTypes(); + this.description = _tplDescription(); + this.dataTypes = getTopologyDataTypes(); + this.topologyTemplate = _topologyTemplate(); + this.repositories = _tplRepositories(); + if (topologyTemplate.getTpl() != null) { + this.inputs = _inputs(); + this.relationshipTemplates = _relationshipTemplates(); + this.nodeTemplates = _nodeTemplates(); + this.outputs = _outputs(); + this.policies = _policies(); + this.groups = _groups(); +// _handleNestedToscaTemplatesWithTopology(); + _handleNestedToscaTemplatesWithTopology(topologyTemplate); + graph = new ToscaGraph(nodeTemplates); + } + } + + if (csarTempDir != null) { + CSAR.deleteDir(new File(csarTempDir)); + csarTempDir = null; + } + + verifyTemplate(); + + } + + private void _abort() throws JToscaException { + // print out all exceptions caught + verifyTemplate(); + throw new JToscaException("jtosca aborting", JToscaErrorCodes.PATH_NOT_VALID.getValue()); + } + + private TopologyTemplate _topologyTemplate() { + return new TopologyTemplate( + _tplTopologyTemplate(), + _getAllCustomDefs(imports), + relationshipTypes, + parsedParams, + null, + resolveGetInput); + } + + private ArrayList _inputs() { + return topologyTemplate.getInputs(); + } + + private ArrayList _nodeTemplates() { + return topologyTemplate.getNodeTemplates(); + } + + private ArrayList _relationshipTemplates() { + return topologyTemplate.getRelationshipTemplates(); + } + + private ArrayList _outputs() { + return topologyTemplate.getOutputs(); + } + + private String _tplVersion() { + return (String) tpl.get(DEFINITION_VERSION); + } + + @SuppressWarnings("unchecked") + private Metadata _tplMetaData() { + Object mdo = tpl.get(METADATA); + if 
(mdo instanceof LinkedHashMap) { + return new Metadata((Map) mdo); + } else { + return null; + } + } + + private String _tplDescription() { + return (String) tpl.get(DESCRIPTION); + } + + @SuppressWarnings("unchecked") + private ArrayList _tplImports() { + return (ArrayList) tpl.get(IMPORTS); + } + + @SuppressWarnings("unchecked") + private ArrayList _tplRepositories() { + LinkedHashMap repositories = + (LinkedHashMap) tpl.get(REPOSITORIES); + ArrayList reposit = new ArrayList<>(); + if (repositories != null) { + for (Map.Entry me : repositories.entrySet()) { + Repository reposits = new Repository(me.getKey(), me.getValue()); + reposit.add(reposits); + } + } + return reposit; + } + + private LinkedHashMap _tplRelationshipTypes() { + return (LinkedHashMap) _getCustomTypes(RELATIONSHIP_TYPES, null); + } + + @SuppressWarnings("unchecked") + private LinkedHashMap _tplTopologyTemplate() { + return (LinkedHashMap) tpl.get(TOPOLOGY_TEMPLATE); + } + + private ArrayList _policies() { + return topologyTemplate.getPolicies(); + } + + private ArrayList _groups() { + return topologyTemplate.getGroups(); + } + + /** + * Read datatypes field + * + * @return return list of datatypes. + */ + @SuppressWarnings("unchecked") + private HashSet getTopologyDataTypes() { + LinkedHashMap value = + (LinkedHashMap) tpl.get(DATA_TYPES); + HashSet datatypes = new HashSet<>(); + if (value != null) { + customDefsFinal.putAll(value); + for (Map.Entry me : value.entrySet()) { + DataType datatype = new DataType(me.getKey(), value); + datatypes.add(datatype); + } + } + + + return datatypes; + } + + /** + * This method is used to get consolidated custom definitions from all imports + * It is logically divided in two parts to handle imports; map and list formats. + * Before processing the imports; it sorts them to make sure the current directory imports are + * being processed first and then others. Once sorted; it processes each import one by one in + * recursive manner. 
+ * To avoid cyclic dependency among imports; this method uses a set to keep track of all + * imports which are already processed and filters the imports which occurs more than once. + * + * @param alImports all imports which needs to be processed + * @return the linked hash map containing all import definitions + */ + + @SuppressWarnings("unchecked") + private LinkedHashMap _getAllCustomDefs(Object alImports) { + + + String types[] = { + IMPORTS, NODE_TYPES, CAPABILITY_TYPES, RELATIONSHIP_TYPES, + DATA_TYPES, INTERFACE_TYPES, POLICY_TYPES, GROUP_TYPES + }; + + List> imports = (List>) alImports; + if (imports != null && !imports.isEmpty()) { + if (imports.get(0) instanceof LinkedHashMap) { + imports = sortImports(imports); + + for (Map map : imports) { + List> singleImportList = new ArrayList<>(); + singleImportList.add(map); + + Map importNameDetails = getValidFileNameForImportReference(singleImportList); + singleImportList = filterImportsForRecursion(singleImportList, importNameDetails); + + if (!singleImportList.get(0).isEmpty()) { + LinkedHashMap customDefs = _getCustomTypes(types, new ArrayList<>(singleImportList)); + processedImports.add(importNameDetails.get("importFileName")); + + if (customDefs != null) { + customDefsFinal.putAll(customDefs); + + if (customDefs.get(IMPORTS) != null) { + resetPathForRecursiveImports(importNameDetails.get("importRelativeName")); + LinkedHashMap importDefs = _getAllCustomDefs(customDefs.get(IMPORTS)); + customDefsFinal.putAll(importDefs); + } + } + } + } + } else { + LinkedHashMap customDefs = _getCustomTypes(types, new ArrayList<>(imports)); + if (customDefs != null) { + customDefsFinal.putAll(customDefs); + + if (customDefs.get(IMPORTS) != null) { + LinkedHashMap importDefs = _getAllCustomDefs(customDefs.get(IMPORTS)); + customDefsFinal.putAll(importDefs); + } + } + } + } + + // As imports are not custom_types, remove from the dict + customDefsFinal.remove(IMPORTS); + + return customDefsFinal; + } + + /** + * This method is 
used to sort the imports in order so that same directory + * imports will be processed first + * + * @param customImports the custom imports + * @return the sorted list of imports + */ + private List> sortImports(List> customImports) { + List> finalList1 = new ArrayList<>(); + List> finalList2 = new ArrayList<>(); + Iterator> itr = customImports.iterator(); + while (itr.hasNext()) { + Map innerMap = itr.next(); + if (innerMap.toString().contains("../")) { + finalList2.add(innerMap); + itr.remove(); + } else if (innerMap.toString().contains("/")) { + finalList1.add(innerMap); + itr.remove(); + } + } + + customImports.addAll(finalList1); + customImports.addAll(finalList2); + return customImports; + } + + /** + * This method is used to reset PATH variable after processing of current import file is done + * This is required because of relative path nature of imports present in files. + * + * @param currImportRelativeName the current import relative name + */ + private void resetPathForRecursiveImports(String currImportRelativeName) { + path = getPath(path, currImportRelativeName); + } + + /** + * This is a recursive method which starts from current import and then recursively finds a + * valid path relative to current import file name. 
+ * By doing this it handles all nested hierarchy of imports defined in CSARs + * + * @param path the path + * @param importFileName the import file name + * @return the string containing updated path value + */ + private String getPath(String path, String importFileName) { + String tempFullPath = (Paths.get(path).toAbsolutePath().getParent() + .toString() + File.separator + importFileName.replace("../", "")).replace('\\', '/'); + String tempPartialPath = (Paths.get(path).toAbsolutePath().getParent().toString()).replace('\\', '/'); + if (Files.exists(Paths.get(tempFullPath))) + return tempFullPath; + else + return getPath(tempPartialPath, importFileName); + } + + /** + * This method is used to get full path name for the file which needs to be processed. It helps + * in situation where files are present in different directory and are references as relative + * paths. + * + * @param customImports the custom imports + * @return the map containing import file full and relative paths + */ + private Map getValidFileNameForImportReference(List> customImports) { + String importFileName; + Map retMap = new HashMap<>(); + for (Map map1 : customImports) { + for (Map.Entry entry : map1.entrySet()) { + Map innerMostMap = (Map) entry.getValue(); + Iterator> it = innerMostMap.entrySet().iterator(); + while (it.hasNext()) { + Map.Entry val = it.next(); + if (val.getValue().contains("/")) { + importFileName = (Paths.get(rootPath).toAbsolutePath().getParent().toString() + File + .separator + val.getValue().replace("../", "")).replace('\\', '/'); + } else { + importFileName = (Paths.get(path).toAbsolutePath().getParent().toString() + File + .separator + val.getValue().replace("../", "")).replace('\\', '/'); + } + retMap.put("importFileName", importFileName); + retMap.put("importRelativeName", val.getValue()); + } + } + } + return retMap; + } + + /** + * This method is used to filter the imports which already gets processed in previous step. 
+ * It handles the use case of cyclic dependency in imports which may cause Stack Overflow + * exception + * + * @param customImports the custom imports + * @param importNameDetails the import name details + * @return the list containing filtered imports + */ + private List> filterImportsForRecursion(List> + customImports, Map importNameDetails) { + for (Map map1 : customImports) { + for (Map.Entry entry : map1.entrySet()) { + Map innerMostMap = (Map) entry.getValue(); + Iterator> it = innerMostMap.entrySet().iterator(); + while (it.hasNext()) { + it.next(); + if (processedImports.contains(importNameDetails.get("importFileName"))) { + it.remove(); + } + } + } + } + + // Remove Empty elements + Iterator> itr = customImports.iterator(); + while (itr.hasNext()) { + Map innerMap = itr.next(); + Predicate predicate = p -> p.values().isEmpty(); + innerMap.values().removeIf(predicate); + } + + return customImports; + } + + @SuppressWarnings("unchecked") + private LinkedHashMap _getCustomTypes(Object typeDefinitions, ArrayList alImports) { + + // Handle custom types defined in imported template files + // This method loads the custom type definitions referenced in "imports" + // section of the TOSCA YAML template. 
+ + LinkedHashMap customDefs = new LinkedHashMap(); + ArrayList typeDefs = new ArrayList(); + if (typeDefinitions instanceof String[]) { + for (String s : (String[]) typeDefinitions) { + typeDefs.add(s); + } + } else { + typeDefs.add((String) typeDefinitions); + } + + if (alImports == null) { + alImports = _tplImports(); + } + + if (alImports != null) { + ImportsLoader customService = new ImportsLoader(alImports, path, typeDefs, tpl); + ArrayList> nestedToscaTpls = customService.getNestedToscaTpls(); + _updateNestedToscaTplsWithTopology(nestedToscaTpls); + + customDefs = customService.getCustomDefs(); + if (customDefs == null) { + return null; + } + } + + //Handle custom types defined in current template file + for (String td : typeDefs) { + if (!td.equals(IMPORTS)) { + LinkedHashMap innerCustomTypes = (LinkedHashMap) tpl.get(td); + if (innerCustomTypes != null) { + customDefs.putAll(innerCustomTypes); + } + } + } + return customDefs; + } + + private void _updateNestedToscaTplsWithTopology(ArrayList> nestedToscaTpls) { + for (LinkedHashMap ntpl : nestedToscaTpls) { + // there is just one key:value pair in ntpl + for (Map.Entry me : ntpl.entrySet()) { + String fileName = me.getKey(); + @SuppressWarnings("unchecked") + LinkedHashMap toscaTpl = (LinkedHashMap) me.getValue(); + if (toscaTpl.get(TOPOLOGY_TEMPLATE) != null) { + if (nestedToscaTplsWithTopology.get(fileName) == null) { + nestedToscaTplsWithTopology.putAll(ntpl); + } + } + } + } + } + + // multi level nesting - RECURSIVE + @SuppressWarnings("unchecked") + private void _handleNestedToscaTemplatesWithTopology(TopologyTemplate tt) { + if (++nestingLoopCounter > MAX_LEVELS) { + log.error("ToscaTemplate - _handleNestedToscaTemplatesWithTopology - Nested Topologies Loop: too many levels, aborting"); + return; + } + // Reset Processed Imports for nested templates + this.processedImports = new HashSet<>(); + for (Map.Entry me : nestedToscaTplsWithTopology.entrySet()) { + LinkedHashMap toscaTpl = + (LinkedHashMap) 
me.getValue(); + for (NodeTemplate nt : tt.getNodeTemplates()) { + if (_isSubMappedNode(nt, toscaTpl)) { + parsedParams = _getParamsForNestedTemplate(nt); + ArrayList alim = (ArrayList) toscaTpl.get(IMPORTS); + LinkedHashMap topologyTpl = + (LinkedHashMap) toscaTpl.get(TOPOLOGY_TEMPLATE); + TopologyTemplate topologyWithSubMapping = + new TopologyTemplate(topologyTpl, + _getAllCustomDefs(alim), + relationshipTypes, + parsedParams, + nt, + resolveGetInput); + nt.setOriginComponentTemplate(topologyWithSubMapping); + if (topologyWithSubMapping.getSubstitutionMappings() != null) { + // Record nested topology templates in top level template + //nestedToscaTemplatesWithTopology.add(topologyWithSubMapping); + // Set substitution mapping object for mapped node + nt.setSubMappingToscaTemplate( + topologyWithSubMapping.getSubstitutionMappings()); + _handleNestedToscaTemplatesWithTopology(topologyWithSubMapping); + } + } + } + } + } + +// private void _handleNestedToscaTemplatesWithTopology() { +// for(Map.Entry me: nestedToscaTplsWithTopology.entrySet()) { +// String fname = me.getKey(); +// LinkedHashMap toscaTpl = +// (LinkedHashMap)me.getValue(); +// for(NodeTemplate nt: nodeTemplates) { +// if(_isSubMappedNode(nt,toscaTpl)) { +// parsedParams = _getParamsForNestedTemplate(nt); +// ArrayList alim = (ArrayList)toscaTpl.get(IMPORTS); +// LinkedHashMap topologyTpl = +// (LinkedHashMap)toscaTpl.get(TOPOLOGY_TEMPLATE); +// TopologyTemplate topologyWithSubMapping = +// new TopologyTemplate(topologyTpl, +// //_getAllCustomDefs(null), +// _getAllCustomDefs(alim), +// relationshipTypes, +// parsedParams, +// nt); +// if(topologyWithSubMapping.getSubstitutionMappings() != null) { +// // Record nested topology templates in top level template +// nestedToscaTemplatesWithTopology.add(topologyWithSubMapping); +// // Set substitution mapping object for mapped node +// nt.setSubMappingToscaTemplate( +// topologyWithSubMapping.getSubstitutionMappings()); +// } +// } +// } +// } +// } + + 
private void _validateField() { + String sVersion = _tplVersion(); + if (sVersion == null) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE245", String.format( + "MissingRequiredField: Template is missing required field \"%s\"", DEFINITION_VERSION))); + } else { + _validateVersion(sVersion); + this.version = sVersion; + } + + for (String sKey : tpl.keySet()) { + boolean bFound = false; + for (String sSection : SECTIONS) { + if (sKey.equals(sSection)) { + bFound = true; + break; + } + } + // check ADDITIONAL_SECTIONS + if (!bFound) { + if (ADDITIONAL_SECTIONS.get(version) != null && + ADDITIONAL_SECTIONS.get(version).contains(sKey)) { + bFound = true; + } + } + if (!bFound) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE246", String.format( + "UnknownFieldError: Template contains unknown field \"%s\"", + sKey))); + } + } + } + + private void _validateVersion(String sVersion) { + boolean bFound = false; + for (String vtv : VALID_TEMPLATE_VERSIONS) { + if (sVersion.equals(vtv)) { + bFound = true; + break; + } + } + if (!bFound) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE247", String.format( + "InvalidTemplateVersion: \"%s\" is invalid. 
Valid versions are %s", + sVersion, VALID_TEMPLATE_VERSIONS.toString()))); + } else if ((!sVersion.equals("tosca_simple_yaml_1_0") && !sVersion.equals("tosca_simple_yaml_1_1"))) { + EntityType.updateDefinitions(sVersion); + + } + } + + private String _getPath(String _path) throws JToscaException { + if (_path.toLowerCase().endsWith(".yaml") || _path.toLowerCase().endsWith(".yml")) { + return _path; + } else if (_path.toLowerCase().endsWith(".zip") || _path.toLowerCase().endsWith(".csar")) { + // a CSAR archive + CSAR csar = new CSAR(_path, isFile); + if (csar.validate()) { + try { + csar.decompress(); + metaProperties = csar.getMetaProperties(); + } catch (IOException e) { + log.error("ToscaTemplate - _getPath - IOException trying to decompress {}", _path); + return null; + } + isFile = true; // the file has been decompressed locally + csar.cleanup(); + csarTempDir = csar.getTempDir(); + return csar.getTempDir() + File.separator + csar.getMainTemplate(); + } + } else { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE248", "ValueError: " + _path + " is not a valid file")); + return null; + } + return null; + } + + private void verifyTemplate() throws JToscaException { + //Criticals + int validationIssuesCaught = ThreadLocalsHolder.getCollector().validationIssuesCaught(); + if (validationIssuesCaught > 0) { + List validationIssueStrings = ThreadLocalsHolder.getCollector().getValidationIssueReport(); + log.trace("####################################################################################################"); + log.trace("ToscaTemplate - verifyTemplate - {} Parsing Critical{} occurred...", validationIssuesCaught, (validationIssuesCaught > 1 ? "s" : "")); + for (String s : validationIssueStrings) { + log.trace("{}. 
CSAR name - {}", s, inputPath); + } + log.trace("####################################################################################################"); + } + + } + + public String getPath() { + return path; + } + + public String getVersion() { + return version; + } + + public String getDescription() { + return description; + } + + public TopologyTemplate getTopologyTemplate() { + return topologyTemplate; + } + + public Metadata getMetaData() { + return metaData; + } + + public ArrayList getInputs() { + if (inputs != null) { + inputs.stream().forEach(Input::resetAnnotaions); + } + return inputs; + } + + public ArrayList getOutputs() { + return outputs; + } + + public ArrayList getPolicies() { + return policies; + } + + public ArrayList getGroups() { + return groups; + } + + public ArrayList getNodeTemplates() { + return nodeTemplates; + } + + public LinkedHashMap getMetaProperties(String propertiesFile) { + return metaProperties.get(propertiesFile); + } + +// private boolean _isSubMappedNode(NodeTemplate nt,LinkedHashMap toscaTpl) { +// // Return True if the nodetemple is substituted +// if(nt != null && nt.getSubMappingToscaTemplate() == null && +// getSubMappingNodeType(toscaTpl).equals(nt.getType()) && +// nt.getInterfaces().size() < 1) { +// return true; +// } +// return false; +// } + + private boolean _isSubMappedNode(NodeTemplate nt, LinkedHashMap toscaTpl) { + // Return True if the nodetemple is substituted + if (nt != null && nt.getSubMappingToscaTemplate() == null && + getSubMappingNodeType(toscaTpl).equals(nt.getType()) && + nt.getInterfaces().size() < 1) { + return true; + } + return false; + } + + private LinkedHashMap _getParamsForNestedTemplate(NodeTemplate nt) { + // Return total params for nested_template + LinkedHashMap pparams; + if (parsedParams != null) { + pparams = parsedParams; + } else { + pparams = new LinkedHashMap(); + } + if (nt != null) { + for (String pname : nt.getProperties().keySet()) { + pparams.put(pname, 
nt.getPropertyValue(pname)); + } + } + return pparams; + } + + @SuppressWarnings("unchecked") + private String getSubMappingNodeType(LinkedHashMap toscaTpl) { + // Return substitution mappings node type + if (toscaTpl != null) { + return TopologyTemplate.getSubMappingNodeType( + (LinkedHashMap) toscaTpl.get(TOPOLOGY_TEMPLATE)); + } + return null; + } + + public boolean hasNestedTemplates() { + // Return True if the tosca template has nested templates + return nestedToscaTemplatesWithTopology != null && + nestedToscaTemplatesWithTopology.size() >= 1; + + } + + public ArrayList getNestedTemplates() { + return nestedToscaTemplatesWithTopology; + } + + public ConcurrentHashMap getNestedTopologyTemplates() { + return nestedToscaTplsWithTopology; + } + + /** + * Get datatypes. + * + * @return return list of datatypes. + */ + public HashSet getDataTypes() { + return dataTypes; + } + + @Override + public String toString() { + return "ToscaTemplate{" + + "exttools=" + exttools + + ", VALID_TEMPLATE_VERSIONS=" + VALID_TEMPLATE_VERSIONS + + ", ADDITIONAL_SECTIONS=" + ADDITIONAL_SECTIONS + + ", isFile=" + isFile + + ", path='" + path + '\'' + + ", inputPath='" + inputPath + '\'' + + ", parsedParams=" + parsedParams + + ", tpl=" + tpl + + ", version='" + version + '\'' + + ", imports=" + imports + + ", relationshipTypes=" + relationshipTypes + + ", metaData=" + metaData + + ", description='" + description + '\'' + + ", topologyTemplate=" + topologyTemplate + + ", repositories=" + repositories + + ", inputs=" + inputs + + ", relationshipTemplates=" + relationshipTemplates + + ", nodeTemplates=" + nodeTemplates + + ", outputs=" + outputs + + ", policies=" + policies + + ", nestedToscaTplsWithTopology=" + nestedToscaTplsWithTopology + + ", nestedToscaTemplatesWithTopology=" + nestedToscaTemplatesWithTopology + + ", graph=" + graph + + ", csarTempDir='" + csarTempDir + '\'' + + ", nestingLoopCounter=" + nestingLoopCounter + + ", dataTypes=" + dataTypes + + '}'; + } + + public List 
getInputs(boolean annotationsRequired) { + if (inputs != null && annotationsRequired) { + inputs.stream().forEach(Input::parseAnnotations); + return inputs; + } + return getInputs(); + } +} + +/*python + +import logging +import os + +from copy import deepcopy +from toscaparser.common.exception import ValidationIssueCollector.collector +from toscaparser.common.exception import InvalidTemplateVersion +from toscaparser.common.exception import MissingRequiredFieldError +from toscaparser.common.exception import UnknownFieldError +from toscaparser.common.exception import ValidationError +from toscaparser.elements.entity_type import update_definitions +from toscaparser.extensions.exttools import ExtTools +import org.openecomp.sdc.toscaparser.api.imports +from toscaparser.prereq.csar import CSAR +from toscaparser.repositories import Repository +from toscaparser.topology_template import TopologyTemplate +from toscaparser.tpl_relationship_graph import ToscaGraph +from toscaparser.utils.gettextutils import _ +import org.openecomp.sdc.toscaparser.api.utils.yamlparser + + +# TOSCA template key names +SECTIONS = (DEFINITION_VERSION, DEFAULT_NAMESPACE, TEMPLATE_NAME, + TOPOLOGY_TEMPLATE, TEMPLATE_AUTHOR, TEMPLATE_VERSION, + DESCRIPTION, IMPORTS, DSL_DEFINITIONS, NODE_TYPES, + RELATIONSHIP_TYPES, RELATIONSHIP_TEMPLATES, + CAPABILITY_TYPES, ARTIFACT_TYPES, DATA_TYPES, INTERFACE_TYPES, + POLICY_TYPES, GROUP_TYPES, REPOSITORIES) = \ + ('tosca_definitions_version', 'tosca_default_namespace', + 'template_name', 'topology_template', 'template_author', + 'template_version', 'description', 'imports', 'dsl_definitions', + 'node_types', 'relationship_types', 'relationship_templates', + 'capability_types', 'artifact_types', 'data_types', + 'interface_types', 'policy_types', 'group_types', 'repositories') +# Sections that are specific to individual template definitions +SPECIAL_SECTIONS = (METADATA) = ('metadata') + +log = logging.getLogger("tosca.model") + +YAML_LOADER = 
toscaparser.utils.yamlparser.load_yaml + + +class ToscaTemplate(object): + exttools = ExtTools() + + VALID_TEMPLATE_VERSIONS = ['tosca_simple_yaml_1_0'] + + VALID_TEMPLATE_VERSIONS.extend(exttools.get_versions()) + + ADDITIONAL_SECTIONS = {'tosca_simple_yaml_1_0': SPECIAL_SECTIONS} + + ADDITIONAL_SECTIONS.update(exttools.get_sections()) + + '''Load the template data.''' + def __init__(self, path=None, parsed_params=None, a_file=True, + yaml_dict_tpl=None): + + ValidationIssueCollector.collector.start() + self.a_file = a_file + self.input_path = None + self.path = None + self.tpl = None + self.nested_tosca_tpls_with_topology = {} + self.nested_tosca_templates_with_topology = [] + if path: + self.input_path = path + self.path = self._get_path(path) + if self.path: + self.tpl = YAML_LOADER(self.path, self.a_file) + if yaml_dict_tpl: + msg = (_('Both path and yaml_dict_tpl arguments were ' + 'provided. Using path and ignoring yaml_dict_tpl.')) + log.info(msg) + print(msg) + else: + if yaml_dict_tpl: + self.tpl = yaml_dict_tpl + else: + ValidationIssueCollector.collector.appendException( + ValueError(_('No path or yaml_dict_tpl was provided. 
' + 'There is nothing to parse.'))) + + if self.tpl: + self.parsed_params = parsed_params + self._validate_field() + self.version = self._tpl_version() + self.relationship_types = self._tpl_relationship_types() + self.description = self._tpl_description() + self.topology_template = self._topology_template() + self.repositories = self._tpl_repositories() + if self.topology_template.tpl: + self.inputs = self._inputs() + self.relationship_templates = self._relationship_templates() + self.nodetemplates = self._nodetemplates() + self.outputs = self._outputs() + self._handle_nested_tosca_templates_with_topology() + self.graph = ToscaGraph(self.nodetemplates) + + ValidationIssueCollector.collector.stop() + self.verify_template() + + def _topology_template(self): + return TopologyTemplate(self._tpl_topology_template(), + self._get_all_custom_defs(), + self.relationship_types, + self.parsed_params, + None) + + def _inputs(self): + return self.topology_template.inputs + + def _nodetemplates(self): + return self.topology_template.nodetemplates + + def _relationship_templates(self): + return self.topology_template.relationship_templates + + def _outputs(self): + return self.topology_template.outputs + + def _tpl_version(self): + return self.tpl.get(DEFINITION_VERSION) + + def _tpl_description(self): + desc = self.tpl.get(DESCRIPTION) + if desc: + return desc.rstrip() + + def _tpl_imports(self): + return self.tpl.get(IMPORTS) + + def _tpl_repositories(self): + repositories = self.tpl.get(REPOSITORIES) + reposit = [] + if repositories: + for name, val in repositories.items(): + reposits = Repository(name, val) + reposit.append(reposits) + return reposit + + def _tpl_relationship_types(self): + return self._get_custom_types(RELATIONSHIP_TYPES) + + def _tpl_relationship_templates(self): + topology_template = self._tpl_topology_template() + return topology_template.get(RELATIONSHIP_TEMPLATES) + + def _tpl_topology_template(self): + return self.tpl.get(TOPOLOGY_TEMPLATE) + + def 
_get_all_custom_defs(self, imports=None): + types = [IMPORTS, NODE_TYPES, CAPABILITY_TYPES, RELATIONSHIP_TYPES, + DATA_TYPES, INTERFACE_TYPES, POLICY_TYPES, GROUP_TYPES] + custom_defs_final = {} + custom_defs = self._get_custom_types(types, imports) + if custom_defs: + custom_defs_final.update(custom_defs) + if custom_defs.get(IMPORTS): + import_defs = self._get_all_custom_defs( + custom_defs.get(IMPORTS)) + custom_defs_final.update(import_defs) + + # As imports are not custom_types, removing from the dict + custom_defs_final.pop(IMPORTS, None) + return custom_defs_final + + def _get_custom_types(self, type_definitions, imports=None): + """Handle custom types defined in imported template files + + This method loads the custom type definitions referenced in "imports" + section of the TOSCA YAML template. + """ + custom_defs = {} + type_defs = [] + if not isinstance(type_definitions, list): + type_defs.append(type_definitions) + else: + type_defs = type_definitions + + if not imports: + imports = self._tpl_imports() + + if imports: + custom_service = toscaparser.imports.\ + ImportsLoader(imports, self.path, + type_defs, self.tpl) + + nested_tosca_tpls = custom_service.get_nested_tosca_tpls() + self._update_nested_tosca_tpls_with_topology(nested_tosca_tpls) + + custom_defs = custom_service.get_custom_defs() + if not custom_defs: + return + + # Handle custom types defined in current template file + for type_def in type_defs: + if type_def != IMPORTS: + inner_custom_types = self.tpl.get(type_def) or {} + if inner_custom_types: + custom_defs.update(inner_custom_types) + return custom_defs + + def _update_nested_tosca_tpls_with_topology(self, nested_tosca_tpls): + for tpl in nested_tosca_tpls: + filename, tosca_tpl = list(tpl.items())[0] + if (tosca_tpl.get(TOPOLOGY_TEMPLATE) and + filename not in list( + self.nested_tosca_tpls_with_topology.keys())): + self.nested_tosca_tpls_with_topology.update(tpl) + + def _handle_nested_tosca_templates_with_topology(self): + for 
fname, tosca_tpl in self.nested_tosca_tpls_with_topology.items(): + for nodetemplate in self.nodetemplates: + if self._is_sub_mapped_node(nodetemplate, tosca_tpl): + parsed_params = self._get_params_for_nested_template( + nodetemplate) + topology_tpl = tosca_tpl.get(TOPOLOGY_TEMPLATE) + topology_with_sub_mapping = TopologyTemplate( + topology_tpl, + self._get_all_custom_defs(), + self.relationship_types, + parsed_params, + nodetemplate) + if topology_with_sub_mapping.substitution_mappings: + # Record nested topo templates in top level template + self.nested_tosca_templates_with_topology.\ + append(topology_with_sub_mapping) + # Set substitution mapping object for mapped node + nodetemplate.sub_mapping_tosca_template = \ + topology_with_sub_mapping.substitution_mappings + + def _validate_field(self): + version = self._tpl_version() + if not version: + ValidationIssueCollector.collector.appendException( + MissingRequiredFieldError(what='Template', + required=DEFINITION_VERSION)) + else: + self._validate_version(version) + self.version = version + + for name in self.tpl: + if (name not in SECTIONS and + name not in self.ADDITIONAL_SECTIONS.get(version, ())): + ValidationIssueCollector.collector.appendException( + UnknownFieldError(what='Template', field=name)) + + def _validate_version(self, version): + if version not in self.VALID_TEMPLATE_VERSIONS: + ValidationIssueCollector.collector.appendException( + InvalidTemplateVersion( + what=version, + valid_versions=', '. 
join(self.VALID_TEMPLATE_VERSIONS))) + else: + if version != 'tosca_simple_yaml_1_0': + update_definitions(version) + + def _get_path(self, path): + if path.lower().endswith(('.yaml','.yml')): + return path + elif path.lower().endswith(('.zip', '.csar')): + # a CSAR archive + csar = CSAR(path, self.a_file) + if csar.validate(): + csar.decompress() + self.a_file = True # the file has been decompressed locally + return os.path.join(csar.temp_dir, csar.get_main_template()) + else: + ValidationIssueCollector.collector.appendException( + ValueError(_('"%(path)s" is not a valid file.') + % {'path': path})) + + def verify_template(self): + if ValidationIssueCollector.collector.exceptionsCaught(): + if self.input_path: + raise ValidationError( + message=(_('\nThe input "%(path)s" failed validation with ' + 'the following error(s): \n\n\t') + % {'path': self.input_path}) + + '\n\t'.join(ValidationIssueCollector.collector.getExceptionsReport())) + else: + raise ValidationError( + message=_('\nThe pre-parsed input failed validation with ' + 'the following error(s): \n\n\t') + + '\n\t'.join(ValidationIssueCollector.collector.getExceptionsReport())) + else: + if self.input_path: + msg = (_('The input "%(path)s" successfully passed ' + 'validation.') % {'path': self.input_path}) + else: + msg = _('The pre-parsed input successfully passed validation.') + + log.info(msg) + + def _is_sub_mapped_node(self, nodetemplate, tosca_tpl): + """Return True if the nodetemple is substituted.""" + if (nodetemplate and not nodetemplate.sub_mapping_tosca_template and + self.get_sub_mapping_node_type(tosca_tpl) == nodetemplate.type + and len(nodetemplate.interfaces) < 1): + return True + else: + return False + + def _get_params_for_nested_template(self, nodetemplate): + """Return total params for nested_template.""" + parsed_params = deepcopy(self.parsed_params) \ + if self.parsed_params else {} + if nodetemplate: + for pname in nodetemplate.get_properties(): + parsed_params.update({pname: + 
nodetemplate.get_property_value(pname)}) + return parsed_params + + def get_sub_mapping_node_type(self, tosca_tpl): + """Return substitution mappings node type.""" + if tosca_tpl: + return TopologyTemplate.get_sub_mapping_node_type( + tosca_tpl.get(TOPOLOGY_TEMPLATE)) + + def _has_substitution_mappings(self): + """Return True if the template has valid substitution mappings.""" + return self.topology_template is not None and \ + self.topology_template.substitution_mappings is not None + + def has_nested_templates(self): + """Return True if the tosca template has nested templates.""" + return self.nested_tosca_templates_with_topology is not None and \ + len(self.nested_tosca_templates_with_topology) >= 1 +*/ \ No newline at end of file diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/Triggers.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/Triggers.java new file mode 100644 index 0000000..c78978f --- /dev/null +++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/Triggers.java @@ -0,0 +1,201 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + +package org.onap.sdc.toscaparser.api; + +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; +import org.onap.sdc.toscaparser.api.utils.ValidateUtils; + +import java.util.LinkedHashMap; + +public class Triggers extends EntityTemplate { + + private static final String DESCRIPTION = "description"; + private static final String EVENT = "event_type"; + private static final String SCHEDULE = "schedule"; + private static final String TARGET_FILTER = "target_filter"; + private static final String CONDITION = "condition"; + private static final String ACTION = "action"; + + private static final String[] SECTIONS = { + DESCRIPTION, EVENT, SCHEDULE, TARGET_FILTER, CONDITION, ACTION + }; + + private static final String METER_NAME = "meter_name"; + private static final String CONSTRAINT = "constraint"; + private static final String PERIOD = "period"; + private static final String EVALUATIONS = "evaluations"; + private static final String METHOD = "method"; + private static final String THRESHOLD = "threshold"; + private static final String COMPARISON_OPERATOR = "comparison_operator"; + + private static final String[] CONDITION_KEYNAMES = { + METER_NAME, CONSTRAINT, PERIOD, EVALUATIONS, METHOD, THRESHOLD, COMPARISON_OPERATOR + }; + + private String name; + private LinkedHashMap triggerTpl; + + public Triggers(String name, LinkedHashMap triggerTpl) { + super(); // dummy. 
don't want super + this.name = name; + this.triggerTpl = triggerTpl; + validateKeys(); + validateCondition(); + validateInput(); + } + + public String getDescription() { + return (String) triggerTpl.get("description"); + } + + public String getEvent() { + return (String) triggerTpl.get("event_type"); + } + + public LinkedHashMap getSchedule() { + return (LinkedHashMap) triggerTpl.get("schedule"); + } + + public LinkedHashMap getTargetFilter() { + return (LinkedHashMap) triggerTpl.get("target_filter"); + } + + public LinkedHashMap getCondition() { + return (LinkedHashMap) triggerTpl.get("condition"); + } + + public LinkedHashMap getAction() { + return (LinkedHashMap) triggerTpl.get("action"); + } + + private void validateKeys() { + for (String key : triggerTpl.keySet()) { + boolean bFound = false; + for (int i = 0; i < SECTIONS.length; i++) { + if (key.equals(SECTIONS[i])) { + bFound = true; + break; + } + } + if (!bFound) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE249", String.format( + "UnknownFieldError: Triggers \"%s\" contains unknown field \"%s\"", + name, key))); + } + } + } + + private void validateCondition() { + for (String key : getCondition().keySet()) { + boolean bFound = false; + for (int i = 0; i < CONDITION_KEYNAMES.length; i++) { + if (key.equals(CONDITION_KEYNAMES[i])) { + bFound = true; + break; + } + } + if (!bFound) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE250", String.format( + "UnknownFieldError: Triggers \"%s\" contains unknown field \"%s\"", + name, key))); + } + } + } + + private void validateInput() { + for (String key : getCondition().keySet()) { + Object value = getCondition().get(key); + if (key.equals(PERIOD) || key.equals(EVALUATIONS)) { + ValidateUtils.validateInteger(value); + } else if (key.equals(THRESHOLD)) { + ValidateUtils.validateNumeric(value); + } else if (key.equals(METER_NAME) || key.equals(METHOD)) { + 
ValidateUtils.validateString(value); + } + } + } + + @Override + public String toString() { + return "Triggers{" + + "name='" + name + '\'' + + ", triggerTpl=" + triggerTpl + + '}'; + } +} + +/*python + +from toscaparser.common.exception import ValidationIssueCollector +from toscaparser.common.exception import UnknownFieldError +from toscaparser.entity_template import EntityTemplate + +SECTIONS = (DESCRIPTION, EVENT, SCHEDULE, TARGET_FILTER, CONDITION, ACTION) = \ + ('description', 'event_type', 'schedule', + 'target_filter', 'condition', 'action') +CONDITION_KEYNAMES = (CONTRAINT, PERIOD, EVALUATIONS, METHOD) = \ + ('constraint', 'period', 'evaluations', 'method') +log = logging.getLogger('tosca') + + +class Triggers(EntityTemplate): + + '''Triggers defined in policies of topology template''' + + def __init__(self, name, trigger_tpl): + self.name = name + self.trigger_tpl = trigger_tpl + self._validate_keys() + self._validate_condition() + + def get_description(self): + return self.trigger_tpl['description'] + + def get_event(self): + return self.trigger_tpl['event_type'] + + def get_schedule(self): + return self.trigger_tpl['schedule'] + + def get_target_filter(self): + return self.trigger_tpl['target_filter'] + + def get_condition(self): + return self.trigger_tpl['condition'] + + def get_action(self): + return self.trigger_tpl['action'] + + def _validate_keys(self): + for key in self.trigger_tpl.keys(): + if key not in SECTIONS: + ValidationIssueCollector.appendException( + UnknownFieldError(what='Triggers "%s"' % self.name, + field=key)) + + def _validate_condition(self): + for key in self.get_condition(): + if key not in CONDITION_KEYNAMES: + ValidationIssueCollector.appendException( + UnknownFieldError(what='Triggers "%s"' % self.name, + field=key)) +*/ diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/UnsupportedType.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/UnsupportedType.java new file mode 100644 index 0000000..f2bb650 --- 
/dev/null +++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/UnsupportedType.java @@ -0,0 +1,101 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + +package org.onap.sdc.toscaparser.api; + +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +public class UnsupportedType { + + // Note: TOSCA spec version related + + /* + The tosca.nodes.Storage.ObjectStorage and tosca.nodes.Storage.BlockStorage + used here as un_supported_types are part of the name changes in TOSCA spec + version 1.1. The original name as specified in version 1.0 are, + tosca.nodes.BlockStorage and tosca.nodes.ObjectStorage which are supported + by the tosca-parser. Since there are little overlapping in version support + currently in the tosca-parser, the names tosca.nodes.Storage.ObjectStorage + and tosca.nodes.Storage.BlockStorage are used here to demonstrate the usage + of un_supported_types. As tosca-parser move to provide support for version + 1.1 and higher, they will be removed. 
+ */ + + private UnsupportedType() { + } + + private static final String[] UNSUPPORTED_TYPES = { + "tosca.test.invalidtype", + "tosca.nodes.Storage.ObjectStorage", + "tosca.nodes.Storage.BlockStorage"}; + + public static boolean validateType(String entityType) { + for (String ust : UNSUPPORTED_TYPES) { + if (ust.equals(entityType)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE251", String.format( + "UnsupportedTypeError: Entity type \"%s\" is not supported", entityType))); + return true; + } + } + return false; + } +} + +/*python + +from toscaparser.common.exception import ValidationIssueCollector +from toscaparser.common.exception import UnsupportedTypeError +from toscaparser.utils.gettextutils import _ + +log = logging.getLogger('tosca') + + +class UnsupportedType(object): + + """Note: TOSCA spec version related + + The tosca.nodes.Storage.ObjectStorage and tosca.nodes.Storage.BlockStorage + used here as un_supported_types are part of the name changes in TOSCA spec + version 1.1. The original name as specified in version 1.0 are, + tosca.nodes.BlockStorage and tosca.nodes.ObjectStorage which are supported + by the tosca-parser. Since there are little overlapping in version support + currently in the tosca-parser, the names tosca.nodes.Storage.ObjectStorage + and tosca.nodes.Storage.BlockStorage are used here to demonstrate the usage + of un_supported_types. As tosca-parser move to provide support for version + 1.1 and higher, they will be removed. 
+ """ + un_supported_types = ['tosca.test.invalidtype', + 'tosca.nodes.Storage.ObjectStorage', + 'tosca.nodes.Storage.BlockStorage'] + + def __init__(self): + pass + + @staticmethod + def validate_type(entitytype): + if entitytype in UnsupportedType.un_supported_types: + ValidationIssueCollector.appendException(UnsupportedTypeError( + what=_('%s') + % entitytype)) + return True + else: + return False +*/ diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/common/JToscaException.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/common/JToscaException.java new file mode 100644 index 0000000..56416c6 --- /dev/null +++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/common/JToscaException.java @@ -0,0 +1,47 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
public class JToscaException extends Exception {

    private static final long serialVersionUID = 1L;

    /** Machine-readable error code; see the catalogue at the bottom of this class. */
    private String code;

    /**
     * Creates an exception carrying a human-readable message and an error code.
     *
     * @param message human-readable description of the failure
     * @param code    error code such as "JE1001"
     */
    public JToscaException(String message, String code) {
        super(message);
        this.code = code;
    }

    public String getCode() {
        return code;
    }

    public void setCode(String code) {
        this.code = code;
    }

    // Known error codes:
    //   JE1001 - Meta file missing
    //   JE1002 - Invalid yaml content
    //   JE1003 - Entry-Definition not defined in meta file
    //   JE1004 - Entry-Definition file missing
    //   JE1005 - General Error
    //   JE1006 - General Error/Path not valid
}
+ * ============LICENSE_END========================================================= + */ + +package org.onap.sdc.toscaparser.api.common; + +import java.util.Objects; + +public class JToscaValidationIssue { + + private String code; + private String message; + + + public JToscaValidationIssue(String code, String message) { + super(); + this.code = code; + this.message = message; + } + + public String getMessage() { + return message; + } + + public void setMessage(String message) { + this.message = message; + } + + public String getCode() { + return code; + } + + public void setCode(String code) { + this.code = code; + } + + @Override + public String toString() { + return "JToscaError [code=" + code + ", message=" + message + "]"; + } + + @Override + public boolean equals(final Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + final JToscaValidationIssue that = (JToscaValidationIssue) o; + return Objects.equals(code, that.code) && + Objects.equals(message, that.message); + } + + @Override + public int hashCode() { + return Objects.hash(code, message); + } +} diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/common/TOSCAException.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/common/TOSCAException.java new file mode 100644 index 0000000..c109ffd --- /dev/null +++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/common/TOSCAException.java @@ -0,0 +1,58 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
public class TOSCAException extends Exception {

    private static final long serialVersionUID = 1L;

    /** When true, a malformed message format is rethrown instead of being ignored. */
    private static boolean FATAL_EXCEPTION_FORMAT_ERRORS = false;

    /** Formatted message; defaults to a generic text when formatting is impossible. */
    private String message = "An unknown exception has occurred";

    /** Format string applied to the constructor args; null in this base class. */
    private String msgFmt = null;

    /**
     * Builds the exception message by applying msgFmt to the given arguments.
     *
     * Bug fix: String.format(null, ...) throws NullPointerException (not
     * IllegalFormatException), so the original constructor could never complete
     * normally when msgFmt was unset. Formatting is now skipped in that case
     * and the default message is kept. (Also fixed the "unkown" typo.)
     */
    public TOSCAException(String... strings) {
        if (msgFmt != null) {
            try {
                message = String.format(msgFmt, (Object[]) strings);
            } catch (IllegalFormatException e) {
                // TODO log the bad format string
                if (FATAL_EXCEPTION_FORMAT_ERRORS) {
                    throw e;
                }
            }
        }
    }

    /** Python-style accessor kept for parity with the original toscaparser API. */
    public String __str__() {
        return message;
    }

    public static void generate_inv_schema_property_error(String name, String attr, String value, String valid_values) {
        //TODO not yet implemented
    }

    /** Toggles whether message-formatting errors are fatal (rethrown). */
    public static void setFatalFormatException(boolean flag) {
        FATAL_EXCEPTION_FORMAT_ERRORS = flag;
    }

}
+ * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + +package org.onap.sdc.toscaparser.api.common; + +import java.util.*; + +// Perfectly good enough... + +public class ValidationIssueCollector { + + private Map validationIssues = new HashMap(); + + public void appendValidationIssue(JToscaValidationIssue issue) { + + validationIssues.put(issue.getMessage(), issue); + + } + + public List getValidationIssueReport() { + List report = new ArrayList<>(); + if (!validationIssues.isEmpty()) { + for (JToscaValidationIssue exception : validationIssues.values()) { + report.add("[" + exception.getCode() + "]: " + exception.getMessage()); + } + } + + return report; + } + + public Map getValidationIssues() { + return validationIssues; + } + + + public int validationIssuesCaught() { + return validationIssues.size(); + } + +} diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/ArtifactTypeDef.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/ArtifactTypeDef.java new file mode 100644 index 0000000..9cf8c6c --- /dev/null +++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/ArtifactTypeDef.java @@ -0,0 +1,121 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * 
================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + +package org.onap.sdc.toscaparser.api.elements; + +import java.util.LinkedHashMap; + +public class ArtifactTypeDef extends StatefulEntityType { + + private String type; + private LinkedHashMap customDef; + private LinkedHashMap properties; + private LinkedHashMap parentArtifacts; + + + public ArtifactTypeDef(String type, LinkedHashMap customDef) { + super(type, ARTIFACT_PREFIX, customDef); + + this.type = type; + this.customDef = customDef; + properties = defs != null ? 
(LinkedHashMap) defs.get(PROPERTIES) : null; + parentArtifacts = getParentArtifacts(); + } + + private LinkedHashMap getParentArtifacts() { + LinkedHashMap artifacts = new LinkedHashMap<>(); + String parentArtif = null; + if (getParentType() != null) { + parentArtif = getParentType().getType(); + } + if (parentArtif != null && !parentArtif.isEmpty()) { + while (!parentArtif.equals("tosca.artifacts.Root")) { + Object ob = TOSCA_DEF.get(parentArtif); + artifacts.put(parentArtif, ob); + parentArtif = + (String) ((LinkedHashMap) ob).get("derived_from"); + } + } + return artifacts; + } + + public ArtifactTypeDef getParentType() { + // Return a artifact entity from which this entity is derived + if (defs == null) { + return null; + } + String partifactEntity = derivedFrom(defs); + if (partifactEntity != null) { + return new ArtifactTypeDef(partifactEntity, customDef); + } + return null; + } + + public Object getArtifact(String name) { + // Return the definition of an artifact field by name + if (defs != null) { + return defs.get(name); + } + return null; + } + + public String getType() { + return type; + } + +} + +/*python +class ArtifactTypeDef(StatefulEntityType): + '''TOSCA built-in artifacts type.''' + + def __init__(self, atype, custom_def=None): + super(ArtifactTypeDef, self).__init__(atype, self.ARTIFACT_PREFIX, + custom_def) + self.type = atype + self.custom_def = custom_def + self.properties = None + if self.PROPERTIES in self.defs: + self.properties = self.defs[self.PROPERTIES] + self.parent_artifacts = self._get_parent_artifacts() + + def _get_parent_artifacts(self): + artifacts = {} + parent_artif = self.parent_type.type if self.parent_type else None + if parent_artif: + while parent_artif != 'tosca.artifacts.Root': + artifacts[parent_artif] = self.TOSCA_DEF[parent_artif] + parent_artif = artifacts[parent_artif]['derived_from'] + return artifacts + + @property + def parent_type(self): + '''Return a artifact entity from which this entity is derived.''' + if 
not hasattr(self, 'defs'): + return None + partifact_entity = self.derived_from(self.defs) + if partifact_entity: + return ArtifactTypeDef(partifact_entity, self.custom_def) + + def get_artifact(self, name): + '''Return the definition of an artifact field by name.''' + if name in self.defs: + return self.defs[name] +*/ diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/AttributeDef.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/AttributeDef.java new file mode 100644 index 0000000..e4a30f1 --- /dev/null +++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/AttributeDef.java @@ -0,0 +1,60 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
public class AttributeDef {
    // TOSCA built-in Attribute type: a named attribute value plus its schema.
    // Fix: fields are now final (this is a pure value holder) and the raw
    // LinkedHashMap gained its type parameters.

    /** Attribute name as declared in the type definition. */
    private final String name;
    /** Attribute value; may be null when none is supplied. */
    private final Object value;
    /** Schema map describing the attribute (type, description, ...); may be null. */
    private final LinkedHashMap<String, Object> schema;

    public AttributeDef(String adName, Object adValue, LinkedHashMap<String, Object> adSchema) {
        name = adName;
        value = adValue;
        schema = adSchema;
    }

    public String getName() {
        return name;
    }

    public Object getValue() {
        return value;
    }

    public LinkedHashMap<String, Object> getSchema() {
        return schema;
    }
}
+ * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + +package org.onap.sdc.toscaparser.api.elements; + +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.Map; + +public class CapabilityTypeDef extends StatefulEntityType { + // TOSCA built-in capabilities type + + private static final String TOSCA_TYPEURI_CAPABILITY_ROOT = "tosca.capabilities.Root"; + + private String name; + private String nodetype; + private LinkedHashMap customDef; + private LinkedHashMap properties; + private LinkedHashMap parentCapabilities; + + @SuppressWarnings("unchecked") + public CapabilityTypeDef(String cname, String ctype, String ntype, LinkedHashMap ccustomDef) { + super(ctype, CAPABILITY_PREFIX, ccustomDef); + + name = cname; + nodetype = ntype; + properties = null; + customDef = ccustomDef; + if (defs != null) { + properties = (LinkedHashMap) defs.get(PROPERTIES); + } + parentCapabilities = getParentCapabilities(customDef); + } + + @SuppressWarnings("unchecked") + public ArrayList getPropertiesDefObjects() { + // Return a list of property definition objects + ArrayList propsdefs = new ArrayList<>(); + LinkedHashMap parentProperties = new LinkedHashMap<>(); + if (parentCapabilities != null) { + for (Map.Entry me : parentCapabilities.entrySet()) { + parentProperties.put(me.getKey(), ((LinkedHashMap) me.getValue()).get("properties")); + } + } + if (properties != null) { + for (Map.Entry me : properties.entrySet()) { + propsdefs.add(new PropertyDef(me.getKey(), null, (LinkedHashMap) me.getValue())); + } + } + if (parentProperties != null) { + for (Map.Entry me : parentProperties.entrySet()) { + LinkedHashMap props = (LinkedHashMap) me.getValue(); + if (props != null) { + for (Map.Entry pe : props.entrySet()) { + String prop = pe.getKey(); + LinkedHashMap schema = (LinkedHashMap) pe.getValue(); + // add parent 
property if not overridden by children type + if (properties == null || properties.get(prop) == null) { + propsdefs.add(new PropertyDef(prop, null, schema)); + } + } + } + } + } + return propsdefs; + } + + public LinkedHashMap getPropertiesDef() { + LinkedHashMap pds = new LinkedHashMap<>(); + for (PropertyDef pd : getPropertiesDefObjects()) { + pds.put(pd.getName(), pd); + } + return pds; + } + + public PropertyDef getPropertyDefValue(String pdname) { + // Return the definition of a given property name + LinkedHashMap propsDef = getPropertiesDef(); + if (propsDef != null && propsDef.get(pdname) != null) { + return (PropertyDef) propsDef.get(pdname).getPDValue(); + } + return null; + } + + @SuppressWarnings("unchecked") + private LinkedHashMap getParentCapabilities(LinkedHashMap customDef) { + LinkedHashMap capabilities = new LinkedHashMap<>(); + CapabilityTypeDef parentCap = getParentType(); + if (parentCap != null) { + String sParentCap = parentCap.getType(); + while (!sParentCap.equals(TOSCA_TYPEURI_CAPABILITY_ROOT)) { + if (TOSCA_DEF.get(sParentCap) != null) { + capabilities.put(sParentCap, TOSCA_DEF.get(sParentCap)); + } else if (customDef != null && customDef.get(sParentCap) != null) { + capabilities.put(sParentCap, customDef.get(sParentCap)); + } + sParentCap = (String) ((LinkedHashMap) capabilities.get(sParentCap)).get("derived_from"); + } + } + return capabilities; + } + + public CapabilityTypeDef getParentType() { + // Return a capability this capability is derived from + if (defs == null) { + return null; + } + String pnode = derivedFrom(defs); + if (pnode != null && !pnode.isEmpty()) { + return new CapabilityTypeDef(name, pnode, nodetype, customDef); + } + return null; + } + + public boolean inheritsFrom(ArrayList typeNames) { + // Check this capability is in type_names + + // Check if this capability or some of its parent types + // are in the list of types: type_names + if (typeNames.contains(getType())) { + return true; + } else if (getParentType() 
!= null) { + return getParentType().inheritsFrom(typeNames); + } + return false; + } + + // getters/setters + + public LinkedHashMap getProperties() { + return properties; + } + + public String getName() { + return name; + } +} + +/*python +from toscaparser.elements.property_definition import PropertyDef +from toscaparser.elements.statefulentitytype import StatefulEntityType + + +class CapabilityTypeDef(StatefulEntityType): + '''TOSCA built-in capabilities type.''' + TOSCA_TYPEURI_CAPABILITY_ROOT = 'tosca.capabilities.Root' + + def __init__(self, name, ctype, ntype, custom_def=None): + self.name = name + super(CapabilityTypeDef, self).__init__(ctype, self.CAPABILITY_PREFIX, + custom_def) + self.nodetype = ntype + self.properties = None + self.custom_def = custom_def + if self.PROPERTIES in self.defs: + self.properties = self.defs[self.PROPERTIES] + self.parent_capabilities = self._get_parent_capabilities(custom_def) + + def get_properties_def_objects(self): + '''Return a list of property definition objects.''' + properties = [] + parent_properties = {} + if self.parent_capabilities: + for type, value in self.parent_capabilities.items(): + parent_properties[type] = value.get('properties') + if self.properties: + for prop, schema in self.properties.items(): + properties.append(PropertyDef(prop, None, schema)) + if parent_properties: + for parent, props in parent_properties.items(): + for prop, schema in props.items(): + # add parent property if not overridden by children type + if not self.properties or \ + prop not in self.properties.keys(): + properties.append(PropertyDef(prop, None, schema)) + return properties + + def get_properties_def(self): + '''Return a dictionary of property definition name-object pairs.''' + return {prop.name: prop + for prop in self.get_properties_def_objects()} + + def get_property_def_value(self, name): + '''Return the definition of a given property name.''' + props_def = self.get_properties_def() + if props_def and name in props_def: + 
return props_def[name].value + + def _get_parent_capabilities(self, custom_def=None): + capabilities = {} + parent_cap = self.parent_type + if parent_cap: + parent_cap = parent_cap.type + while parent_cap != self.TOSCA_TYPEURI_CAPABILITY_ROOT: + if parent_cap in self.TOSCA_DEF.keys(): + capabilities[parent_cap] = self.TOSCA_DEF[parent_cap] + elif custom_def and parent_cap in custom_def.keys(): + capabilities[parent_cap] = custom_def[parent_cap] + parent_cap = capabilities[parent_cap]['derived_from'] + return capabilities + + @property + def parent_type(self): + '''Return a capability this capability is derived from.''' + if not hasattr(self, 'defs'): + return None + pnode = self.derived_from(self.defs) + if pnode: + return CapabilityTypeDef(self.name, pnode, + self.nodetype, self.custom_def) + + def inherits_from(self, type_names): + '''Check this capability is in type_names + + Check if this capability or some of its parent types + are in the list of types: type_names + ''' + if self.type in type_names: + return True + elif self.parent_type: + return self.parent_type.inherits_from(type_names) + else: + return False*/ diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/DataType.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/DataType.java new file mode 100644 index 0000000..d8cf460 --- /dev/null +++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/DataType.java @@ -0,0 +1,136 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + +package org.onap.sdc.toscaparser.api.elements; + +import java.util.ArrayList; +import java.util.LinkedHashMap; + +public class DataType extends StatefulEntityType { + + LinkedHashMap customDef; + + public DataType(String _dataTypeName, LinkedHashMap _customDef) { + super(_dataTypeName, DATATYPE_NETWORK_PREFIX, _customDef); + + customDef = _customDef; + } + + public DataType getParentType() { + // Return a datatype this datatype is derived from + if (defs != null) { + String ptype = derivedFrom(defs); + if (ptype != null) { + return new DataType(ptype, customDef); + } + } + return null; + } + + public String getValueType() { + // Return 'type' section in the datatype schema + if (defs != null) { + return (String) entityValue(defs, "type"); + } + return null; + } + + public ArrayList getAllPropertiesObjects() { + //Return all properties objects defined in type and parent type + ArrayList propsDef = getPropertiesDefObjects(); + DataType ptype = getParentType(); + while (ptype != null) { + propsDef.addAll(ptype.getPropertiesDefObjects()); + ptype = ptype.getParentType(); + } + return propsDef; + } + + public LinkedHashMap getAllProperties() { + // Return a dictionary of all property definition name-object pairs + LinkedHashMap pno = new LinkedHashMap<>(); + for (PropertyDef pd : getAllPropertiesObjects()) { + pno.put(pd.getName(), pd); + } + return pno; + } + + public Object getAllPropertyValue(String name) { + // Return the value of a given property name + 
LinkedHashMap propsDef = getAllProperties(); + if (propsDef != null && propsDef.get(name) != null) { + return propsDef.get(name).getPDValue(); + } + return null; + } + + public LinkedHashMap getDefs() { + return defs; + } + +} + +/*python + +from toscaparser.elements.statefulentitytype import StatefulEntityType + + +class DataType(StatefulEntityType): + '''TOSCA built-in and user defined complex data type.''' + + def __init__(self, datatypename, custom_def=None): + super(DataType, self).__init__(datatypename, + self.DATATYPE_NETWORK_PREFIX, + custom_def) + self.custom_def = custom_def + + @property + def parent_type(self): + '''Return a datatype this datatype is derived from.''' + ptype = self.derived_from(self.defs) + if ptype: + return DataType(ptype, self.custom_def) + return None + + @property + def value_type(self): + '''Return 'type' section in the datatype schema.''' + return self.entity_value(self.defs, 'type') + + def get_all_properties_objects(self): + '''Return all properties objects defined in type and parent type.''' + props_def = self.get_properties_def_objects() + ptype = self.parent_type + while ptype: + props_def.extend(ptype.get_properties_def_objects()) + ptype = ptype.parent_type + return props_def + + def get_all_properties(self): + '''Return a dictionary of all property definition name-object pairs.''' + return {prop.name: prop + for prop in self.get_all_properties_objects()} + + def get_all_property_value(self, name): + '''Return the value of a given property name.''' + props_def = self.get_all_properties() + if props_def and name in props_def.key(): + return props_def[name].value +*/ diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/EntityType.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/EntityType.java new file mode 100644 index 0000000..efc6ac9 --- /dev/null +++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/EntityType.java @@ -0,0 +1,436 @@ +/*- + * 
============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + +package org.onap.sdc.toscaparser.api.elements; + +import java.io.IOException; +import java.io.InputStream; +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.Map; + +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.utils.CopyUtils; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; +import org.onap.sdc.toscaparser.api.extensions.ExtTools; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.yaml.snakeyaml.Yaml; + +public class EntityType { + + private static Logger log = LoggerFactory.getLogger(EntityType.class.getName()); + + private static final String TOSCA_DEFINITION_1_0_YAML = "TOSCA_definition_1_0.yaml"; + protected static final String DERIVED_FROM = "derived_from"; + protected static final String PROPERTIES = "properties"; + protected static final String ATTRIBUTES = "attributes"; + protected static final String REQUIREMENTS = "requirements"; + protected static final String 
INTERFACES = "interfaces"; + protected static final String CAPABILITIES = "capabilities"; + protected static final String TYPE = "type"; + protected static final String ARTIFACTS = "artifacts"; + + @SuppressWarnings("unused") + private static final String SECTIONS[] = { + DERIVED_FROM, PROPERTIES, ATTRIBUTES, REQUIREMENTS, + INTERFACES, CAPABILITIES, TYPE, ARTIFACTS + }; + + public static final String TOSCA_DEF_SECTIONS[] = { + "node_types", "data_types", "artifact_types", + "group_types", "relationship_types", + "capability_types", "interface_types", + "policy_types"}; + + + // TOSCA definition file + //private final static String path = EntityType.class.getProtectionDomain().getCodeSource().getLocation().getPath(); + + //private final static String path = EntityType.class.getClassLoader().getResource("TOSCA_definition_1_0.yaml").getFile(); + //private final static String TOSCA_DEF_FILE = EntityType.class.getClassLoader().getResourceAsStream("TOSCA_definition_1_0.yaml"); + + private static LinkedHashMap TOSCA_DEF_LOAD_AS_IS = loadTdf(); + + //EntityType.class.getClassLoader().getResourceAsStream("TOSCA_definition_1_0.yaml"); + + @SuppressWarnings("unchecked") + private static LinkedHashMap loadTdf() { + String toscaDefLocation = EntityType.class.getClassLoader().getResource(TOSCA_DEFINITION_1_0_YAML).getFile(); + InputStream input = EntityType.class.getClassLoader().getResourceAsStream(TOSCA_DEFINITION_1_0_YAML); + if (input == null) { + log.error("EntityType - loadTdf - Couldn't load TOSCA_DEF_FILE {}", toscaDefLocation); + } + Yaml yaml = new Yaml(); + Object loaded = yaml.load(input); + //@SuppressWarnings("unchecked") + return (LinkedHashMap) loaded; + } + + // Map of definition with pre-loaded values of TOSCA_DEF_FILE_SECTIONS + public static LinkedHashMap TOSCA_DEF; + + static { + TOSCA_DEF = new LinkedHashMap(); + for (String section : TOSCA_DEF_SECTIONS) { + @SuppressWarnings("unchecked") + LinkedHashMap value = (LinkedHashMap) 
TOSCA_DEF_LOAD_AS_IS.get(section); + if (value != null) { + for (String key : value.keySet()) { + TOSCA_DEF.put(key, value.get(key)); + } + } + } + } + + public static final String DEPENDSON = "tosca.relationships.DependsOn"; + public static final String HOSTEDON = "tosca.relationships.HostedOn"; + public static final String CONNECTSTO = "tosca.relationships.ConnectsTo"; + public static final String ATTACHESTO = "tosca.relationships.AttachesTo"; + public static final String LINKSTO = "tosca.relationships.network.LinksTo"; + public static final String BINDSTO = "tosca.relationships.network.BindsTo"; + + public static final String RELATIONSHIP_TYPE[] = { + "tosca.relationships.DependsOn", + "tosca.relationships.HostedOn", + "tosca.relationships.ConnectsTo", + "tosca.relationships.AttachesTo", + "tosca.relationships.network.LinksTo", + "tosca.relationships.network.BindsTo"}; + + public static final String NODE_PREFIX = "tosca.nodes."; + public static final String RELATIONSHIP_PREFIX = "tosca.relationships."; + public static final String CAPABILITY_PREFIX = "tosca.capabilities."; + public static final String INTERFACE_PREFIX = "tosca.interfaces."; + public static final String ARTIFACT_PREFIX = "tosca.artifacts."; + public static final String POLICY_PREFIX = "tosca.policies."; + public static final String GROUP_PREFIX = "tosca.groups."; + //currently the data types are defined only for network + // but may have changes in the future. 
+ public static final String DATATYPE_PREFIX = "tosca.datatypes."; + public static final String DATATYPE_NETWORK_PREFIX = DATATYPE_PREFIX + "network."; + public static final String TOSCA = "tosca"; + + protected String type; + protected LinkedHashMap defs = null; + + public Object getParentType() { + return null; + } + + public String derivedFrom(LinkedHashMap defs) { + // Return a type this type is derived from + return (String) entityValue(defs, "derived_from"); + } + + public boolean isDerivedFrom(String type_str) { + // Check if object inherits from the given type + // Returns true if this object is derived from 'type_str' + // False otherwise. + if (type == null || this.type.isEmpty()) { + return false; + } else if (type == type_str) { + return true; + } else if (getParentType() != null) { + return ((EntityType) getParentType()).isDerivedFrom(type_str); + } else { + return false; + } + } + + public Object entityValue(LinkedHashMap defs, String key) { + if (defs != null) { + return defs.get(key); + } + return null; + } + + @SuppressWarnings("unchecked") + public Object getValue(String ndtype, LinkedHashMap _defs, boolean parent) { + Object value = null; + if (_defs == null) { + if (defs == null) { + return null; + } + _defs = this.defs; + } + Object defndt = _defs.get(ndtype); + if (defndt != null) { + // copy the value to avoid that next operations add items in the + // item definitions + //value = copy.copy(defs[ndtype]) + value = CopyUtils.copyLhmOrAl(defndt); + } + + if (parent) { + EntityType p = this; + if (p != null) { + while (p != null) { + if (p.defs != null && p.defs.get(ndtype) != null) { + // get the parent value + Object parentValue = p.defs.get(ndtype); + if (value != null) { + if (value instanceof LinkedHashMap) { + for (Map.Entry me : ((LinkedHashMap) parentValue).entrySet()) { + String k = me.getKey(); + if (((LinkedHashMap) value).get(k) == null) { + ((LinkedHashMap) value).put(k, me.getValue()); + } + } + } + if (value instanceof ArrayList) 
{ + for (Object pValue : (ArrayList) parentValue) { + if (!((ArrayList) value).contains(pValue)) { + ((ArrayList) value).add(pValue); + } + } + } + } else { + // value = copy.copy(parent_value) + value = CopyUtils.copyLhmOrAl(parentValue); + } + } + p = (EntityType) p.getParentType(); + } + } + } + + return value; + } + + @SuppressWarnings("unchecked") + public Object getDefinition(String ndtype) { + Object value = null; + LinkedHashMap _defs; + // no point in hasattr, because we have it, and it + // doesn't do anything except emit an exception anyway + //if not hasattr(self, 'defs'): + // defs = None + // ValidationIssueCollector.appendException( + // ValidationError(message="defs is " + str(defs))) + //else: + // defs = self.defs + _defs = this.defs; + + + if (_defs != null && _defs.get(ndtype) != null) { + value = _defs.get(ndtype); + } + + Object p = getParentType(); + if (p != null) { + Object inherited = ((EntityType) p).getDefinition(ndtype); + if (inherited != null) { + // inherited = dict(inherited) WTF?!? + if (value == null) { + value = inherited; + } else { + //????? 
+ //inherited.update(value) + //value.update(inherited) + for (Map.Entry me : ((LinkedHashMap) inherited).entrySet()) { + ((LinkedHashMap) value).put(me.getKey(), me.getValue()); + } + } + } + } + return value; + } + + public static void updateDefinitions(String version) { + ExtTools exttools = new ExtTools(); + String extensionDefsFile = exttools.getDefsFile(version); + + try (InputStream input = EntityType.class.getClassLoader().getResourceAsStream(extensionDefsFile);) { + Yaml yaml = new Yaml(); + LinkedHashMap nfvDefFile = (LinkedHashMap) yaml.load(input); + LinkedHashMap nfvDef = new LinkedHashMap<>(); + for (String section : TOSCA_DEF_SECTIONS) { + if (nfvDefFile.get(section) != null) { + LinkedHashMap value = + (LinkedHashMap) nfvDefFile.get(section); + for (String key : value.keySet()) { + nfvDef.put(key, value.get(key)); + } + } + } + TOSCA_DEF.putAll(nfvDef); + } catch (IOException e) { + log.error("EntityType - updateDefinitions - Failed to update definitions from defs file {}", extensionDefsFile); + log.error("Exception:", e); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE280", + String.format("Failed to update definitions from defs file \"%s\" ", extensionDefsFile))); + return; + } + } +} + +/*python + +from toscaparser.common.exception import ValidationIssueCollector +from toscaparser.common.exception import ValidationError +from toscaparser.extensions.exttools import ExtTools +import org.onap.sdc.toscaparser.api.utils.yamlparser + +log = logging.getLogger('tosca') + + +class EntityType(object): + '''Base class for TOSCA elements.''' + + SECTIONS = (DERIVED_FROM, PROPERTIES, ATTRIBUTES, REQUIREMENTS, + INTERFACES, CAPABILITIES, TYPE, ARTIFACTS) = \ + ('derived_from', 'properties', 'attributes', 'requirements', + 'interfaces', 'capabilities', 'type', 'artifacts') + + TOSCA_DEF_SECTIONS = ['node_types', 'data_types', 'artifact_types', + 'group_types', 'relationship_types', + 'capability_types', 'interface_types', 
+ 'policy_types'] + + '''TOSCA definition file.''' + TOSCA_DEF_FILE = os.path.join( + os.path.dirname(os.path.abspath(__file__)), + "TOSCA_definition_1_0.yaml") + + loader = toscaparser.utils.yamlparser.load_yaml + + TOSCA_DEF_LOAD_AS_IS = loader(TOSCA_DEF_FILE) + + # Map of definition with pre-loaded values of TOSCA_DEF_FILE_SECTIONS + TOSCA_DEF = {} + for section in TOSCA_DEF_SECTIONS: + if section in TOSCA_DEF_LOAD_AS_IS.keys(): + value = TOSCA_DEF_LOAD_AS_IS[section] + for key in value.keys(): + TOSCA_DEF[key] = value[key] + + RELATIONSHIP_TYPE = (DEPENDSON, HOSTEDON, CONNECTSTO, ATTACHESTO, + LINKSTO, BINDSTO) = \ + ('tosca.relationships.DependsOn', + 'tosca.relationships.HostedOn', + 'tosca.relationships.ConnectsTo', + 'tosca.relationships.AttachesTo', + 'tosca.relationships.network.LinksTo', + 'tosca.relationships.network.BindsTo') + + NODE_PREFIX = 'tosca.nodes.' + RELATIONSHIP_PREFIX = 'tosca.relationships.' + CAPABILITY_PREFIX = 'tosca.capabilities.' + INTERFACE_PREFIX = 'tosca.interfaces.' + ARTIFACT_PREFIX = 'tosca.artifacts.' + POLICY_PREFIX = 'tosca.policies.' + GROUP_PREFIX = 'tosca.groups.' + # currently the data types are defined only for network + # but may have changes in the future. + DATATYPE_PREFIX = 'tosca.datatypes.' + DATATYPE_NETWORK_PREFIX = DATATYPE_PREFIX + 'network.' + TOSCA = 'tosca' + + def derived_from(self, defs): + '''Return a type this type is derived from.''' + return self.entity_value(defs, 'derived_from') + + def is_derived_from(self, type_str): + '''Check if object inherits from the given type. + + Returns true if this object is derived from 'type_str'. + False otherwise. 
+ ''' + if not self.type: + return False + elif self.type == type_str: + return True + elif self.parent_type: + return self.parent_type.is_derived_from(type_str) + else: + return False + + def entity_value(self, defs, key): + if key in defs: + return defs[key] + + def get_value(self, ndtype, defs=None, parent=None): + value = None + if defs is None: + if not hasattr(self, 'defs'): + return None + defs = self.defs + if ndtype in defs: + # copy the value to avoid that next operations add items in the + # item definitions + value = copy.copy(defs[ndtype]) + if parent: + p = self + if p: + while p: + if ndtype in p.defs: + # get the parent value + parent_value = p.defs[ndtype] + if value: + if isinstance(value, dict): + for k, v in parent_value.items(): + if k not in value.keys(): + value[k] = v + if isinstance(value, list): + for p_value in parent_value: + if p_value not in value: + value.append(p_value) + else: + value = copy.copy(parent_value) + p = p.parent_type + return value + + def get_definition(self, ndtype): + value = None + if not hasattr(self, 'defs'): + defs = None + ValidationIssueCollector.appendException( + ValidationError(message="defs is " + str(defs))) + else: + defs = self.defs + if defs is not None and ndtype in defs: + value = defs[ndtype] + p = self.parent_type + if p: + inherited = p.get_definition(ndtype) + if inherited: + inherited = dict(inherited) + if not value: + value = inherited + else: + inherited.update(value) + value.update(inherited) + return value + + +def update_definitions(version): + exttools = ExtTools() + extension_defs_file = exttools.get_defs_file(version) + loader = toscaparser.utils.yamlparser.load_yaml + nfv_def_file = loader(extension_defs_file) + nfv_def = {} + for section in EntityType.TOSCA_DEF_SECTIONS: + if section in nfv_def_file.keys(): + value = nfv_def_file[section] + for key in value.keys(): + nfv_def[key] = value[key] + EntityType.TOSCA_DEF.update(nfv_def) +*/ diff --git 
a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/GroupType.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/GroupType.java new file mode 100644 index 0000000..db6f2b7 --- /dev/null +++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/GroupType.java @@ -0,0 +1,263 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + +package org.onap.sdc.toscaparser.api.elements; + +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.Map; + +public class GroupType extends StatefulEntityType { + + private static final String DERIVED_FROM = "derived_from"; + private static final String VERSION = "version"; + private static final String METADATA = "metadata"; + private static final String DESCRIPTION = "description"; + private static final String PROPERTIES = "properties"; + private static final String MEMBERS = "members"; + private static final String INTERFACES = "interfaces"; + + private static final String[] SECTIONS = { + DERIVED_FROM, VERSION, METADATA, DESCRIPTION, PROPERTIES, MEMBERS, INTERFACES}; + + private String groupType; + private LinkedHashMap customDef; + private String groupDescription; + private String groupVersion; + //private LinkedHashMap groupProperties; + //private ArrayList groupMembers; + private LinkedHashMap metaData; + + @SuppressWarnings("unchecked") + public GroupType(String groupType, LinkedHashMap customDef) { + super(groupType, GROUP_PREFIX, customDef); + + this.groupType = groupType; + this.customDef = customDef; + validateFields(); + if (defs != null) { + groupDescription = (String) defs.get(DESCRIPTION); + groupVersion = (String) defs.get(VERSION); + //groupProperties = (LinkedHashMap)defs.get(PROPERTIES); + //groupMembers = (ArrayList)defs.get(MEMBERS); + Object mdo = defs.get(METADATA); + if (mdo instanceof LinkedHashMap) { + metaData = (LinkedHashMap) mdo; + } else { + metaData = null; + } + + if (metaData != null) { + validateMetadata(metaData); + } + } + } + + public GroupType getParentType() { + // Return a group statefulentity of this entity is derived from. 
+ if (defs == null) { + return null; + } + String pgroupEntity = derivedFrom(defs); + if (pgroupEntity != null) { + return new GroupType(pgroupEntity, customDef); + } + return null; + } + + public String getDescription() { + return groupDescription; + } + + public String getVersion() { + return groupVersion; + } + + @SuppressWarnings("unchecked") + public LinkedHashMap getInterfaces() { + Object ifo = getValue(INTERFACES, null, false); + if (ifo instanceof LinkedHashMap) { + return (LinkedHashMap) ifo; + } + return new LinkedHashMap(); + } + + private void validateFields() { + if (defs != null) { + for (String name : defs.keySet()) { + boolean bFound = false; + for (String sect : SECTIONS) { + if (name.equals(sect)) { + bFound = true; + break; + } + } + if (!bFound) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE120", String.format( + "UnknownFieldError: Group Type \"%s\" contains unknown field \"%s\"", + groupType, name))); + } + } + } + } + + @SuppressWarnings("unchecked") + private void validateMetadata(LinkedHashMap metadata) { + String mtt = (String) metadata.get("type"); + if (mtt != null && !mtt.equals("map") && !mtt.equals("tosca:map")) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE121", String.format( + "InvalidTypeError: \"%s\" defined in group for metadata is invalid", + mtt))); + } + for (String entrySchema : metadata.keySet()) { + Object estob = metadata.get(entrySchema); + if (estob instanceof LinkedHashMap) { + String est = (String) ((LinkedHashMap) estob).get("type"); + if (!est.equals("string")) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE122", String.format( + "InvalidTypeError: \"%s\" defined in group for metadata \"%s\" is invalid", + est, entrySchema))); + } + } + } + } + + public String getType() { + return groupType; + } + + @SuppressWarnings("unchecked") + public ArrayList getCapabilitiesObjects() { + // Return a 
list of capability objects + ArrayList typecapabilities = new ArrayList<>(); + LinkedHashMap caps = (LinkedHashMap) getValue(CAPABILITIES, null, true); + if (caps != null) { + // 'cname' is symbolic name of the capability + // 'cvalue' is a dict { 'type': } + for (Map.Entry me : caps.entrySet()) { + String cname = me.getKey(); + LinkedHashMap cvalue = (LinkedHashMap) me.getValue(); + String ctype = cvalue.get("type"); + CapabilityTypeDef cap = new CapabilityTypeDef(cname, ctype, type, customDef); + typecapabilities.add(cap); + } + } + return typecapabilities; + } + + public LinkedHashMap getCapabilities() { + // Return a dictionary of capability name-objects pairs + LinkedHashMap caps = new LinkedHashMap<>(); + for (CapabilityTypeDef ctd : getCapabilitiesObjects()) { + caps.put(ctd.getName(), ctd); + } + return caps; + } + +} + +/*python + +from toscaparser.common.exception import ValidationIssueCollector +from toscaparser.common.exception import InvalidTypeError +from toscaparser.common.exception import UnknownFieldError +from toscaparser.elements.statefulentitytype import StatefulEntityType + + +class GroupType(StatefulEntityType): + '''TOSCA built-in group type.''' + + SECTIONS = (DERIVED_FROM, VERSION, METADATA, DESCRIPTION, PROPERTIES, + MEMBERS, INTERFACES) = \ + ("derived_from", "version", "metadata", "description", + "properties", "members", "interfaces") + + def __init__(self, grouptype, custom_def=None): + super(GroupType, self).__init__(grouptype, self.GROUP_PREFIX, + custom_def) + self.custom_def = custom_def + self.grouptype = grouptype + self._validate_fields() + self.group_description = None + if self.DESCRIPTION in self.defs: + self.group_description = self.defs[self.DESCRIPTION] + + self.group_version = None + if self.VERSION in self.defs: + self.group_version = self.defs[self.VERSION] + + self.group_properties = None + if self.PROPERTIES in self.defs: + self.group_properties = self.defs[self.PROPERTIES] + + self.group_members = None + if 
self.MEMBERS in self.defs: + self.group_members = self.defs[self.MEMBERS] + + if self.METADATA in self.defs: + self.meta_data = self.defs[self.METADATA] + self._validate_metadata(self.meta_data) + + @property + def parent_type(self): + '''Return a group statefulentity of this entity is derived from.''' + if not hasattr(self, 'defs'): + return None + pgroup_entity = self.derived_from(self.defs) + if pgroup_entity: + return GroupType(pgroup_entity, self.custom_def) + + @property + def description(self): + return self.group_description + + @property + def version(self): + return self.group_version + + @property + def interfaces(self): + return self.get_value(self.INTERFACES) + + def _validate_fields(self): + if self.defs: + for name in self.defs.keys(): + if name not in self.SECTIONS: + ValidationIssueCollector.appendException( + UnknownFieldError(what='Group Type %s' + % self.grouptype, field=name)) + + def _validate_metadata(self, meta_data): + if not meta_data.get('type') in ['map', 'tosca:map']: + ValidationIssueCollector.appendException( + InvalidTypeError(what='"%s" defined in group for ' + 'metadata' % (meta_data.get('type')))) + for entry_schema, entry_schema_type in meta_data.items(): + if isinstance(entry_schema_type, dict) and not \ + entry_schema_type.get('type') == 'string': + ValidationIssueCollector.appendException( + InvalidTypeError(what='"%s" defined in group for ' + 'metadata "%s"' + % (entry_schema_type.get('type'), + entry_schema))) +*/ diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/InterfacesDef.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/InterfacesDef.java new file mode 100644 index 0000000..2862a11 --- /dev/null +++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/InterfacesDef.java @@ -0,0 +1,283 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * 
Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + +package org.onap.sdc.toscaparser.api.elements; + +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; +import org.onap.sdc.toscaparser.api.EntityTemplate; + +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.Map; + +public class InterfacesDef extends StatefulEntityType { + + public static final String LIFECYCLE = "tosca.interfaces.node.lifecycle.Standard"; + public static final String CONFIGURE = "tosca.interfaces.relationship.Configure"; + public static final String LIFECYCLE_SHORTNAME = "Standard"; + public static final String CONFIGURE_SHORTNAME = "Configure"; + + public static final String[] SECTIONS = { + LIFECYCLE, CONFIGURE, LIFECYCLE_SHORTNAME, CONFIGURE_SHORTNAME + }; + + public static final String IMPLEMENTATION = "implementation"; + public static final String DESCRIPTION = "description"; + public static final String INPUTS = "inputs"; + + public static final String[] INTERFACE_DEF_RESERVED_WORDS = { + "type", "inputs", "derived_from", "version", "description"}; + + private EntityType ntype; + private EntityTemplate nodeTemplate; + + private String 
operationName; + private Object operationDef; + private Object implementation; + private LinkedHashMap inputs; + private String description; + + @SuppressWarnings("unchecked") + public InterfacesDef(EntityType inodeType, + String interfaceType, + EntityTemplate inodeTemplate, + String iname, + Object ivalue) { + // void + super(); + + ntype = inodeType; + nodeTemplate = inodeTemplate; + type = interfaceType; + operationName = iname; + operationDef = ivalue; + implementation = null; + inputs = null; + defs = new LinkedHashMap<>(); + + if (interfaceType.equals(LIFECYCLE_SHORTNAME)) { + interfaceType = LIFECYCLE; + } + if (interfaceType.equals(CONFIGURE_SHORTNAME)) { + interfaceType = CONFIGURE; + } + + // only NodeType has getInterfaces "hasattr(ntype,interfaces)" + // while RelationshipType does not + if (ntype instanceof NodeType) { + if (((NodeType) ntype).getInterfaces() != null + && ((NodeType) ntype).getInterfaces().values().contains(interfaceType)) { + LinkedHashMap nii = (LinkedHashMap) + ((NodeType) ntype).getInterfaces().get(interfaceType); + interfaceType = (String) nii.get("type"); + } + } + if (inodeType != null) { + if (nodeTemplate != null && nodeTemplate.getCustomDef() != null + && nodeTemplate.getCustomDef().containsKey(interfaceType)) { + defs = (LinkedHashMap) + nodeTemplate.getCustomDef().get(interfaceType); + } else { + defs = (LinkedHashMap) TOSCA_DEF.get(interfaceType); + } + } + + if (ivalue != null) { + if (ivalue instanceof LinkedHashMap) { + for (Map.Entry me : ((LinkedHashMap) ivalue).entrySet()) { + if (me.getKey().equals(IMPLEMENTATION)) { + implementation = me.getValue(); + } else if (me.getKey().equals(INPUTS)) { + inputs = (LinkedHashMap) me.getValue(); + } else if (me.getKey().equals(DESCRIPTION)) { + description = (String) me.getValue(); + } else { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE123", String.format( + "UnknownFieldError: \"interfaces\" of template \"%s\" contain unknown field 
\"%s\"", + nodeTemplate.getName(), me.getKey()))); + } + } + } + } + } + + public ArrayList getLifecycleOps() { + if (defs != null) { + if (type.equals(LIFECYCLE)) { + return ops(); + } + } + return null; + } + + public ArrayList getInterfaceOps() { + if (defs != null) { + ArrayList ops = ops(); + ArrayList idrw = new ArrayList<>(); + for (int i = 0; i < InterfacesDef.INTERFACE_DEF_RESERVED_WORDS.length; i++) { + idrw.add(InterfacesDef.INTERFACE_DEF_RESERVED_WORDS[i]); + } + ops.removeAll(idrw); + return ops; + } + return null; + } + + public ArrayList getConfigureOps() { + if (defs != null) { + if (type.equals(CONFIGURE)) { + return ops(); + } + } + return null; + } + + private ArrayList ops() { + return new ArrayList(defs.keySet()); + } + + // getters/setters + + public LinkedHashMap getInputs() { + return inputs; + } + + public void setInput(String name, Object value) { + inputs.put(name, value); + } + + public Object getImplementation() { + return implementation; + } + + public void setImplementation(Object implementation) { + this.implementation = implementation; + } + + public String getDescription() { + return description; + } + + public void setDescription(String description) { + this.description = description; + } + + public String getOperationName() { + return operationName; + } + + public void setOperationName(String operationName) { + this.operationName = operationName; + } +} + + + +/*python + +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+ +from toscaparser.common.exception import ValidationIssueCollector +from toscaparser.common.exception import UnknownFieldError +from toscaparser.elements.statefulentitytype import StatefulEntityType + +SECTIONS = (LIFECYCLE, CONFIGURE, LIFECYCLE_SHORTNAME, + CONFIGURE_SHORTNAME) = \ + ('tosca.interfaces.node.lifecycle.Standard', + 'tosca.interfaces.relationship.Configure', + 'Standard', 'Configure') + +INTERFACEVALUE = (IMPLEMENTATION, INPUTS) = ('implementation', 'inputs') + +INTERFACE_DEF_RESERVED_WORDS = ['type', 'inputs', 'derived_from', 'version', + 'description'] + + +class InterfacesDef(StatefulEntityType): + '''TOSCA built-in interfaces type.''' + + def __init__(self, node_type, interfacetype, + node_template=None, name=None, value=None): + self.ntype = node_type + self.node_template = node_template + self.type = interfacetype + self.name = name + self.value = value + self.implementation = None + self.inputs = None + self.defs = {} + if interfacetype == LIFECYCLE_SHORTNAME: + interfacetype = LIFECYCLE + if interfacetype == CONFIGURE_SHORTNAME: + interfacetype = CONFIGURE + if hasattr(self.ntype, 'interfaces') \ + and self.ntype.interfaces \ + and interfacetype in self.ntype.interfaces: + interfacetype = self.ntype.interfaces[interfacetype]['type'] + if node_type: + if self.node_template and self.node_template.custom_def \ + and interfacetype in self.node_template.custom_def: + self.defs = self.node_template.custom_def[interfacetype] + else: + self.defs = self.TOSCA_DEF[interfacetype] + if value: + if isinstance(self.value, dict): + for i, j in self.value.items(): + if i == IMPLEMENTATION: + self.implementation = j + elif i == INPUTS: + self.inputs = j + else: + what = ('"interfaces" of template "%s"' % + self.node_template.name) + ValidationIssueCollector.appendException( + UnknownFieldError(what=what, field=i)) + else: + self.implementation = value + + @property + def lifecycle_ops(self): + if self.defs: + if self.type == LIFECYCLE: + return self._ops() 
+ + @property + def configure_ops(self): + if self.defs: + if self.type == CONFIGURE: + return self._ops() + + def _ops(self): + ops = [] + for name in list(self.defs.keys()): + ops.append(name) + return ops +*/ diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/Metadata.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/Metadata.java new file mode 100644 index 0000000..f3de49e --- /dev/null +++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/Metadata.java @@ -0,0 +1,62 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + +package org.onap.sdc.toscaparser.api.elements; + +import java.util.AbstractMap; +import java.util.HashMap; +import java.util.Map; +import java.util.stream.Collectors; + +public class Metadata { + + private final Map metadataMap; + + public Metadata(Map metadataMap) { + this.metadataMap = metadataMap != null ? 
metadataMap : new HashMap<>(); + } + + public String getValue(String key) { + + Object obj = this.metadataMap.get(key); + if (obj != null) { + return String.valueOf(obj); + } + return null; + } + + /** + * Get all properties of a Metadata object.
+ * This object represents the "metadata" section of some entity. + * + * @return all properties of this Metadata, as a key-value. + */ + public Map getAllProperties() { + return metadataMap.entrySet().stream().map(e -> new AbstractMap.SimpleEntry(e.getKey(), String.valueOf(e.getValue()))).collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + } + + @Override + public String toString() { + return "Metadata{" + + "metadataMap=" + metadataMap + + '}'; + } + +} diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/NodeType.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/NodeType.java new file mode 100644 index 0000000..c251be9 --- /dev/null +++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/NodeType.java @@ -0,0 +1,549 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + +package org.onap.sdc.toscaparser.api.elements; + +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.Map; + +public class NodeType extends StatefulEntityType { + // TOSCA built-in node type + + private static final String DERIVED_FROM = "derived_from"; + private static final String METADATA = "metadata"; + private static final String PROPERTIES = "properties"; + private static final String VERSION = "version"; + private static final String DESCRIPTION = "description"; + private static final String ATTRIBUTES = "attributes"; + private static final String REQUIREMENTS = "requirements"; + private static final String CAPABILITIES = "capabilities"; + private static final String INTERFACES = "interfaces"; + private static final String ARTIFACTS = "artifacts"; + + private static final String SECTIONS[] = { + DERIVED_FROM, METADATA, PROPERTIES, VERSION, DESCRIPTION, ATTRIBUTES, REQUIREMENTS, CAPABILITIES, INTERFACES, ARTIFACTS + }; + + private String ntype; + public LinkedHashMap customDef; + + public NodeType(String nttype, LinkedHashMap ntcustomDef) { + super(nttype, NODE_PREFIX, ntcustomDef); + ntype = nttype; + customDef = ntcustomDef; + _validateKeys(); + } + + public Object getParentType() { + // Return a node this node is derived from + if (defs == null) { + return null; + } + String pnode = derivedFrom(defs); + if (pnode != null && !pnode.isEmpty()) { + return new NodeType(pnode, customDef); + } + return null; + } + + @SuppressWarnings("unchecked") + public LinkedHashMap getRelationship() { + // Return a dictionary of relationships to other node types + + // This method returns a dictionary of named relationships that nodes + // of the current node type (self) can have to other nodes (of specific + // types) in a 
TOSCA template. + + LinkedHashMap relationship = new LinkedHashMap<>(); + ArrayList> requires; + Object treq = getAllRequirements(); + if (treq != null) { + // NOTE(sdmonov): Check if requires is a dict. + // If it is a dict convert it to a list of dicts. + // This is needed because currently the code below supports only + // lists as requirements definition. The following check will + // make sure if a map (dict) was provided it will be converted to + // a list before proceeding to the parsing. + if (treq instanceof LinkedHashMap) { + requires = new ArrayList<>(); + for (Map.Entry me : ((LinkedHashMap) treq).entrySet()) { + LinkedHashMap tl = new LinkedHashMap<>(); + tl.put(me.getKey(), me.getValue()); + requires.add(tl); + } + } else { + requires = (ArrayList>) treq; + } + + String keyword = null; + String nodeType = null; + for (LinkedHashMap require : requires) { + String relation = null; + for (Map.Entry re : require.entrySet()) { + String key = re.getKey(); + LinkedHashMap req = (LinkedHashMap) re.getValue(); + if (req.get("relationship") != null) { + Object trelation = req.get("relationship"); + // trelation is a string or a dict with "type" mapped to the string we want + if (trelation instanceof String) { + relation = (String) trelation; + } else { + if (((LinkedHashMap) trelation).get("type") != null) { + relation = (String) ((LinkedHashMap) trelation).get("type"); + } + } + nodeType = (String) req.get("node"); + //BUG meaningless?? 
LinkedHashMap value = req; + if (nodeType != null) { + keyword = "node"; + } else { + String getRelation = null; + // If nodeTypeByCap is a dict and has a type key + // we need to lookup the node type using + // the capability type + String captype = (String) req.get("capability"); + nodeType = _getNodeTypeByCap(captype); + if (nodeType != null) { + getRelation = _getRelation(key, nodeType); + } else { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE11", String.format( + "NodeTypeRequirementForCapabilityUnfulfilled: Node type: \"%s\" with requrement \"%s\" for node type with capability type \"%s\" is not found\\unfulfilled", this.ntype, key, captype))); + } + if (getRelation != null) { + relation = getRelation; + } + keyword = key; + } + } + } + if (relation == null || nodeType == null) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE11", String.format( + "NodeTypeForRelationUnfulfilled: Node type \"%s\" - relationship type \"%s\" is unfulfilled", this.ntype, relation))); + } else { + RelationshipType rtype = new RelationshipType(relation, keyword, customDef); + NodeType relatednode = new NodeType(nodeType, customDef); + relationship.put(rtype, relatednode); + } + } + } + return relationship; + + } + + @SuppressWarnings("unchecked") + private String _getNodeTypeByCap(String cap) { + // Find the node type that has the provided capability + + // This method will lookup all node types if they have the + // provided capability. 
+ // Filter the node types + ArrayList nodeTypes = new ArrayList<>(); + for (String nt : customDef.keySet()) { + if (nt.startsWith(NODE_PREFIX) || nt.startsWith("org.openecomp") && !nt.equals("tosca.nodes.Root")) { + nodeTypes.add(nt); + } + } + for (String nt : nodeTypes) { + LinkedHashMap nodeDef = (LinkedHashMap) customDef.get(nt); + if (nodeDef instanceof LinkedHashMap && nodeDef.get("capabilities") != null) { + LinkedHashMap nodeCaps = (LinkedHashMap) nodeDef.get("capabilities"); + if (nodeCaps != null) { + for (Object val : nodeCaps.values()) { + if (val instanceof LinkedHashMap) { + String tp = (String) ((LinkedHashMap) val).get("type"); + if (tp != null && tp.equals(cap)) { + return nt; + } + } + } + } + } + } + return null; + } + + @SuppressWarnings("unchecked") + private String _getRelation(String key, String ndtype) { + String relation = null; + NodeType ntype = new NodeType(ndtype, customDef); + LinkedHashMap caps = ntype.getCapabilities(); + if (caps != null && caps.get(key) != null) { + CapabilityTypeDef c = caps.get(key); + for (int i = 0; i < RELATIONSHIP_TYPE.length; i++) { + String r = RELATIONSHIP_TYPE[i]; + if (r != null) { + relation = r; + break; + } + LinkedHashMap rtypedef = (LinkedHashMap) customDef.get(r); + for (Object o : rtypedef.values()) { + LinkedHashMap properties = (LinkedHashMap) o; + if (properties.get(c.getType()) != null) { + relation = r; + break; + } + } + if (relation != null) { + break; + } else { + for (Object o : rtypedef.values()) { + LinkedHashMap properties = (LinkedHashMap) o; + if (properties.get(c.getParentType()) != null) { + relation = r; + break; + } + } + } + } + } + return relation; + } + + @SuppressWarnings("unchecked") + public ArrayList getCapabilitiesObjects() { + // Return a list of capability objects + ArrayList typecapabilities = new ArrayList<>(); + LinkedHashMap caps = (LinkedHashMap) getValue(CAPABILITIES, null, true); + if (caps != null) { + // 'cname' is symbolic name of the capability + // 'cvalue' 
is a dict { 'type': } + for (Map.Entry me : caps.entrySet()) { + String cname = me.getKey(); + LinkedHashMap cvalue = (LinkedHashMap) me.getValue(); + String ctype = cvalue.get("type"); + CapabilityTypeDef cap = new CapabilityTypeDef(cname, ctype, type, customDef); + typecapabilities.add(cap); + } + } + return typecapabilities; + } + + public LinkedHashMap getCapabilities() { + // Return a dictionary of capability name-objects pairs + LinkedHashMap caps = new LinkedHashMap<>(); + for (CapabilityTypeDef ctd : getCapabilitiesObjects()) { + caps.put(ctd.getName(), ctd); + } + return caps; + } + + @SuppressWarnings("unchecked") + public ArrayList getRequirements() { + return (ArrayList) getValue(REQUIREMENTS, null, true); + } + + public ArrayList getAllRequirements() { + return getRequirements(); + } + + @SuppressWarnings("unchecked") + public LinkedHashMap getInterfaces() { + return (LinkedHashMap) getValue(INTERFACES, null, false); + } + + + @SuppressWarnings("unchecked") + public ArrayList getLifecycleInputs() { + // Return inputs to life cycle operations if found + ArrayList inputs = new ArrayList<>(); + LinkedHashMap interfaces = getInterfaces(); + if (interfaces != null) { + for (Map.Entry me : interfaces.entrySet()) { + String iname = me.getKey(); + LinkedHashMap ivalue = (LinkedHashMap) me.getValue(); + if (iname.equals(InterfacesDef.LIFECYCLE)) { + for (Map.Entry ie : ivalue.entrySet()) { + if (ie.getKey().equals("input")) { + LinkedHashMap y = (LinkedHashMap) ie.getValue(); + for (String i : y.keySet()) { + inputs.add(i); + } + } + } + } + } + } + return inputs; + } + + public ArrayList getLifecycleOperations() { + // Return available life cycle operations if found + ArrayList ops = null; + LinkedHashMap interfaces = getInterfaces(); + if (interfaces != null) { + InterfacesDef i = new InterfacesDef(this, InterfacesDef.LIFECYCLE, null, null, null); + ops = i.getLifecycleOps(); + } + return ops; + } + + public CapabilityTypeDef getCapability(String name) { + 
//BUG?? the python code has to be wrong + // it refers to a bad attribute 'value'... + LinkedHashMap caps = getCapabilities(); + if (caps != null) { + return caps.get(name); + } + return null; + /* + def get_capability(self, name): + caps = self.get_capabilities() + if caps and name in caps.keys(): + return caps[name].value + */ + } + + public String getCapabilityType(String name) { + //BUG?? the python code has to be wrong + // it refers to a bad attribute 'value'... + CapabilityTypeDef captype = getCapability(name); + if (captype != null) { + return captype.getType(); + } + return null; + /* + def get_capability_type(self, name): + captype = self.get_capability(name) + if captype and name in captype.keys(): + return captype[name].value + */ + } + + private void _validateKeys() { + if (defs != null) { + for (String key : defs.keySet()) { + boolean bFound = false; + for (int i = 0; i < SECTIONS.length; i++) { + if (key.equals(SECTIONS[i])) { + bFound = true; + break; + } + } + if (!bFound) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE124", String.format( + "UnknownFieldError: Nodetype \"%s\" has unknown field \"%s\"", ntype, key))); + } + } + } + } + +} + +/*python + +from toscaparser.common.exception import ValidationIssueCollector +from toscaparser.common.exception import UnknownFieldError +from toscaparser.elements.capabilitytype import CapabilityTypeDef +import org.openecomp.sdc.toscaparser.api.elements.interfaces as ifaces +from toscaparser.elements.interfaces import InterfacesDef +from toscaparser.elements.relationshiptype import RelationshipType +from toscaparser.elements.statefulentitytype import StatefulEntityType + + +class NodeType(StatefulEntityType): + '''TOSCA built-in node type.''' + SECTIONS = (DERIVED_FROM, METADATA, PROPERTIES, VERSION, DESCRIPTION, ATTRIBUTES, REQUIREMENTS, CAPABILITIES, INTERFACES, ARTIFACTS) = \ + ('derived_from', 'metadata', 'properties', 'version', + 'description', 'attributes', 
'requirements', 'capabilities', + 'interfaces', 'artifacts') + + def __init__(self, ntype, custom_def=None): + super(NodeType, self).__init__(ntype, self.NODE_PREFIX, custom_def) + self.ntype = ntype + self.custom_def = custom_def + self._validate_keys() + + @property + def parent_type(self): + '''Return a node this node is derived from.''' + if not hasattr(self, 'defs'): + return None + pnode = self.derived_from(self.defs) + if pnode: + return NodeType(pnode, self.custom_def) + + @property + def relationship(self): + '''Return a dictionary of relationships to other node types. + + This method returns a dictionary of named relationships that nodes + of the current node type (self) can have to other nodes (of specific + types) in a TOSCA template. + + ''' + relationship = {} + requires = self.get_all_requirements() + if requires: + # NOTE(sdmonov): Check if requires is a dict. + # If it is a dict convert it to a list of dicts. + # This is needed because currently the code below supports only + # lists as requirements definition. The following check will + # make sure if a map (dict) was provided it will be converted to + # a list before proceeding to the parsing. + if isinstance(requires, dict): + requires = [{key: value} for key, value in requires.items()] + + keyword = None + node_type = None + for require in requires: + for key, req in require.items(): + if 'relationship' in req: + relation = req.get('relationship') + if 'type' in relation: + relation = relation.get('type') + node_type = req.get('node') + value = req + if node_type: + keyword = 'node' + else: + # If value is a dict and has a type key + # we need to lookup the node type using + # the capability type + value = req + if isinstance(value, dict): + captype = value['capability'] + value = (self. 
+ _get_node_type_by_cap(key, captype)) + relation = self._get_relation(key, value) + keyword = key + node_type = value + rtype = RelationshipType(relation, keyword, self.custom_def) + relatednode = NodeType(node_type, self.custom_def) + relationship[rtype] = relatednode + return relationship + + def _get_node_type_by_cap(self, key, cap): + '''Find the node type that has the provided capability + + This method will lookup all node types if they have the + provided capability. + ''' + + # Filter the node types + node_types = [node_type for node_type in self.TOSCA_DEF.keys() + if node_type.startswith(self.NODE_PREFIX) and + node_type != 'tosca.nodes.Root'] + + for node_type in node_types: + node_def = self.TOSCA_DEF[node_type] + if isinstance(node_def, dict) and 'capabilities' in node_def: + node_caps = node_def['capabilities'] + for value in node_caps.values(): + if isinstance(value, dict) and \ + 'type' in value and value['type'] == cap: + return node_type + + def _get_relation(self, key, ndtype): + relation = None + ntype = NodeType(ndtype) + caps = ntype.get_capabilities() + if caps and key in caps.keys(): + c = caps[key] + for r in self.RELATIONSHIP_TYPE: + rtypedef = ntype.TOSCA_DEF[r] + for properties in rtypedef.values(): + if c.type in properties: + relation = r + break + if relation: + break + else: + for properties in rtypedef.values(): + if c.parent_type in properties: + relation = r + break + return relation + + def get_capabilities_objects(self): + '''Return a list of capability objects.''' + typecapabilities = [] + caps = self.get_value(self.CAPABILITIES, None, True) + if caps: + # 'name' is symbolic name of the capability + # 'value' is a dict { 'type': } + for name, value in caps.items(): + ctype = value.get('type') + cap = CapabilityTypeDef(name, ctype, self.type, + self.custom_def) + typecapabilities.append(cap) + return typecapabilities + + def get_capabilities(self): + '''Return a dictionary of capability name-objects pairs.''' + return {cap.name: 
cap + for cap in self.get_capabilities_objects()} + + @property + def requirements(self): + return self.get_value(self.REQUIREMENTS, None, True) + + def get_all_requirements(self): + return self.requirements + + @property + def interfaces(self): + return self.get_value(self.INTERFACES) + + @property + def lifecycle_inputs(self): + '''Return inputs to life cycle operations if found.''' + inputs = [] + interfaces = self.interfaces + if interfaces: + for name, value in interfaces.items(): + if name == ifaces.LIFECYCLE: + for x, y in value.items(): + if x == 'inputs': + for i in y.iterkeys(): + inputs.append(i) + return inputs + + @property + def lifecycle_operations(self): + '''Return available life cycle operations if found.''' + ops = None + interfaces = self.interfaces + if interfaces: + i = InterfacesDef(self.type, ifaces.LIFECYCLE) + ops = i.lifecycle_ops + return ops + + def get_capability(self, name): + caps = self.get_capabilities() + if caps and name in caps.keys(): + return caps[name].value + + def get_capability_type(self, name): + captype = self.get_capability(name) + if captype and name in captype.keys(): + return captype[name].value + + def _validate_keys(self): + if self.defs: + for key in self.defs.keys(): + if key not in self.SECTIONS: + ValidationIssueCollector.appendException( + UnknownFieldError(what='Nodetype"%s"' % self.ntype, + field=key)) +*/ diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/PolicyType.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/PolicyType.java new file mode 100644 index 0000000..b227a31 --- /dev/null +++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/PolicyType.java @@ -0,0 +1,309 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. 
+ * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + +package org.onap.sdc.toscaparser.api.elements; + +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.utils.TOSCAVersionProperty; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +import java.util.ArrayList; +import java.util.LinkedHashMap; + +public class PolicyType extends StatefulEntityType { + + private static final String DERIVED_FROM = "derived_from"; + private static final String METADATA = "metadata"; + private static final String PROPERTIES = "properties"; + private static final String VERSION = "version"; + private static final String DESCRIPTION = "description"; + private static final String TARGETS = "targets"; + private static final String TRIGGERS = "triggers"; + private static final String TYPE = "type"; + + private static final String[] SECTIONS = { + DERIVED_FROM, METADATA, PROPERTIES, VERSION, DESCRIPTION, TARGETS, TRIGGERS, TYPE + }; + + private LinkedHashMap customDef; + private String policyDescription; + private Object policyVersion; + private LinkedHashMap properties; + private LinkedHashMap parentPolicies; + private LinkedHashMap metaData; + private ArrayList targetsList; + + + public PolicyType(String type, LinkedHashMap customDef) { + super(type, 
POLICY_PREFIX, customDef); + + this.type = type; + this.customDef = customDef; + validateKeys(); + + metaData = null; + if (defs != null && defs.get(METADATA) != null) { + metaData = (LinkedHashMap) defs.get(METADATA); + validateMetadata(metaData); + } + + properties = null; + if (defs != null && defs.get(PROPERTIES) != null) { + properties = (LinkedHashMap) defs.get(PROPERTIES); + } + parentPolicies = getParentPolicies(); + + policyVersion = null; + if (defs != null && defs.get(VERSION) != null) { + policyVersion = (new TOSCAVersionProperty( + defs.get(VERSION).toString())).getVersion(); + } + + policyDescription = null; + if (defs != null && defs.get(DESCRIPTION) != null) { + policyDescription = (String) defs.get(DESCRIPTION); + } + + targetsList = null; + if (defs != null && defs.get(TARGETS) != null) { + targetsList = (ArrayList) defs.get(TARGETS); + validateTargets(targetsList, this.customDef); + } + + } + + private LinkedHashMap getParentPolicies() { + LinkedHashMap policies = new LinkedHashMap<>(); + String parentPolicy; + if (getParentType() != null) { + parentPolicy = getParentType().getType(); + } else { + parentPolicy = null; + } + if (parentPolicy != null) { + while (parentPolicy != null && !parentPolicy.equals("tosca.policies.Root")) { + policies.put(parentPolicy, TOSCA_DEF.get(parentPolicy)); + parentPolicy = (String) + ((LinkedHashMap) policies.get(parentPolicy)).get("derived_from);"); + } + } + return policies; + } + + public String getType() { + return type; + } + + public PolicyType getParentType() { + // Return a policy statefulentity of this node is derived from + if (defs == null) { + return null; + } + String policyEntity = derivedFrom(defs); + if (policyEntity != null) { + return new PolicyType(policyEntity, customDef); + } + return null; + } + + public Object getPolicy(String name) { + // Return the definition of a policy field by name + if (defs != null && defs.get(name) != null) { + return defs.get(name); + } + return null; + } + + public 
ArrayList getTargets() { + // Return targets + return targetsList; + } + + public String getDescription() { + return policyDescription; + } + + public Object getVersion() { + return policyVersion; + } + + private void validateKeys() { + for (String key : defs.keySet()) { + boolean bFound = false; + for (String sect : SECTIONS) { + if (key.equals(sect)) { + bFound = true; + break; + } + } + if (!bFound) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE125", String.format( + "UnknownFieldError: Policy \"%s\" contains unknown field \"%s\"", + type, key))); + } + } + } + + private void validateTargets(ArrayList targetsList, + LinkedHashMap customDef) { + for (String nodetype : targetsList) { + if (customDef.get(nodetype) == null) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE126", String.format( + "InvalidTypeError: \"%s\" defined in targets for policy \"%s\"", + nodetype, type))); + + } + } + } + + private void validateMetadata(LinkedHashMap metaData) { + String mtype = (String) metaData.get("type"); + if (mtype != null && !mtype.equals("map") && !mtype.equals("tosca:map")) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE127", String.format( + "InvalidTypeError: \"%s\" defined in policy for metadata", + mtype))); + } + for (String entrySchema : this.metaData.keySet()) { + Object estob = this.metaData.get(entrySchema); + if (estob instanceof LinkedHashMap) { + String est = (String) + ((LinkedHashMap) estob).get("type"); + if (!est.equals("string")) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE128", String.format( + "InvalidTypeError: \"%s\" defined in policy for metadata \"%s\"", + est, entrySchema))); + } + } + } + } + +} + +/*python + +from toscaparser.common.exception import ValidationIssueCollector +from toscaparser.common.exception import InvalidTypeError +from toscaparser.common.exception import 
UnknownFieldError +from toscaparser.elements.statefulentitytype import StatefulEntityType +from toscaparser.utils.validateutils import TOSCAVersionProperty + + +class PolicyType(StatefulEntityType): + + '''TOSCA built-in policies type.''' + SECTIONS = (DERIVED_FROM, METADATA, PROPERTIES, VERSION, DESCRIPTION, TARGETS) = \ + ('derived_from', 'metadata', 'properties', 'version', + 'description', 'targets') + + def __init__(self, ptype, custom_def=None): + super(PolicyType, self).__init__(ptype, self.POLICY_PREFIX, + custom_def) + self.type = ptype + self.custom_def = custom_def + self._validate_keys() + + self.meta_data = None + if self.METADATA in self.defs: + self.meta_data = self.defs[self.METADATA] + self._validate_metadata(self.meta_data) + + self.properties = None + if self.PROPERTIES in self.defs: + self.properties = self.defs[self.PROPERTIES] + self.parent_policies = self._get_parent_policies() + + self.policy_version = None + if self.VERSION in self.defs: + self.policy_version = TOSCAVersionProperty( + self.defs[self.VERSION]).get_version() + + self.policy_description = self.defs[self.DESCRIPTION] \ + if self.DESCRIPTION in self.defs else None + + self.targets_list = None + if self.TARGETS in self.defs: + self.targets_list = self.defs[self.TARGETS] + self._validate_targets(self.targets_list, custom_def) + + def _get_parent_policies(self): + policies = {} + parent_policy = self.parent_type.type if self.parent_type else None + if parent_policy: + while parent_policy != 'tosca.policies.Root': + policies[parent_policy] = self.TOSCA_DEF[parent_policy] + parent_policy = policies[parent_policy]['derived_from'] + return policies + + @property + def parent_type(self): + '''Return a policy statefulentity of this node is derived from.''' + if not hasattr(self, 'defs'): + return None + ppolicy_entity = self.derived_from(self.defs) + if ppolicy_entity: + return PolicyType(ppolicy_entity, self.custom_def) + + def get_policy(self, name): + '''Return the definition of a 
policy field by name.''' + if name in self.defs: + return self.defs[name] + + @property + def targets(self): + '''Return targets.''' + return self.targets_list + + @property + def description(self): + return self.policy_description + + @property + def version(self): + return self.policy_version + + def _validate_keys(self): + for key in self.defs.keys(): + if key not in self.SECTIONS: + ValidationIssueCollector.appendException( + UnknownFieldError(what='Policy "%s"' % self.type, + field=key)) + + def _validate_targets(self, targets_list, custom_def): + for nodetype in targets_list: + if nodetype not in custom_def: + ValidationIssueCollector.appendException( + InvalidTypeError(what='"%s" defined in targets for ' + 'policy "%s"' % (nodetype, self.type))) + + def _validate_metadata(self, meta_data): + if not meta_data.get('type') in ['map', 'tosca:map']: + ValidationIssueCollector.appendException( + InvalidTypeError(what='"%s" defined in policy for ' + 'metadata' % (meta_data.get('type')))) + + for entry_schema, entry_schema_type in meta_data.items(): + if isinstance(entry_schema_type, dict) and not \ + entry_schema_type.get('type') == 'string': + ValidationIssueCollector.appendException( + InvalidTypeError(what='"%s" defined in policy for ' + 'metadata "%s"' + % (entry_schema_type.get('type'), + entry_schema))) +*/ diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/PortSpec.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/PortSpec.java new file mode 100644 index 0000000..01fb9fc --- /dev/null +++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/PortSpec.java @@ -0,0 +1,177 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. 
+ * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + +package org.onap.sdc.toscaparser.api.elements; + +import org.onap.sdc.toscaparser.api.DataEntity; +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; +import org.onap.sdc.toscaparser.api.utils.ValidateUtils; + +import java.util.LinkedHashMap; + +public class PortSpec { + // Parent class for tosca.datatypes.network.PortSpec type + + private static final String SHORTNAME = "PortSpec"; + private static final String TYPE_URI = "tosca.datatypes.network." 
+ SHORTNAME; + + private static final String PROTOCOL = "protocol"; + private static final String SOURCE = "source"; + private static final String SOURCE_RANGE = "source_range"; + private static final String TARGET = "target"; + private static final String TARGET_RANGE = "target_range"; + + private static final String PROPERTY_NAMES[] = { + PROTOCOL, SOURCE, SOURCE_RANGE, + TARGET, TARGET_RANGE + }; + + // todo(TBD) May want to make this a subclass of DataType + // and change init method to set PortSpec's properties + public PortSpec() { + + } + + // The following additional requirements MUST be tested: + // 1) A valid PortSpec MUST have at least one of the following properties: + // target, target_range, source or source_range. + // 2) A valid PortSpec MUST have a value for the source property that + // is within the numeric range specified by the property source_range + // when source_range is specified. + // 3) A valid PortSpec MUST have a value for the target property that is + // within the numeric range specified by the property target_range + // when target_range is specified. 
+ public static void validateAdditionalReq(Object _properties, + String propName, + LinkedHashMap custom_def) { + + try { + LinkedHashMap properties = (LinkedHashMap) _properties; + Object source = properties.get(PortSpec.SOURCE); + Object sourceRange = properties.get(PortSpec.SOURCE_RANGE); + Object target = properties.get(PortSpec.TARGET); + Object targetRange = properties.get(PortSpec.TARGET_RANGE); + + // verify one of the specified values is set + if (source == null && sourceRange == null && + target == null && targetRange == null) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE129", String.format( + "InvalidTypeAdditionalRequirementsError: Additional requirements for type \"%s\" not met", + TYPE_URI))); + } + // Validate source value is in specified range + if (source != null && sourceRange != null) { + ValidateUtils.validateValueInRange(source, sourceRange, SOURCE); + } else { + DataEntity portdef = new DataEntity("PortDef", source, null, SOURCE); + portdef.validate(); + } + // Validate target value is in specified range + if (target != null && targetRange != null) { + ValidateUtils.validateValueInRange(target, targetRange, SOURCE); + } else { + DataEntity portdef = new DataEntity("PortDef", source, null, TARGET); + portdef.validate(); + } + } catch (Exception e) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE130", String.format( + "ValueError: \"%s\" do not meet requirements for type \"%s\"", + _properties.toString(), SHORTNAME))); + } + } + +} + +/*python + +from toscaparser.common.exception import ValidationIssueCollector +from toscaparser.common.exception import InvalidTypeAdditionalRequirementsError +from toscaparser.utils.gettextutils import _ +import org.openecomp.sdc.toscaparser.api.utils.validateutils as validateutils + +log = logging.getLogger('tosca') + + +class PortSpec(object): + '''Parent class for tosca.datatypes.network.PortSpec type.''' + + SHORTNAME = 
'PortSpec' + TYPE_URI = 'tosca.datatypes.network.' + SHORTNAME + + PROPERTY_NAMES = ( + PROTOCOL, SOURCE, SOURCE_RANGE, + TARGET, TARGET_RANGE + ) = ( + 'protocol', 'source', 'source_range', + 'target', 'target_range' + ) + + # TODO(TBD) May want to make this a subclass of DataType + # and change init method to set PortSpec's properties + def __init__(self): + pass + + # The following additional requirements MUST be tested: + # 1) A valid PortSpec MUST have at least one of the following properties: + # target, target_range, source or source_range. + # 2) A valid PortSpec MUST have a value for the source property that + # is within the numeric range specified by the property source_range + # when source_range is specified. + # 3) A valid PortSpec MUST have a value for the target property that is + # within the numeric range specified by the property target_range + # when target_range is specified. + @staticmethod + def validate_additional_req(properties, prop_name, custom_def=None, ): + try: + source = properties.get(PortSpec.SOURCE) + source_range = properties.get(PortSpec.SOURCE_RANGE) + target = properties.get(PortSpec.TARGET) + target_range = properties.get(PortSpec.TARGET_RANGE) + + # verify one of the specified values is set + if source is None and source_range is None and \ + target is None and target_range is None: + ValidationIssueCollector.appendException( + InvalidTypeAdditionalRequirementsError( + type=PortSpec.TYPE_URI)) + # Validate source value is in specified range + if source and source_range: + validateutils.validate_value_in_range(source, source_range, + PortSpec.SOURCE) + else: + from toscaparser.dataentity import DataEntity + portdef = DataEntity('PortDef', source, None, PortSpec.SOURCE) + portdef.validate() + # Validate target value is in specified range + if target and target_range: + validateutils.validate_value_in_range(target, target_range, + PortSpec.TARGET) + else: + from toscaparser.dataentity import DataEntity + portdef = 
DataEntity('PortDef', source, None, PortSpec.TARGET) + portdef.validate() + except Exception: + msg = _('"%(value)s" do not meet requirements ' + 'for type "%(type)s".') \ + % {'value': properties, 'type': PortSpec.SHORTNAME} + ValidationIssueCollector.appendException( + ValueError(msg)) +*/ diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/PropertyDef.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/PropertyDef.java new file mode 100644 index 0000000..484d17e --- /dev/null +++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/PropertyDef.java @@ -0,0 +1,249 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + +package org.onap.sdc.toscaparser.api.elements; + +import java.util.LinkedHashMap; +import java.util.Map; + +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +public class PropertyDef { + + private static final String PROPERTY_KEYNAME_DEFAULT = "default"; + private static final String PROPERTY_KEYNAME_REQUIRED = "required"; + private static final String PROPERTY_KEYNAME_STATUS = "status"; + private static final String VALID_PROPERTY_KEYNAMES[] = { + PROPERTY_KEYNAME_DEFAULT, + PROPERTY_KEYNAME_REQUIRED, + PROPERTY_KEYNAME_STATUS}; + + private static final boolean PROPERTY_REQUIRED_DEFAULT = true; + + private static final String VALID_REQUIRED_VALUES[] = {"true", "false"}; + + private static final String PROPERTY_STATUS_SUPPORTED = "supported"; + private static final String PROPERTY_STATUS_EXPERIMENTAL = "experimental"; + private static final String VALID_STATUS_VALUES[] = { + PROPERTY_STATUS_SUPPORTED, PROPERTY_STATUS_EXPERIMENTAL}; + + private static final String PROPERTY_STATUS_DEFAULT = PROPERTY_STATUS_SUPPORTED; + + private String name; + private Object value; + private LinkedHashMap schema; + private String _status; + private boolean _required; + + public PropertyDef(String pdName, Object pdValue, + LinkedHashMap pdSchema) { + name = pdName; + value = pdValue; + schema = pdSchema; + _status = PROPERTY_STATUS_DEFAULT; + _required = PROPERTY_REQUIRED_DEFAULT; + + if (schema != null) { + // Validate required 'type' property exists + if (schema.get("type") == null) { + //msg = (_('Schema definition of "%(pname)s" must have a "type" ' + // 'attribute.') % dict(pname=self.name)) + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE131", String.format( + "InvalidSchemaError: Schema definition of \"%s\" must have a \"type\" attribute", name))); + } + 
_loadRequiredAttrFromSchema(); + _loadStatusAttrFromSchema(); + } + } + + public Object getDefault() { + if (schema != null) { + for (Map.Entry me : schema.entrySet()) { + if (me.getKey().equals(PROPERTY_KEYNAME_DEFAULT)) { + return me.getValue(); + } + } + } + return null; + } + + public boolean isRequired() { + return _required; + } + + private void _loadRequiredAttrFromSchema() { + // IF 'required' keyname exists verify it's a boolean, + // if so override default + Object val = schema.get(PROPERTY_KEYNAME_REQUIRED); + if (val != null) { + if (val instanceof Boolean) { + _required = (boolean) val; + } else { + //valid_values = ', '.join(self.VALID_REQUIRED_VALUES) + //attr = self.PROPERTY_KEYNAME_REQUIRED + //TOSCAException.generate_inv_schema_property_error(self, + // attr, + // value, + // valid_values) + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE132", String.format( + "Schema definition of \"%s\" has \"required\" attribute with an invalid value", + name))); + } + } + } + + public String getStatus() { + return _status; + } + + private void _loadStatusAttrFromSchema() { + // IF 'status' keyname exists verify it's a boolean, + // if so override default + String sts = (String) schema.get(PROPERTY_KEYNAME_STATUS); + if (sts != null) { + boolean bFound = false; + for (String vsv : VALID_STATUS_VALUES) { + if (vsv.equals(sts)) { + bFound = true; + break; + } + } + if (bFound) { + _status = sts; + } else { + //valid_values = ', '.join(self.VALID_STATUS_VALUES) + //attr = self.PROPERTY_KEYNAME_STATUS + //TOSCAException.generate_inv_schema_property_error(self, + // attr, + // value, + // valid_values) + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE006", String.format( + "Schema definition of \"%s\" has \"status\" attribute with an invalid value", + name))); + } + } + } + + public String getName() { + return name; + } + + public LinkedHashMap getSchema() { + return schema; + } + + public 
Object getPDValue() { + // there's getValue in EntityType... + return value; + } + +} +/*python + +from toscaparser.common.exception import ValidationIssueCollector +from toscaparser.common.exception import InvalidSchemaError +from toscaparser.common.exception import TOSCAException +from toscaparser.utils.gettextutils import _ + + +class PropertyDef(object): + '''TOSCA built-in Property type.''' + + VALID_PROPERTY_KEYNAMES = (PROPERTY_KEYNAME_DEFAULT, + PROPERTY_KEYNAME_REQUIRED, + PROPERTY_KEYNAME_STATUS) = \ + ('default', 'required', 'status') + + PROPERTY_REQUIRED_DEFAULT = True + + VALID_REQUIRED_VALUES = ['true', 'false'] + VALID_STATUS_VALUES = (PROPERTY_STATUS_SUPPORTED, + PROPERTY_STATUS_EXPERIMENTAL) = \ + ('supported', 'experimental') + + PROPERTY_STATUS_DEFAULT = PROPERTY_STATUS_SUPPORTED + + def __init__(self, name, value=None, schema=None): + self.name = name + self.value = value + self.schema = schema + self._status = self.PROPERTY_STATUS_DEFAULT + self._required = self.PROPERTY_REQUIRED_DEFAULT + + # Validate required 'type' property exists + try: + self.schema['type'] + except KeyError: + msg = (_('Schema definition of "%(pname)s" must have a "type" ' + 'attribute.') % dict(pname=self.name)) + ValidationIssueCollector.appendException( + InvalidSchemaError(message=msg)) + + if self.schema: + self._load_required_attr_from_schema() + self._load_status_attr_from_schema() + + @property + def default(self): + if self.schema: + for prop_key, prop_value in self.schema.items(): + if prop_key == self.PROPERTY_KEYNAME_DEFAULT: + return prop_value + return None + + @property + def required(self): + return self._required + + def _load_required_attr_from_schema(self): + # IF 'required' keyname exists verify it's a boolean, + # if so override default + if self.PROPERTY_KEYNAME_REQUIRED in self.schema: + value = self.schema[self.PROPERTY_KEYNAME_REQUIRED] + if isinstance(value, bool): + self._required = value + else: + valid_values = ', 
'.join(self.VALID_REQUIRED_VALUES) + attr = self.PROPERTY_KEYNAME_REQUIRED + TOSCAException.generate_inv_schema_property_error(self, + attr, + value, + valid_values) + + @property + def status(self): + return self._status + + def _load_status_attr_from_schema(self): + # IF 'status' keyname exists verify it's a valid value, + # if so override default + if self.PROPERTY_KEYNAME_STATUS in self.schema: + value = self.schema[self.PROPERTY_KEYNAME_STATUS] + if value in self.VALID_STATUS_VALUES: + self._status = value + else: + valid_values = ', '.join(self.VALID_STATUS_VALUES) + attr = self.PROPERTY_KEYNAME_STATUS + TOSCAException.generate_inv_schema_property_error(self, + attr, + value, + valid_values) +*/ diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/RelationshipType.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/RelationshipType.java new file mode 100644 index 0000000..4c39ec2 --- /dev/null +++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/RelationshipType.java @@ -0,0 +1,121 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + +package org.onap.sdc.toscaparser.api.elements; + +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +import java.util.LinkedHashMap; + +public class RelationshipType extends StatefulEntityType { + + private static final String DERIVED_FROM = "derived_from"; + private static final String VALID_TARGET_TYPES = "valid_target_types"; + private static final String INTERFACES = "interfaces"; + private static final String ATTRIBUTES = "attributes"; + private static final String PROPERTIES = "properties"; + private static final String DESCRIPTION = "description"; + private static final String VERSION = "version"; + private static final String CREDENTIAL = "credential"; + + private static final String[] SECTIONS = { + DERIVED_FROM, VALID_TARGET_TYPES, INTERFACES, + ATTRIBUTES, PROPERTIES, DESCRIPTION, VERSION, CREDENTIAL}; + + private String capabilityName; + private LinkedHashMap customDef; + + public RelationshipType(String type, String capabilityName, LinkedHashMap customDef) { + super(type, RELATIONSHIP_PREFIX, customDef); + this.capabilityName = capabilityName; + this.customDef = customDef; + } + + public RelationshipType getParentType() { + // Return a relationship this reletionship is derived from.''' + String prel = derivedFrom(defs); + if (prel != null) { + return new RelationshipType(prel, null, customDef); + } + return null; + } + + public Object getValidTargetTypes() { + return entityValue(defs, "valid_target_types"); + } + + private void validateKeys() { + for (String key : defs.keySet()) { + boolean bFound = false; + for (String section : SECTIONS) { + if (key.equals(section)) { + bFound = true; + break; + } + } + if (!bFound) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE133", String.format( + "UnknownFieldError: Relationshiptype \"%s\" has 
unknown field \"%s\"", type, key))); + } + } + } +} + +/*python + +from toscaparser.common.exception import ValidationIssueCollector +from toscaparser.common.exception import UnknownFieldError +from toscaparser.elements.statefulentitytype import StatefulEntityType + + +class RelationshipType(StatefulEntityType): + '''TOSCA built-in relationship type.''' + SECTIONS = (DERIVED_FROM, VALID_TARGET_TYPES, INTERFACES, + ATTRIBUTES, PROPERTIES, DESCRIPTION, VERSION, + CREDENTIAL) = ('derived_from', 'valid_target_types', + 'interfaces', 'attributes', 'properties', + 'description', 'version', 'credential') + + def __init__(self, type, capability_name=None, custom_def=None): + super(RelationshipType, self).__init__(type, self.RELATIONSHIP_PREFIX, + custom_def) + self.capability_name = capability_name + self.custom_def = custom_def + self._validate_keys() + + @property + def parent_type(self): + '''Return a relationship this reletionship is derived from.''' + prel = self.derived_from(self.defs) + if prel: + return RelationshipType(prel, self.custom_def) + + @property + def valid_target_types(self): + return self.entity_value(self.defs, 'valid_target_types') + + def _validate_keys(self): + for key in self.defs.keys(): + if key not in self.SECTIONS: + ValidationIssueCollector.appendException( + UnknownFieldError(what='Relationshiptype "%s"' % self.type, + field=key)) +*/ diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnit.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnit.java new file mode 100644 index 0000000..1eaa8a0 --- /dev/null +++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnit.java @@ -0,0 +1,287 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. 
+ * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + +package org.onap.sdc.toscaparser.api.elements; + +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; +import org.onap.sdc.toscaparser.api.utils.ValidateUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.HashMap; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +public abstract class ScalarUnit { + + private static Logger log = LoggerFactory.getLogger(ScalarUnit.class.getName()); + + private static final String SCALAR_UNIT_SIZE = "scalar-unit.size"; + private static final String SCALAR_UNIT_FREQUENCY = "scalar-unit.frequency"; + private static final String SCALAR_UNIT_TIME = "scalar-unit.time"; + + public static final String[] SCALAR_UNIT_TYPES = { + SCALAR_UNIT_SIZE, SCALAR_UNIT_FREQUENCY, SCALAR_UNIT_TIME + }; + + private Object value; + private HashMap scalarUnitDict; + private String scalarUnitDefault; + + public ScalarUnit(Object value) { + this.value = value; + scalarUnitDict = new HashMap<>(); + scalarUnitDefault = ""; + } + + void putToScalarUnitDict(String key, Object value) { + scalarUnitDict.put(key, value); + } + + void setScalarUnitDefault(String scalarUnitDefault) { + this.scalarUnitDefault = 
scalarUnitDefault; + } + + private String checkUnitInScalarStandardUnits(String inputUnit) { + // Check whether the input unit is following specified standard + + // If unit is not following specified standard, convert it to standard + // unit after displaying a warning message. + + if (scalarUnitDict.get(inputUnit) != null) { + return inputUnit; + } else { + for (String key : scalarUnitDict.keySet()) { + if (key.toUpperCase().equals(inputUnit.toUpperCase())) { + log.debug("ScalarUnit - checkUnitInScalarStandardUnits - \n" + + "The unit {} does not follow scalar unit standards\n" + + "using {} instead", + inputUnit, key); + return key; + } + } + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE007", String.format( + "'The unit \"%s\" is not valid. Valid units are \n%s", + inputUnit, scalarUnitDict.keySet().toString()))); + return inputUnit; + } + } + + public Object validateScalarUnit() { + Pattern pattern = Pattern.compile("([0-9.]+)\\s*(\\w+)"); + Matcher matcher = pattern.matcher(value.toString()); + if (matcher.find()) { + ValidateUtils.strToNum(matcher.group(1)); + String scalarUnit = checkUnitInScalarStandardUnits(matcher.group(2)); + value = matcher.group(1) + " " + scalarUnit; + } else { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE134", String.format( + "ValueError: \"%s\" is not a valid scalar-unit", value.toString()))); + } + return value; + } + + public double getNumFromScalarUnit(String unit) { + if (unit != null) { + unit = checkUnitInScalarStandardUnits(unit); + } else { + unit = scalarUnitDefault; + } + Pattern pattern = Pattern.compile("([0-9.]+)\\s*(\\w+)"); + Matcher matcher = pattern.matcher(value.toString()); + if (matcher.find()) { + final double minimalNum = 0.0000000000001; + + ValidateUtils.strToNum(matcher.group(1)); + String scalarUnit = checkUnitInScalarStandardUnits(matcher.group(2)); + value = matcher.group(1) + " " + scalarUnit; + Object on1 = 
ValidateUtils.strToNum(matcher.group(1)) != null ? ValidateUtils.strToNum(matcher.group(1)) : 0; + Object on2 = scalarUnitDict.get(matcher.group(2)) != null ? scalarUnitDict.get(matcher.group(2)) : 0; + Object on3 = scalarUnitDict.get(unit) != null ? scalarUnitDict.get(unit) : 0; + + Double n1 = new Double(on1.toString()); + Double n2 = new Double(on2.toString()); + Double n3 = new Double(on3.toString()); + double converted = n1 * n2 / n3; + + if (Math.abs(converted - Math.round(converted)) < minimalNum) { + converted = Math.round(converted); + } + return converted; + } + return 0.0; + } + + private static HashMap scalarUnitMapping = getScalarUnitMappings(); + + private static HashMap getScalarUnitMappings() { + HashMap map = new HashMap<>(); + map.put(SCALAR_UNIT_FREQUENCY, "ScalarUnitFrequency"); + map.put(SCALAR_UNIT_SIZE, "ScalarUnitSize"); + map.put(SCALAR_UNIT_TIME, "ScalarUnit_Time"); + return map; + } + + public static ScalarUnit getScalarunitClass(String type, Object val) { + if (type.equals(SCALAR_UNIT_SIZE)) { + return new ScalarUnitSize(val); + } else if (type.equals(SCALAR_UNIT_TIME)) { + return new ScalarUnitTime(val); + } else if (type.equals(SCALAR_UNIT_FREQUENCY)) { + return new ScalarUnitFrequency(val); + } + return null; + } + + public static double getScalarunitValue(String type, Object value, String unit) { + if (type.equals(SCALAR_UNIT_SIZE)) { + return (new ScalarUnitSize(value)).getNumFromScalarUnit(unit); + } + if (type.equals(SCALAR_UNIT_TIME)) { + return (new ScalarUnitTime(value)).getNumFromScalarUnit(unit); + } + if (type.equals(SCALAR_UNIT_FREQUENCY)) { + return (new ScalarUnitFrequency(value)).getNumFromScalarUnit(unit); + } + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE135", String.format( + "TypeError: \"%s\" is not a valid scalar-unit type", type))); + return 0.0; + } + +} + +/*python + +from toscaparser.common.exception import ValidationIssueCollector +from toscaparser.utils.gettextutils 
import _ +from toscaparser.utils import validateutils + +log = logging.getLogger('tosca') + + +class ScalarUnit(object): + '''Parent class for scalar-unit type.''' + + SCALAR_UNIT_TYPES = ( + SCALAR_UNIT_SIZE, SCALAR_UNIT_FREQUENCY, SCALAR_UNIT_TIME + ) = ( + 'scalar-unit.size', 'scalar-unit.frequency', 'scalar-unit.time' + ) + + def __init__(self, value): + self.value = value + + def _check_unit_in_scalar_standard_units(self, input_unit): + """Check whether the input unit is following specified standard + + If unit is not following specified standard, convert it to standard + unit after displaying a warning message. + """ + if input_unit in self.scalarUnitDict.keys(): + return input_unit + else: + for key in self.scalarUnitDict.keys(): + if key.upper() == input_unit.upper(): + log.warning(_('The unit "%(unit)s" does not follow ' + 'scalar unit standards; using "%(key)s" ' + 'instead.') % {'unit': input_unit, + 'key': key}) + return key + msg = (_('The unit "%(unit)s" is not valid. Valid units are ' + '"%(valid_units)s".') % + {'unit': input_unit, + 'valid_units': sorted(self.scalarUnitDict.keys())}) + ValidationIssueCollector.appendException(ValueError(msg)) + + def validate_scalar_unit(self): + regex = re.compile('([0-9.]+)\s*(\w+)') + try: + result = regex.match(str(self.value)).groups() + validateutils.str_to_num(result[0]) + scalar_unit = self._check_unit_in_scalar_standard_units(result[1]) + self.value = ' '.join([result[0], scalar_unit]) + return self.value + + except Exception: + ValidationIssueCollector.appendException( + ValueError(_('"%s" is not a valid scalar-unit.') + % self.value)) + + def get_num_from_scalar_unit(self, unit=None): + if unit: + unit = self._check_unit_in_scalar_standard_units(unit) + else: + unit = self.scalarUnitDefault + self.validate_scalar_unit() + + regex = re.compile('([0-9.]+)\s*(\w+)') + result = regex.match(str(self.value)).groups() + converted = (float(validateutils.str_to_num(result[0])) + * self.scalarUnitDict[result[1]] + 
/ self.scalarUnitDict[unit]) + if converted - int(converted) < 0.0000000000001: + converted = int(converted) + return converted + + +class ScalarUnit_Size(ScalarUnit): + + scalarUnitDefault = 'B' + scalarUnitDict = {'B': 1, 'kB': 1000, 'KiB': 1024, 'MB': 1000000, + 'MiB': 1048576, 'GB': 1000000000, + 'GiB': 1073741824, 'TB': 1000000000000, + 'TiB': 1099511627776} + + +class ScalarUnit_Time(ScalarUnit): + + scalarUnitDefault = 'ms' + scalarUnitDict = {'d': 86400, 'h': 3600, 'm': 60, 's': 1, + 'ms': 0.001, 'us': 0.000001, 'ns': 0.000000001} + + +class ScalarUnit_Frequency(ScalarUnit): + + scalarUnitDefault = 'GHz' + scalarUnitDict = {'Hz': 1, 'kHz': 1000, + 'MHz': 1000000, 'GHz': 1000000000} + + +scalarunit_mapping = { + ScalarUnit.SCALAR_UNIT_FREQUENCY: ScalarUnit_Frequency, + ScalarUnit.SCALAR_UNIT_SIZE: ScalarUnit_Size, + ScalarUnit.SCALAR_UNIT_TIME: ScalarUnit_Time, + } + + +def get_scalarunit_class(type): + return scalarunit_mapping.get(type) + + +def get_scalarunit_value(type, value, unit=None): + if type in ScalarUnit.SCALAR_UNIT_TYPES: + ScalarUnit_Class = get_scalarunit_class(type) + return (ScalarUnit_Class(value). + get_num_from_scalar_unit(unit)) + else: + ValidationIssueCollector.appendException( + TypeError(_('"%s" is not a valid scalar-unit type.') % type)) +*/ diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitFrequency.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitFrequency.java new file mode 100644 index 0000000..ed10da9 --- /dev/null +++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitFrequency.java @@ -0,0 +1,39 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. 
+ * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + +package org.onap.sdc.toscaparser.api.elements; + +public class ScalarUnitFrequency extends ScalarUnit { + + private static final Long HZ = 1L; + private static final Long KHZ = 1000L; + private static final Long MHZ = 1000000L; + private static final Long GHZ = 1000000000L; + + public ScalarUnitFrequency(Object value) { + super(value); + setScalarUnitDefault("GHz"); + putToScalarUnitDict("Hz", HZ); + putToScalarUnitDict("kHz", KHZ); + putToScalarUnitDict("MHz", MHZ); + putToScalarUnitDict("GHz", GHZ); + } + +} diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitSize.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitSize.java new file mode 100644 index 0000000..78687a1 --- /dev/null +++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitSize.java @@ -0,0 +1,43 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. 
+ * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + +package org.onap.sdc.toscaparser.api.elements; + +import org.onap.sdc.toscaparser.api.elements.enums.FileSize; + +public class ScalarUnitSize extends ScalarUnit { + + + + public ScalarUnitSize(Object value) { + super(value); + + setScalarUnitDefault("B"); + putToScalarUnitDict("B", FileSize.B); + putToScalarUnitDict("kB", FileSize.KB); + putToScalarUnitDict("MB", FileSize.MB); + putToScalarUnitDict("GB", FileSize.GB); + putToScalarUnitDict("TB", FileSize.TB); + putToScalarUnitDict("kiB", FileSize.KIB); + putToScalarUnitDict("MiB", FileSize.MIB); + putToScalarUnitDict("GiB", FileSize.GIB); + putToScalarUnitDict("TiB", FileSize.TIB); + } +} diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitTime.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitTime.java new file mode 100644 index 0000000..8d2c13e --- /dev/null +++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitTime.java @@ -0,0 +1,37 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. 
+ * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + +package org.onap.sdc.toscaparser.api.elements; + +public class ScalarUnitTime extends ScalarUnit { + + public ScalarUnitTime(Object value) { + super(value); + setScalarUnitDefault("ms"); + putToScalarUnitDict("d", 86400L); + putToScalarUnitDict("h", 3600L); + putToScalarUnitDict("m", 60L); + putToScalarUnitDict("s", 1L); + putToScalarUnitDict("ms", 0.001); + putToScalarUnitDict("us", 0.000001); + putToScalarUnitDict("ns", 0.000000001); + } + +} diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/StatefulEntityType.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/StatefulEntityType.java new file mode 100644 index 0000000..b710dda --- /dev/null +++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/StatefulEntityType.java @@ -0,0 +1,234 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. 
+ * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + +package org.onap.sdc.toscaparser.api.elements; + +import org.onap.sdc.toscaparser.api.UnsupportedType; +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.Map; + + +public class StatefulEntityType extends EntityType { + // Class representing TOSCA states + + public static final String[] INTERFACE_NODE_LIFECYCLE_OPERATIONS = { + "create", "configure", "start", "stop", "delete"}; + + public static final String[] INTERFACE_RELATIONSHIP_CONFIGURE_OPERATIONS = { + "post_configure_source", "post_configure_target", "add_target", "remove_target"}; + + public StatefulEntityType() { + // void constructor for subclasses that don't want super + } + + @SuppressWarnings("unchecked") + public StatefulEntityType(String entityType, String prefix, LinkedHashMap customDef) { + + String entireEntityType = entityType; + if (UnsupportedType.validateType(entireEntityType)) { + defs = null; + } else { + if (entityType.startsWith(TOSCA + ":")) { + entityType = entityType.substring(TOSCA.length() + 1); + entireEntityType = prefix + entityType; + } + if (!entityType.startsWith(TOSCA)) { + entireEntityType = 
prefix + entityType; + } + if (TOSCA_DEF.get(entireEntityType) != null) { + defs = (LinkedHashMap) TOSCA_DEF.get(entireEntityType); + entityType = entireEntityType; + } else if (customDef != null && customDef.get(entityType) != null) { + defs = (LinkedHashMap) customDef.get(entityType); + } else { + defs = null; + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE136", String.format( + "InvalidTypeError: \"%s\" is not a valid type", entityType))); + } + } + type = entityType; + } + + @SuppressWarnings("unchecked") + public ArrayList getPropertiesDefObjects() { + // Return a list of property definition objects + ArrayList properties = new ArrayList(); + LinkedHashMap props = (LinkedHashMap) getDefinition(PROPERTIES); + if (props != null) { + for (Map.Entry me : props.entrySet()) { + String pdname = me.getKey(); + Object to = me.getValue(); + if (to == null || !(to instanceof LinkedHashMap)) { + String s = to == null ? "null" : to.getClass().getSimpleName(); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE137", String.format( + "Unexpected type error: property \"%s\" has type \"%s\" (expected dict)", pdname, s))); + continue; + } + LinkedHashMap pdschema = (LinkedHashMap) to; + properties.add(new PropertyDef(pdname, null, pdschema)); + } + } + return properties; + } + + public LinkedHashMap getPropertiesDef() { + LinkedHashMap pds = new LinkedHashMap(); + for (PropertyDef pd : getPropertiesDefObjects()) { + pds.put(pd.getName(), pd); + } + return pds; + } + + public PropertyDef getPropertyDefValue(String name) { + // Return the property definition associated with a given name + PropertyDef pd = null; + LinkedHashMap propsDef = getPropertiesDef(); + if (propsDef != null) { + pd = propsDef.get(name); + } + return pd; + } + + public ArrayList getAttributesDefObjects() { + // Return a list of attribute definition objects + @SuppressWarnings("unchecked") + LinkedHashMap attrs = (LinkedHashMap) 
getValue(ATTRIBUTES, null, true); + ArrayList ads = new ArrayList<>(); + if (attrs != null) { + for (Map.Entry me : attrs.entrySet()) { + String attr = me.getKey(); + @SuppressWarnings("unchecked") + LinkedHashMap adschema = (LinkedHashMap) me.getValue(); + ads.add(new AttributeDef(attr, null, adschema)); + } + } + return ads; + } + + public LinkedHashMap getAttributesDef() { + // Return a dictionary of attribute definition name-object pairs + + LinkedHashMap ads = new LinkedHashMap<>(); + for (AttributeDef ado : getAttributesDefObjects()) { + ads.put(((AttributeDef) ado).getName(), ado); + } + return ads; + } + + public AttributeDef getAttributeDefValue(String name) { + // Return the attribute definition associated with a given name + AttributeDef ad = null; + LinkedHashMap attrsDef = getAttributesDef(); + if (attrsDef != null) { + ad = attrsDef.get(name); + } + return ad; + } + + public String getType() { + return type; + } +} + +/*python + +from toscaparser.common.exception import InvalidTypeError +from toscaparser.elements.attribute_definition import AttributeDef +from toscaparser.elements.entity_type import EntityType +from toscaparser.elements.property_definition import PropertyDef +from toscaparser.unsupportedtype import UnsupportedType + + +class StatefulEntityType(EntityType): + '''Class representing TOSCA states.''' + + interfaces_node_lifecycle_operations = ['create', + 'configure', 'start', + 'stop', 'delete'] + + interfaces_relationship_configure_operations = ['post_configure_source', + 'post_configure_target', + 'add_target', + 'remove_target'] + + def __init__(self, entitytype, prefix, custom_def=None): + entire_entitytype = entitytype + if UnsupportedType.validate_type(entire_entitytype): + self.defs = None + else: + if entitytype.startswith(self.TOSCA + ":"): + entitytype = entitytype[(len(self.TOSCA) + 1):] + entire_entitytype = prefix + entitytype + if not entitytype.startswith(self.TOSCA): + entire_entitytype = prefix + entitytype + if 
entire_entitytype in list(self.TOSCA_DEF.keys()): + self.defs = self.TOSCA_DEF[entire_entitytype] + entitytype = entire_entitytype + elif custom_def and entitytype in list(custom_def.keys()): + self.defs = custom_def[entitytype] + else: + self.defs = None + ValidationIssueCollector.appendException( + InvalidTypeError(what=entitytype)) + self.type = entitytype + + def get_properties_def_objects(self): + '''Return a list of property definition objects.''' + properties = [] + props = self.get_definition(self.PROPERTIES) + if props: + for prop, schema in props.items(): + properties.append(PropertyDef(prop, None, schema)) + return properties + + def get_properties_def(self): + '''Return a dictionary of property definition name-object pairs.''' + return {prop.name: prop + for prop in self.get_properties_def_objects()} + + def get_property_def_value(self, name): + '''Return the property definition associated with a given name.''' + props_def = self.get_properties_def() + if props_def and name in props_def.keys(): + return props_def[name].value + + def get_attributes_def_objects(self): + '''Return a list of attribute definition objects.''' + attrs = self.get_value(self.ATTRIBUTES, parent=True) + if attrs: + return [AttributeDef(attr, None, schema) + for attr, schema in attrs.items()] + return [] + + def get_attributes_def(self): + '''Return a dictionary of attribute definition name-object pairs.''' + return {attr.name: attr + for attr in self.get_attributes_def_objects()} + + def get_attribute_def_value(self, name): + '''Return the attribute definition associated with a given name.''' + attrs_def = self.get_attributes_def() + if attrs_def and name in attrs_def.keys(): + return attrs_def[name].value +*/ diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/TypeValidation.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/TypeValidation.java new file mode 100644 index 0000000..18dd5ca --- /dev/null +++ 
b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/TypeValidation.java @@ -0,0 +1,173 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + +package org.onap.sdc.toscaparser.api.elements; + +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +import java.util.ArrayList; +import java.util.LinkedHashMap; + +import org.onap.sdc.toscaparser.api.extensions.ExtTools; + +public class TypeValidation { + + private static final String DEFINITION_VERSION = "tosca_definitions_version"; + private static final String DESCRIPTION = "description"; + private static final String IMPORTS = "imports"; + private static final String DSL_DEFINITIONS = "dsl_definitions"; + private static final String NODE_TYPES = "node_types"; + private static final String REPOSITORIES = "repositories"; + private static final String DATA_TYPES = "data_types"; + private static final String ARTIFACT_TYPES = "artifact_types"; + private static final String GROUP_TYPES = "group_types"; + private 
static final String RELATIONSHIP_TYPES = "relationship_types"; + private static final String CAPABILITY_TYPES = "capability_types"; + private static final String INTERFACE_TYPES = "interface_types"; + private static final String POLICY_TYPES = "policy_types"; + private static final String TOPOLOGY_TEMPLATE = "topology_template"; + //Pavel + private static final String METADATA = "metadata"; + + private String ALLOWED_TYPE_SECTIONS[] = { + DEFINITION_VERSION, DESCRIPTION, IMPORTS, + DSL_DEFINITIONS, NODE_TYPES, REPOSITORIES, + DATA_TYPES, ARTIFACT_TYPES, GROUP_TYPES, + RELATIONSHIP_TYPES, CAPABILITY_TYPES, + INTERFACE_TYPES, POLICY_TYPES, + TOPOLOGY_TEMPLATE, METADATA + }; + + private static ArrayList VALID_TEMPLATE_VERSIONS = _getVTV(); + + private static ArrayList _getVTV() { + ArrayList vtv = new ArrayList<>(); + vtv.add("tosca_simple_yaml_1_0"); + vtv.add("tosca_simple_yaml_1_1"); + ExtTools exttools = new ExtTools(); + vtv.addAll(exttools.getVersions()); + return vtv; + } + + //private LinkedHashMap customTypes; + private Object importDef; + //private String version; + + public TypeValidation(LinkedHashMap _customTypes, + Object _importDef) { + importDef = _importDef; + _validateTypeKeys(_customTypes); + } + + private void _validateTypeKeys(LinkedHashMap customTypes) { + + String sVersion = (String) customTypes.get(DEFINITION_VERSION); + if (sVersion != null) { + _validateTypeVersion(sVersion); + //version = sVersion; + } + for (String name : customTypes.keySet()) { + boolean bFound = false; + for (String ats : ALLOWED_TYPE_SECTIONS) { + if (name.equals(ats)) { + bFound = true; + break; + } + } + if (!bFound) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE138", String.format( + "UnknownFieldError: Template \"%s\" contains unknown field \"%s\"", + importDef.toString(), name))); + } + } + } + + private void _validateTypeVersion(String sVersion) { + boolean bFound = false; + String allowed = ""; + for (String atv : 
VALID_TEMPLATE_VERSIONS) { + allowed += "\"" + atv + "\" "; + if (sVersion.equals(atv)) { + bFound = true; + break; + } + } + if (!bFound) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE139", String.format( + "InvalidTemplateVersion: version \"%s\" in \"%s\" is not supported\n" + + "Allowed versions: [%s]", + sVersion, importDef.toString(), allowed))); + } + } +} + +/*python + +from toscaparser.common.exception import ValidationIssueCollector +from toscaparser.common.exception import InvalidTemplateVersion +from toscaparser.common.exception import UnknownFieldError +from toscaparser.extensions.exttools import ExtTools + + +class TypeValidation(object): + + ALLOWED_TYPE_SECTIONS = (DEFINITION_VERSION, DESCRIPTION, IMPORTS, + DSL_DEFINITIONS, NODE_TYPES, REPOSITORIES, + DATA_TYPES, ARTIFACT_TYPES, GROUP_TYPES, + RELATIONSHIP_TYPES, CAPABILITY_TYPES, + INTERFACE_TYPES, POLICY_TYPES, + TOPOLOGY_TEMPLATE) = \ + ('tosca_definitions_version', 'description', 'imports', + 'dsl_definitions', 'node_types', 'repositories', + 'data_types', 'artifact_types', 'group_types', + 'relationship_types', 'capability_types', + 'interface_types', 'policy_types', 'topology_template') + VALID_TEMPLATE_VERSIONS = ['tosca_simple_yaml_1_0'] + exttools = ExtTools() + VALID_TEMPLATE_VERSIONS.extend(exttools.get_versions()) + + def __init__(self, custom_types, import_def): + self.import_def = import_def + self._validate_type_keys(custom_types) + + def _validate_type_keys(self, custom_type): + version = custom_type[self.DEFINITION_VERSION] \ + if self.DEFINITION_VERSION in custom_type \ + else None + if version: + self._validate_type_version(version) + self.version = version + + for name in custom_type: + if name not in self.ALLOWED_TYPE_SECTIONS: + ValidationIssueCollector.appendException( +# UnknownFieldError(what='Template ' + (self.import_def), + UnknownFieldError(what= (self.import_def), + field=name)) + + def _validate_type_version(self, version): + 
if version not in self.VALID_TEMPLATE_VERSIONS: + ValidationIssueCollector.appendException( + InvalidTemplateVersion( +# what=version + ' in ' + self.import_def, + what=self.import_def, + valid_versions=', '. join(self.VALID_TEMPLATE_VERSIONS))) +*/ diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Constraint.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Constraint.java new file mode 100644 index 0000000..dd77659 --- /dev/null +++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Constraint.java @@ -0,0 +1,309 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + +package org.onap.sdc.toscaparser.api.elements.constraints; + +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.elements.ScalarUnit; +import org.onap.sdc.toscaparser.api.functions.Function; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.List; + +public abstract class Constraint { + + // Parent class for constraints for a Property or Input + + protected static final String EQUAL = "equal"; + protected static final String GREATER_THAN = "greater_than"; + protected static final String GREATER_OR_EQUAL = "greater_or_equal"; + protected static final String LESS_THAN = "less_than"; + protected static final String LESS_OR_EQUAL = "less_or_equal"; + protected static final String IN_RANGE = "in_range"; + protected static final String VALID_VALUES = "valid_values"; + protected static final String LENGTH = "length"; + protected static final String MIN_LENGTH = "min_length"; + protected static final String MAX_LENGTH = "max_length"; + protected static final String PATTERN = "pattern"; + + protected static final String[] CONSTRAINTS = { + EQUAL, GREATER_THAN, GREATER_OR_EQUAL, LESS_THAN, LESS_OR_EQUAL, + IN_RANGE, VALID_VALUES, LENGTH, MIN_LENGTH, MAX_LENGTH, PATTERN}; + + @SuppressWarnings("unchecked") + public static Constraint factory(String constraintClass, String propname, String proptype, Object constraint) { + + // a factory for the different Constraint classes + // replaces Python's __new__() usage + + if (!(constraint instanceof LinkedHashMap) + || ((LinkedHashMap) constraint).size() != 1) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE101", + "InvalidSchemaError: Invalid constraint schema " + constraint.toString())); + } + + switch (constraintClass) { + case EQUAL: + return new 
Equal(propname, proptype, constraint); + case GREATER_THAN: + return new GreaterThan(propname, proptype, constraint); + case GREATER_OR_EQUAL: + return new GreaterOrEqual(propname, proptype, constraint); + case LESS_THAN: + return new LessThan(propname, proptype, constraint); + case LESS_OR_EQUAL: + return new LessOrEqual(propname, proptype, constraint); + case IN_RANGE: + return new InRange(propname, proptype, constraint); + case VALID_VALUES: + return new ValidValues(propname, proptype, constraint); + case LENGTH: + return new Length(propname, proptype, constraint); + case MIN_LENGTH: + return new MinLength(propname, proptype, constraint); + case MAX_LENGTH: + return new MaxLength(propname, proptype, constraint); + case PATTERN: + return new Pattern(propname, proptype, constraint); + default: + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE102", String.format( + "InvalidSchemaError: Invalid property \"%s\"", constraintClass))); + return null; + } + } + + private String constraintKey = "TBD"; + protected ArrayList validTypes = new ArrayList<>(); + protected ArrayList validPropTypes = new ArrayList<>(); + + protected String propertyName; + private String propertyType; + protected Object constraintValue; + protected Object constraintValueMsg; + protected Object valueMsg; + + @SuppressWarnings("unchecked") + public Constraint(String propname, String proptype, Object constraint) { + + setValues(); + + propertyName = propname; + propertyType = proptype; + constraintValue = ((LinkedHashMap) constraint).get(constraintKey); + constraintValueMsg = constraintValue; + boolean bFound = false; + for (String s : ScalarUnit.SCALAR_UNIT_TYPES) { + if (s.equals(propertyType)) { + bFound = true; + break; + } + } + if (bFound) { + constraintValue = _getScalarUnitConstraintValue(); + } + // check if constraint is valid for property type + bFound = false; + for (String s : validPropTypes) { + if (s.equals(propertyType)) { + bFound = true; + 
break; + } + } + if (!bFound) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE103", String.format( + "InvalidSchemaError: Property \"%s\" is not valid for data type \"%s\"", + constraintKey, propertyType))); + } + } + + public ArrayList getValidTypes() { + return validTypes; + } + + public void addValidTypes(List validTypes) { + this.validTypes.addAll(validTypes); + } + + public ArrayList getValidPropTypes() { + return validPropTypes; + } + + public String getPropertyType() { + return propertyType; + } + + public Object getConstraintValue() { + return constraintValue; + } + + public Object getConstraintValueMsg() { + return constraintValueMsg; + } + + public Object getValueMsg() { + return valueMsg; + } + + public void setConstraintKey(String constraintKey) { + this.constraintKey = constraintKey; + } + + public void setValidTypes(ArrayList validTypes) { + this.validTypes = validTypes; + } + + public void setValidPropTypes(ArrayList validPropTypes) { + this.validPropTypes = validPropTypes; + } + + public void setPropertyType(String propertyType) { + this.propertyType = propertyType; + } + + public void setConstraintValue(Object constraintValue) { + this.constraintValue = constraintValue; + } + + public void setConstraintValueMsg(Object constraintValueMsg) { + this.constraintValueMsg = constraintValueMsg; + } + + public void setValueMsg(Object valueMsg) { + this.valueMsg = valueMsg; + } + + @SuppressWarnings("unchecked") + private Object _getScalarUnitConstraintValue() { + // code differs from Python because of class creation + if (constraintValue instanceof ArrayList) { + ArrayList ret = new ArrayList<>(); + for (Object v : (ArrayList) constraintValue) { + ScalarUnit su = ScalarUnit.getScalarunitClass(propertyType, v); + ret.add(su.getNumFromScalarUnit(null)); + } + return ret; + } else { + ScalarUnit su = ScalarUnit.getScalarunitClass(propertyType, constraintValue); + return su.getNumFromScalarUnit(null); + } + } + + public 
void validate(Object value) { + if (Function.isFunction(value)) { + //skipping constraints check for functions + return; + } + + valueMsg = value; + boolean bFound = false; + for (String s : ScalarUnit.SCALAR_UNIT_TYPES) { + if (s.equals(propertyType)) { + bFound = true; + break; + } + } + if (bFound) { + value = ScalarUnit.getScalarunitValue(propertyType, value, null); + } + if (!isValid(value)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE008", "ValidationError: " + errMsg(value))); + } + } + + protected abstract boolean isValid(Object value); + + protected abstract void setValues(); + + protected abstract String errMsg(Object value); + +} + +/*python + +class Constraint(object): + '''Parent class for constraints for a Property or Input.''' + + CONSTRAINTS = (EQUAL, GREATER_THAN, + GREATER_OR_EQUAL, LESS_THAN, LESS_OR_EQUAL, IN_RANGE, + VALID_VALUES, LENGTH, MIN_LENGTH, MAX_LENGTH, PATTERN) = \ + ('equal', 'greater_than', 'greater_or_equal', 'less_than', + 'less_or_equal', 'in_range', 'valid_values', 'length', + 'min_length', 'max_length', 'pattern') + + def __new__(cls, property_name, property_type, constraint): + if cls is not Constraint: + return super(Constraint, cls).__new__(cls) + + if(not isinstance(constraint, collections.Mapping) or + len(constraint) != 1): + ValidationIssueCollector.appendException( + InvalidSchemaError(message=_('Invalid constraint schema.'))) + + for type in constraint.keys(): + ConstraintClass = get_constraint_class(type) + if not ConstraintClass: + msg = _('Invalid property "%s".') % type + ValidationIssueCollector.appendException( + InvalidSchemaError(message=msg)) + + return ConstraintClass(property_name, property_type, constraint) + + def __init__(self, property_name, property_type, constraint): + self.property_name = property_name + self.property_type = property_type + self.constraint_value = constraint[self.constraint_key] + self.constraint_value_msg = self.constraint_value + if 
self.property_type in scalarunit.ScalarUnit.SCALAR_UNIT_TYPES: + self.constraint_value = self._get_scalarunit_constraint_value() + # check if constraint is valid for property type + if property_type not in self.valid_prop_types: + msg = _('Property "%(ctype)s" is not valid for data type ' + '"%(dtype)s".') % dict( + ctype=self.constraint_key, + dtype=property_type) + ValidationIssueCollector.appendException(InvalidSchemaError(message=msg)) + + def _get_scalarunit_constraint_value(self): + if self.property_type in scalarunit.ScalarUnit.SCALAR_UNIT_TYPES: + ScalarUnit_Class = (scalarunit. + get_scalarunit_class(self.property_type)) + if isinstance(self.constraint_value, list): + return [ScalarUnit_Class(v).get_num_from_scalar_unit() + for v in self.constraint_value] + else: + return (ScalarUnit_Class(self.constraint_value). + get_num_from_scalar_unit()) + + def _err_msg(self, value): + return _('Property "%s" could not be validated.') % self.property_name + + def validate(self, value): + self.value_msg = value + if self.property_type in scalarunit.ScalarUnit.SCALAR_UNIT_TYPES: + value = scalarunit.get_scalarunit_value(self.property_type, value) + if not self._is_valid(value): + err_msg = self._err_msg(value) + ValidationIssueCollector.appendException( + ValidationError(message=err_msg)) + + +*/ diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Equal.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Equal.java new file mode 100644 index 0000000..f480099 --- /dev/null +++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Equal.java @@ -0,0 +1,77 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. 
+ * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + +package org.onap.sdc.toscaparser.api.elements.constraints; + +import java.util.Arrays; + +public class Equal extends Constraint { + + protected void setValues() { + + setConstraintKey(EQUAL); + validPropTypes.addAll(Arrays.asList(Schema.PROPERTY_TYPES)); + + } + + public Equal(String name, String type, Object c) { + super(name, type, c); + + } + + protected boolean isValid(Object val) { + // equality of objects is tricky so we're comparing + // the toString() representation + return val.toString().equals(constraintValue.toString()); + } + + protected String errMsg(Object value) { + return String.format("The value \"%s\" of property \"%s\" is not equal to \"%s\"", + valueMsg, propertyName, constraintValueMsg); + } + +} + +/*python + +class Equal(Constraint): +"""Constraint class for "equal" + +Constrains a property or parameter to a value equal to ('=') +the value declared. 
+""" + +constraint_key = Constraint.EQUAL + +valid_prop_types = Schema.PROPERTY_TYPES + +def _is_valid(self, value): + if value == self.constraint_value: + return True + + return False + +def _err_msg(self, value): + return (_('The value "%(pvalue)s" of property "%(pname)s" is not ' + 'equal to "%(cvalue)s".') % + dict(pname=self.property_name, + pvalue=self.value_msg, + cvalue=self.constraint_value_msg)) +*/ diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/GreaterOrEqual.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/GreaterOrEqual.java new file mode 100644 index 0000000..0cb8f36 --- /dev/null +++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/GreaterOrEqual.java @@ -0,0 +1,130 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + +package org.onap.sdc.toscaparser.api.elements.constraints; + +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.functions.Function; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +import java.util.Arrays; +import java.util.Date; + +public class GreaterOrEqual extends Constraint { + // Constraint class for "greater_or_equal" + + // Constrains a property or parameter to a value greater than or equal + // to ('>=') the value declared. + + protected void setValues() { + + setConstraintKey(GREATER_OR_EQUAL); + + // timestamps are loaded as Date objects + addValidTypes(Arrays.asList("Integer", "Double", "Float", "Date")); + //validTypes.add("datetime.date"); + //validTypes.add("datetime.time"); + //validTypes.add("datetime.datetime"); + + validPropTypes.add(Schema.INTEGER); + validPropTypes.add(Schema.FLOAT); + validPropTypes.add(Schema.TIMESTAMP); + validPropTypes.add(Schema.SCALAR_UNIT_SIZE); + validPropTypes.add(Schema.SCALAR_UNIT_FREQUENCY); + validPropTypes.add(Schema.SCALAR_UNIT_TIME); + + } + + public GreaterOrEqual(String name, String type, Object c) { + super(name, type, c); + + if (!validTypes.contains(constraintValue.getClass().getSimpleName())) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE104", "InvalidSchemaError: The property \"greater_or_equal\" expects comparable values")); + } + } + + + @Override + protected boolean isValid(Object value) { + if (Function.isFunction(value)) { + return true; + } + + // timestamps + if (value instanceof Date) { + if (constraintValue instanceof Date) { + return !((Date) value).before((Date) constraintValue); + } + return false; + } + // all others + Double n1 = new Double(value.toString()); + Double n2 = new Double(constraintValue.toString()); + return n1 >= n2; + } + + protected String errMsg(Object value) { + return 
String.format("The value \"%s\" of property \"%s\" must be greater or equal to \"%s\"", + valueMsg, propertyName, constraintValueMsg); + } +} + +/*python + +class GreaterOrEqual(Constraint): +"""Constraint class for "greater_or_equal" + +Constrains a property or parameter to a value greater than or equal +to ('>=') the value declared. +""" + +constraint_key = Constraint.GREATER_OR_EQUAL + +valid_types = (int, float, datetime.date, + datetime.time, datetime.datetime) + +valid_prop_types = (Schema.INTEGER, Schema.FLOAT, Schema.TIMESTAMP, + Schema.SCALAR_UNIT_SIZE, Schema.SCALAR_UNIT_FREQUENCY, + Schema.SCALAR_UNIT_TIME) + +def __init__(self, property_name, property_type, constraint): + super(GreaterOrEqual, self).__init__(property_name, property_type, + constraint) + if not isinstance(self.constraint_value, self.valid_types): + ThreadLocalsHolder.getCollector().appendException( + InvalidSchemaError(message=_('The property ' + '"greater_or_equal" expects ' + 'comparable values.'))) + +def _is_valid(self, value): + if toscaparser.functions.is_function(value) or \ + value >= self.constraint_value: + return True + return False + +def _err_msg(self, value): + return (_('The value "%(pvalue)s" of property "%(pname)s" must be ' + 'greater than or equal to "%(cvalue)s".') % + dict(pname=self.property_name, + pvalue=self.value_msg, + cvalue=self.constraint_value_msg)) + + +*/ diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/GreaterThan.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/GreaterThan.java new file mode 100644 index 0000000..b501907 --- /dev/null +++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/GreaterThan.java @@ -0,0 +1,120 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. 
All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + +package org.onap.sdc.toscaparser.api.elements.constraints; + +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +import java.util.Arrays; +import java.util.Date; + +public class GreaterThan extends Constraint { + + @Override + protected void setValues() { + + setConstraintKey(GREATER_THAN); + + // timestamps are loaded as Date objects + addValidTypes(Arrays.asList("Integer", "Double", "Float", "Date")); + //validTypes.add("datetime.date"); + //validTypes.add("datetime.time"); + //validTypes.add("datetime.datetime"); + + + validPropTypes.add(Schema.INTEGER); + validPropTypes.add(Schema.FLOAT); + validPropTypes.add(Schema.TIMESTAMP); + validPropTypes.add(Schema.SCALAR_UNIT_SIZE); + validPropTypes.add(Schema.SCALAR_UNIT_FREQUENCY); + validPropTypes.add(Schema.SCALAR_UNIT_TIME); + + } + + public GreaterThan(String name, String type, Object c) { + super(name, type, c); + + if (!validTypes.contains(constraintValue.getClass().getSimpleName())) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE105", "InvalidSchemaError: The property \"greater_than\" expects comparable values")); + } + } + + @Override + 
protected boolean isValid(Object value) { + + // timestamps + if (value instanceof Date) { + if (constraintValue instanceof Date) { + return ((Date) value).after((Date) constraintValue); + } + return false; + } + + Double n1 = new Double(value.toString()); + Double n2 = new Double(constraintValue.toString()); + return n1 > n2; + } + + protected String errMsg(Object value) { + return String.format("The value \"%s\" of property \"%s\" must be greater than \"%s\"", + valueMsg, propertyName, constraintValueMsg); + } + +} + +/* +class GreaterThan(Constraint): + """Constraint class for "greater_than" + + Constrains a property or parameter to a value greater than ('>') + the value declared. + """ + + constraint_key = Constraint.GREATER_THAN + + valid_types = (int, float, datetime.date, + datetime.time, datetime.datetime) + + valid_prop_types = (Schema.INTEGER, Schema.FLOAT, Schema.TIMESTAMP, + Schema.SCALAR_UNIT_SIZE, Schema.SCALAR_UNIT_FREQUENCY, + Schema.SCALAR_UNIT_TIME) + + def __init__(self, property_name, property_type, constraint): + super(GreaterThan, self).__init__(property_name, property_type, + constraint) + if not isinstance(constraint[self.GREATER_THAN], self.valid_types): + ValidationIsshueCollector.appendException( + InvalidSchemaError(message=_('The property "greater_than" ' + 'expects comparable values.'))) + + def _is_valid(self, value): + if value > self.constraint_value: + return True + + return False + + def _err_msg(self, value): + return (_('The value "%(pvalue)s" of property "%(pname)s" must be ' + 'greater than "%(cvalue)s".') % + dict(pname=self.property_name, + pvalue=self.value_msg, + cvalue=self.constraint_value_msg)) +*/ diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/InRange.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/InRange.java new file mode 100644 index 0000000..4edf021 --- /dev/null +++ 
b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/InRange.java @@ -0,0 +1,186 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + +package org.onap.sdc.toscaparser.api.elements.constraints; + +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +import java.util.Arrays; +import java.util.Date; + +import java.util.ArrayList; + +public class InRange extends Constraint { + // Constraint class for "in_range" + + //Constrains a property or parameter to a value in range of (inclusive) + //the two values declared. 
+ + private static final String UNBOUNDED = "UNBOUNDED"; + + private Object min, max; + + protected void setValues() { + + setConstraintKey(IN_RANGE); + + // timestamps are loaded as Date objects + addValidTypes(Arrays.asList("Integer", "Double", "Float", "String", "Date")); + //validTypes.add("datetime.date"); + //validTypes.add("datetime.time"); + //validTypes.add("datetime.datetime"); + + validPropTypes.add(Schema.INTEGER); + validPropTypes.add(Schema.FLOAT); + validPropTypes.add(Schema.TIMESTAMP); + validPropTypes.add(Schema.SCALAR_UNIT_SIZE); + validPropTypes.add(Schema.SCALAR_UNIT_FREQUENCY); + validPropTypes.add(Schema.SCALAR_UNIT_TIME); + validPropTypes.add(Schema.RANGE); + + } + + @SuppressWarnings("unchecked") + public InRange(String name, String type, Object c) { + super(name, type, c); + + if (!(constraintValue instanceof ArrayList) || ((ArrayList) constraintValue).size() != 2) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE106", "InvalidSchemaError: The property \"in_range\" expects a list")); + + } + + ArrayList alcv = (ArrayList) constraintValue; + String msg = "The property \"in_range\" expects comparable values"; + for (Object vo : alcv) { + if (!validTypes.contains(vo.getClass().getSimpleName())) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE107", "InvalidSchemaError: " + msg)); + } + // The only string we allow for range is the special value 'UNBOUNDED' + if ((vo instanceof String) && !((String) vo).equals(UNBOUNDED)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE108", "InvalidSchemaError: " + msg)); + } + } + min = alcv.get(0); + max = alcv.get(1); + + } + + @Override + protected boolean isValid(Object value) { + + // timestamps + if (value instanceof Date) { + if (min instanceof Date && max instanceof Date) { + return !((Date) value).before((Date) min) + && !((Date) value).after((Date) max); + } + return false; + } + + 
Double dvalue = new Double(value.toString()); + if (!(min instanceof String)) { + if (dvalue < new Double(min.toString())) { + return false; + } + } else if (!((String) min).equals(UNBOUNDED)) { + return false; + } + if (!(max instanceof String)) { + if (dvalue > new Double(max.toString())) { + return false; + } + } else if (!((String) max).equals(UNBOUNDED)) { + return false; + } + return true; + } + + @Override + protected String errMsg(Object value) { + return String.format("The value \"%s\" of property \"%s\" is out of range \"(min:%s, max:%s)\"", + valueMsg, propertyName, min.toString(), max.toString()); + } + +} + +/*python + +class InRange(Constraint): + """Constraint class for "in_range" + + Constrains a property or parameter to a value in range of (inclusive) + the two values declared. + """ + UNBOUNDED = 'UNBOUNDED' + + constraint_key = Constraint.IN_RANGE + + valid_types = (int, float, datetime.date, + datetime.time, datetime.datetime, str) + + valid_prop_types = (Schema.INTEGER, Schema.FLOAT, Schema.TIMESTAMP, + Schema.SCALAR_UNIT_SIZE, Schema.SCALAR_UNIT_FREQUENCY, + Schema.SCALAR_UNIT_TIME, Schema.RANGE) + + def __init__(self, property_name, property_type, constraint): + super(InRange, self).__init__(property_name, property_type, constraint) + if(not isinstance(self.constraint_value, collections.Sequence) or + (len(constraint[self.IN_RANGE]) != 2)): + ValidationIssueCollector.appendException( + InvalidSchemaError(message=_('The property "in_range" ' + 'expects a list.'))) + + msg = _('The property "in_range" expects comparable values.') + for value in self.constraint_value: + if not isinstance(value, self.valid_types): + ValidationIssueCollector.appendException( + InvalidSchemaError(message=msg)) + # The only string we allow for range is the special value + # 'UNBOUNDED' + if(isinstance(value, str) and value != self.UNBOUNDED): + ValidationIssueCollector.appendException( + InvalidSchemaError(message=msg)) + + self.min = self.constraint_value[0] + 
self.max = self.constraint_value[1] + + def _is_valid(self, value): + if not isinstance(self.min, str): + if value < self.min: + return False + elif self.min != self.UNBOUNDED: + return False + if not isinstance(self.max, str): + if value > self.max: + return False + elif self.max != self.UNBOUNDED: + return False + return True + + def _err_msg(self, value): + return (_('The value "%(pvalue)s" of property "%(pname)s" is out of ' + 'range "(min:%(vmin)s, max:%(vmax)s)".') % + dict(pname=self.property_name, + pvalue=self.value_msg, + vmin=self.constraint_value_msg[0], + vmax=self.constraint_value_msg[1])) + +*/ diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Length.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Length.java new file mode 100644 index 0000000..7988cb8 --- /dev/null +++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Length.java @@ -0,0 +1,100 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + +package org.onap.sdc.toscaparser.api.elements.constraints; + +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +import java.util.Collections; + +public class Length extends Constraint { + // Constraint class for "length" + + // Constrains the property or parameter to a value of a given length. + + @Override + protected void setValues() { + + setConstraintKey(LENGTH); + addValidTypes(Collections.singletonList("Integer")); + + validPropTypes.add(Schema.STRING); + + } + + public Length(String name, String type, Object c) { + super(name, type, c); + + if (!validTypes.contains(constraintValue.getClass().getSimpleName())) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE109", "InvalidSchemaError: The property \"length\" expects an integer")); + } + } + + @Override + protected boolean isValid(Object value) { + if (value instanceof String && constraintValue instanceof Integer && + ((String) value).length() == (Integer) constraintValue) { + return true; + } + return false; + } + + @Override + protected String errMsg(Object value) { + return String.format("Length of value \"%s\" of property \"%s\" must be equal to \"%s\"", + value.toString(), propertyName, constraintValue.toString()); + } + +} + +/*python + class Length(Constraint): + """Constraint class for "length" + + Constrains the property or parameter to a value of a given length. 
+ """ + + constraint_key = Constraint.LENGTH + + valid_types = (int, ) + + valid_prop_types = (Schema.STRING, ) + + def __init__(self, property_name, property_type, constraint): + super(Length, self).__init__(property_name, property_type, constraint) + if not isinstance(self.constraint_value, self.valid_types): + ValidationIsshueCollector.appendException( + InvalidSchemaError(message=_('The property "length" expects ' + 'an integer.'))) + + def _is_valid(self, value): + if isinstance(value, str) and len(value) == self.constraint_value: + return True + + return False + + def _err_msg(self, value): + return (_('Length of value "%(pvalue)s" of property "%(pname)s" ' + 'must be equal to "%(cvalue)s".') % + dict(pname=self.property_name, + pvalue=value, + cvalue=self.constraint_value)) +*/ diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/LessOrEqual.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/LessOrEqual.java new file mode 100644 index 0000000..37a4afc --- /dev/null +++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/LessOrEqual.java @@ -0,0 +1,124 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + +package org.onap.sdc.toscaparser.api.elements.constraints; + +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +import java.util.Arrays; +import java.util.Date; + +public class LessOrEqual extends Constraint { + // Constraint class for "less_or_equal" + + // Constrains a property or parameter to a value less than or equal + // to ('<=') the value declared. + + protected void setValues() { + + setConstraintKey(LESS_OR_EQUAL); + + // timestamps are loaded as Date objects + addValidTypes(Arrays.asList("Integer", "Double", "Float", "Date")); + //validTypes.add("datetime.date"); + //validTypes.add("datetime.time"); + //validTypes.add("datetime.datetime"); + + validPropTypes.add(Schema.INTEGER); + validPropTypes.add(Schema.FLOAT); + validPropTypes.add(Schema.TIMESTAMP); + validPropTypes.add(Schema.SCALAR_UNIT_SIZE); + validPropTypes.add(Schema.SCALAR_UNIT_FREQUENCY); + validPropTypes.add(Schema.SCALAR_UNIT_TIME); + + } + + public LessOrEqual(String name, String type, Object c) { + super(name, type, c); + + if (!validTypes.contains(constraintValue.getClass().getSimpleName())) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE110", "InvalidSchemaError: The property \"less_or_equal\" expects comparable values")); + } + } + + @Override + protected boolean isValid(Object value) { + + // timestamps + if (value instanceof Date) { + if (constraintValue instanceof Date) { + return !((Date) value).after((Date) constraintValue); + } + return false; + } + + Double n1 = new Double(value.toString()); + Double n2 = new Double(constraintValue.toString()); + return n1 <= n2; + } + + @Override + protected String errMsg(Object value) { + return String.format("The value 
\"%s\" of property \"%s\" must be less or equal to \"%s\"", + valueMsg, propertyName, constraintValueMsg); + } + +} + +/*python + +class LessOrEqual(Constraint): + """Constraint class for "less_or_equal" + + Constrains a property or parameter to a value less than or equal + to ('<=') the value declared. + """ + + constraint_key = Constraint.LESS_OR_EQUAL + + valid_types = (int, float, datetime.date, + datetime.time, datetime.datetime) + + valid_prop_types = (Schema.INTEGER, Schema.FLOAT, Schema.TIMESTAMP, + Schema.SCALAR_UNIT_SIZE, Schema.SCALAR_UNIT_FREQUENCY, + Schema.SCALAR_UNIT_TIME) + + def __init__(self, property_name, property_type, constraint): + super(LessOrEqual, self).__init__(property_name, property_type, + constraint) + if not isinstance(self.constraint_value, self.valid_types): + ValidationIsshueCollector.appendException( + InvalidSchemaError(message=_('The property "less_or_equal" ' + 'expects comparable values.'))) + + def _is_valid(self, value): + if value <= self.constraint_value: + return True + + return False + + def _err_msg(self, value): + return (_('The value "%(pvalue)s" of property "%(pname)s" must be ' + 'less than or equal to "%(cvalue)s".') % + dict(pname=self.property_name, + pvalue=self.value_msg, + cvalue=self.constraint_value_msg)) +*/ diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/LessThan.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/LessThan.java new file mode 100644 index 0000000..952861d --- /dev/null +++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/LessThan.java @@ -0,0 +1,121 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. 
+ * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + +package org.onap.sdc.toscaparser.api.elements.constraints; + +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +import java.util.Arrays; +import java.util.Date; + +public class LessThan extends Constraint { + + @Override + protected void setValues() { + + setConstraintKey(LESS_THAN); + // timestamps are loaded as Date objects + addValidTypes(Arrays.asList("Integer", "Double", "Float", "Date")); + //validTypes.add("datetime.date"); + //validTypes.add("datetime.time"); + //validTypes.add("datetime.datetime"); + + + validPropTypes.add(Schema.INTEGER); + validPropTypes.add(Schema.FLOAT); + validPropTypes.add(Schema.TIMESTAMP); + validPropTypes.add(Schema.SCALAR_UNIT_SIZE); + validPropTypes.add(Schema.SCALAR_UNIT_FREQUENCY); + validPropTypes.add(Schema.SCALAR_UNIT_TIME); + + } + + public LessThan(String name, String type, Object c) { + super(name, type, c); + + if (!validTypes.contains(constraintValue.getClass().getSimpleName())) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE111", "InvalidSchemaError: The property \"less_than\" expects comparable values")); + } + } + + @Override + protected boolean isValid(Object value) { 
+ + // timestamps + if (value instanceof Date) { + if (constraintValue instanceof Date) { + return ((Date) value).before((Date) constraintValue); + } + return false; + } + + Double n1 = new Double(value.toString()); + Double n2 = new Double(constraintValue.toString()); + return n1 < n2; + } + + @Override + protected String errMsg(Object value) { + return String.format("The value \"%s\" of property \"%s\" must be less than \"%s\"", + valueMsg, propertyName, constraintValueMsg); + } + +} + +/*python + +class LessThan(Constraint): +"""Constraint class for "less_than" + +Constrains a property or parameter to a value less than ('<') +the value declared. +""" + +constraint_key = Constraint.LESS_THAN + +valid_types = (int, float, datetime.date, + datetime.time, datetime.datetime) + +valid_prop_types = (Schema.INTEGER, Schema.FLOAT, Schema.TIMESTAMP, + Schema.SCALAR_UNIT_SIZE, Schema.SCALAR_UNIT_FREQUENCY, + Schema.SCALAR_UNIT_TIME) + +def __init__(self, property_name, property_type, constraint): + super(LessThan, self).__init__(property_name, property_type, + constraint) + if not isinstance(self.constraint_value, self.valid_types): + ValidationIsshueCollector.appendException( + InvalidSchemaError(message=_('The property "less_than" ' + 'expects comparable values.'))) + +def _is_valid(self, value): + if value < self.constraint_value: + return True + + return False + +def _err_msg(self, value): + return (_('The value "%(pvalue)s" of property "%(pname)s" must be ' + 'less than "%(cvalue)s".') % + dict(pname=self.property_name, + pvalue=self.value_msg, + cvalue=self.constraint_value_msg)) +*/ diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/MaxLength.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/MaxLength.java new file mode 100644 index 0000000..9068b65 --- /dev/null +++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/MaxLength.java @@ -0,0 +1,110 @@ +/*- + * 
============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + +package org.onap.sdc.toscaparser.api.elements.constraints; + +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +import java.util.Collections; +import java.util.LinkedHashMap; + +public class MaxLength extends Constraint { + // Constraint class for "max_length" + + // Constrains the property or parameter to a value of a maximum length.
+ + @Override + protected void setValues() { + + setConstraintKey(MAX_LENGTH); + + addValidTypes(Collections.singletonList("Integer")); + + + validPropTypes.add(Schema.STRING); + validPropTypes.add(Schema.MAP); + + } + + public MaxLength(String name, String type, Object c) { + super(name, type, c); + + if (!validTypes.contains(constraintValue.getClass().getSimpleName())) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE112", "InvalidSchemaError: The property \"max_length\" expects an integer")); + } + } + + @SuppressWarnings("unchecked") + @Override + protected boolean isValid(Object value) { + if (value instanceof String && constraintValue instanceof Integer + && ((String) value).length() <= (Integer) constraintValue) { + return true; + } else { + return value instanceof LinkedHashMap && constraintValue instanceof Integer + && ((LinkedHashMap) value).size() <= (Integer) constraintValue; + } + } + + @Override + protected String errMsg(Object value) { + return String.format("Length of value \"%s\" of property \"%s\" must be no greater than \"%s\"", + value.toString(), propertyName, constraintValue.toString()); + } + +} + +/*python + +class MaxLength(Constraint): + """Constraint class for "max_length" + + Constrains the property or parameter to a value to a maximum length. 
+ """ + + constraint_key = Constraint.MAX_LENGTH + + valid_types = (int, ) + + valid_prop_types = (Schema.STRING, Schema.MAP) + + def __init__(self, property_name, property_type, constraint): + super(MaxLength, self).__init__(property_name, property_type, + constraint) + if not isinstance(self.constraint_value, self.valid_types): + ValidationIsshueCollector.appendException( + InvalidSchemaError(message=_('The property "max_length" ' + 'expects an integer.'))) + + def _is_valid(self, value): + if ((isinstance(value, str) or isinstance(value, dict)) and + len(value) <= self.constraint_value): + return True + + return False + + def _err_msg(self, value): + return (_('Length of value "%(pvalue)s" of property "%(pname)s" ' + 'must be no greater than "%(cvalue)s".') % + dict(pname=self.property_name, + pvalue=value, + cvalue=self.constraint_value)) +*/ diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/MinLength.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/MinLength.java new file mode 100644 index 0000000..eb1d870 --- /dev/null +++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/MinLength.java @@ -0,0 +1,109 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + +package org.onap.sdc.toscaparser.api.elements.constraints; + +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +import java.util.Collections; +import java.util.LinkedHashMap; + +public class MinLength extends Constraint { + // Constraint class for "min_length" + + // Constrains the property or parameter to a value of a minimum length. + + @Override + protected void setValues() { + + setConstraintKey(MIN_LENGTH); + + addValidTypes(Collections.singletonList("Integer")); + + validPropTypes.add(Schema.STRING); + validPropTypes.add(Schema.MAP); + + } + + public MinLength(String name, String type, Object c) { + super(name, type, c); + + if (!validTypes.contains(constraintValue.getClass().getSimpleName())) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE113", "InvalidSchemaError: The property \"min_length\" expects an integer")); + } + } + + @SuppressWarnings("unchecked") + @Override + protected boolean isValid(Object value) { + if (value instanceof String && constraintValue instanceof Integer + && ((String) value).length() >= (Integer) constraintValue) { + return true; + } else { + return value instanceof LinkedHashMap && constraintValue instanceof Integer + && ((LinkedHashMap) value).size() >= (Integer) constraintValue; + } + } + + @Override + protected String errMsg(Object value) { + return String.format("Length of value \"%s\" of property 
\"%s\" must be at least \"%s\"", + value.toString(), propertyName, constraintValue.toString()); + } + +} + +/*python + +class MinLength(Constraint): + """Constraint class for "min_length" + + Constrains the property or parameter to a value to a minimum length. + """ + + constraint_key = Constraint.MIN_LENGTH + + valid_types = (int, ) + + valid_prop_types = (Schema.STRING, Schema.MAP) + + def __init__(self, property_name, property_type, constraint): + super(MinLength, self).__init__(property_name, property_type, + constraint) + if not isinstance(self.constraint_value, self.valid_types): + ValidationIsshueCollector.appendException( + InvalidSchemaError(message=_('The property "min_length" ' + 'expects an integer.'))) + + def _is_valid(self, value): + if ((isinstance(value, str) or isinstance(value, dict)) and + len(value) >= self.constraint_value): + return True + + return False + + def _err_msg(self, value): + return (_('Length of value "%(pvalue)s" of property "%(pname)s" ' + 'must be at least "%(cvalue)s".') % + dict(pname=self.property_name, + pvalue=value, + cvalue=self.constraint_value)) +*/ diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Pattern.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Pattern.java new file mode 100644 index 0000000..913e922 --- /dev/null +++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Pattern.java @@ -0,0 +1,116 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + +package org.onap.sdc.toscaparser.api.elements.constraints; + +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +import java.util.Collections; +import java.util.regex.Matcher; +import java.util.regex.PatternSyntaxException; + +public class Pattern extends Constraint { + + @Override + protected void setValues() { + + setConstraintKey(PATTERN); + + addValidTypes(Collections.singletonList("String")); + + validPropTypes.add(Schema.STRING); + + } + + + public Pattern(String name, String type, Object c) { + super(name, type, c); + + if (!validTypes.contains(constraintValue.getClass().getSimpleName())) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE114", "InvalidSchemaError: The property \"pattern\" expects a string")); + } + } + + @Override + protected boolean isValid(Object value) { + try { + if (!(value instanceof String)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE115", String.format("ValueError: Input value \"%s\" to \"pattern\" property \"%s\" must be a string", + value.toString(), propertyName))); + return false; + } + String strp = constraintValue.toString(); + String strm = value.toString(); + java.util.regex.Pattern pattern = java.util.regex.Pattern.compile(strp); + Matcher matcher = pattern.matcher(strm); + if (matcher.find() && matcher.end() == strm.length()) { + return true; + } + 
return false; + } catch (PatternSyntaxException pse) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE116", String.format("ValueError: Invalid regex \"%s\" in \"pattern\" property \"%s\"", + constraintValue.toString(), propertyName))); + return false; + } + } + + @Override + protected String errMsg(Object value) { + return String.format("The value \"%s\" of property \"%s\" does not match the pattern \"%s\"", + value.toString(), propertyName, constraintValue.toString()); + } + +} + +/*python + +class Pattern(Constraint): + """Constraint class for "pattern" + + Constrains the property or parameter to a value that is allowed by + the provided regular expression. + """ + + constraint_key = Constraint.PATTERN + + valid_types = (str, ) + + valid_prop_types = (Schema.STRING, ) + + def __init__(self, property_name, property_type, constraint): + super(Pattern, self).__init__(property_name, property_type, constraint) + if not isinstance(self.constraint_value, self.valid_types): + ValidationIsshueCollector.appendException( + InvalidSchemaError(message=_('The property "pattern" ' + 'expects a string.'))) + self.match = re.compile(self.constraint_value).match + + def _is_valid(self, value): + match = self.match(value) + return match is not None and match.end() == len(value) + + def _err_msg(self, value): + return (_('The value "%(pvalue)s" of property "%(pname)s" does not ' + 'match pattern "%(cvalue)s".') % + dict(pname=self.property_name, + pvalue=value, + cvalue=self.constraint_value)) +*/ diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Schema.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Schema.java new file mode 100644 index 0000000..15ec597 --- /dev/null +++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Schema.java @@ -0,0 +1,309 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * 
================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + +package org.onap.sdc.toscaparser.api.elements.constraints; + +import com.google.common.collect.ImmutableMap; +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.elements.enums.FileSize; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.LinkedHashMap; +import java.util.Map; + + +public class Schema { + + private static final String TYPE = "type"; + private static final String REQUIRED = "required"; + private static final String DESCRIPTION = "description"; + private static final String DEFAULT = "default"; + private static final String CONSTRAINTS = "constraints"; + private static final String STATUS = "status"; + private static final String ENTRYSCHEMA = "entry_schema"; + private static final String[] KEYS = { + TYPE, REQUIRED, DESCRIPTION, DEFAULT, CONSTRAINTS, ENTRYSCHEMA, STATUS}; + + public static final String INTEGER = "integer"; + public static final String STRING = "string"; + public static final String BOOLEAN = "boolean"; + public static final 
String FLOAT = "float"; + public static final String RANGE = "range"; + public static final String NUMBER = "number"; + public static final String TIMESTAMP = "timestamp"; + public static final String LIST = "list"; + public static final String MAP = "map"; + public static final String SCALAR_UNIT_SIZE = "scalar-unit.size"; + public static final String SCALAR_UNIT_FREQUENCY = "scalar-unit.frequency"; + public static final String SCALAR_UNIT_TIME = "scalar-unit.time"; + public static final String VERSION = "version"; + public static final String PORTDEF = "PortDef"; + public static final String PORTSPEC = "PortSpec"; //??? PortSpec.SHORTNAME + public static final String JSON = "json"; + + public static final String[] PROPERTY_TYPES = { + INTEGER, STRING, BOOLEAN, FLOAT, RANGE, NUMBER, TIMESTAMP, LIST, MAP, + SCALAR_UNIT_SIZE, SCALAR_UNIT_FREQUENCY, SCALAR_UNIT_TIME, + VERSION, PORTDEF, PORTSPEC, JSON}; + + public static final String[] SIMPLE_PROPERTY_TYPES = { + INTEGER, STRING, BOOLEAN, FLOAT, RANGE, NUMBER, TIMESTAMP, + SCALAR_UNIT_SIZE, SCALAR_UNIT_FREQUENCY, SCALAR_UNIT_TIME, + VERSION}; + + @SuppressWarnings("unused") + private static final String SCALAR_UNIT_SIZE_DEFAULT = "B"; + + private static Map scalarUnitSizeDict = ImmutableMap.builder() + .put("B", FileSize.B) + .put("KB", FileSize.KB) + .put("MB", FileSize.MB) + .put("GB", FileSize.GB) + .put("TB", FileSize.TB) + .put("KIB", FileSize.KIB) + .put("MIB", FileSize.MIB) + .put("GIB", FileSize.GIB) + .put("TIB", FileSize.TIB) + .build(); + + + private String name; + private LinkedHashMap schema; + private int len; + private ArrayList constraintsList; + + + public Schema(String name, LinkedHashMap schemaDict) { + this.name = name; + + if (!(schemaDict instanceof LinkedHashMap)) { + //msg = (_('Schema definition of "%(pname)s" must be a dict.') + // % dict(pname=name)) + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE117", String.format( + "InvalidSchemaError: Schema 
definition of \"%s\" must be a dict", this.name))); + } + + if (schemaDict.get("type") == null) { + //msg = (_('Schema definition of "%(pname)s" must have a "type" ' + // 'attribute.') % dict(pname=name)) + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE118", String.format( + "InvalidSchemaError: Schema definition of \"%s\" must have a \"type\" attribute", this.name))); + } + + schema = schemaDict; + len = 0; //??? None + constraintsList = new ArrayList<>(); + } + + public String getType() { + return (String) schema.get(TYPE); + } + + public boolean isRequired() { + return (boolean) schema.getOrDefault(REQUIRED, true); + } + + public String getDescription() { + return (String) schema.getOrDefault(DESCRIPTION, ""); + } + + public Object getDefault() { + return schema.get(DEFAULT); + } + + public String getStatus() { + return (String) schema.getOrDefault(STATUS, ""); + } + + public static boolean isRequestedTypeSimple(String type) { + return Arrays.asList(SIMPLE_PROPERTY_TYPES).contains(type); + } + + @SuppressWarnings("unchecked") + public ArrayList getConstraints() { + if (constraintsList.size() == 0) { + Object cob = schema.get(CONSTRAINTS); + if (cob instanceof ArrayList) { + ArrayList constraintSchemata = (ArrayList) cob; + for (Object ob : constraintSchemata) { + if (ob instanceof LinkedHashMap) { + for (String cClass : ((LinkedHashMap) ob).keySet()) { + Constraint c = Constraint.factory(cClass, name, getType(), ob); + if (c != null) { + constraintsList.add(c); + } else { + // error + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE119", String.format( + "UnknownFieldError: Constraint type \"%s\" for property \"%s\" is not supported", + cClass, name))); + } + break; + } + } + } + } + } + return constraintsList; + } + + @SuppressWarnings("unchecked") + public LinkedHashMap getEntrySchema() { + return (LinkedHashMap) schema.get(ENTRYSCHEMA); + } + + // Python intrinsic methods... 
+ + // substitute for __getitem__ (aka self[key]) + public Object getItem(String key) { + return schema.get(key); + } + + /* + def __iter__(self): + for k in self.KEYS: + try: + self.schema[k] + except KeyError: + pass + else: + yield k + */ + + // substitute for __len__ (aka self.len()) + public int getLen() { + int len = 0; + for (String k : KEYS) { + if (schema.get(k) != null) { + len++; + } + this.len = len; + } + return this.len; + } + + // getter + public LinkedHashMap getSchema() { + return schema; + } + +} + +/*python + +class Schema(collections.Mapping): + +KEYS = ( + TYPE, REQUIRED, DESCRIPTION, + DEFAULT, CONSTRAINTS, ENTRYSCHEMA, STATUS +) = ( + 'type', 'required', 'description', + 'default', 'constraints', 'entry_schema', 'status' +) + +PROPERTY_TYPES = ( + INTEGER, STRING, BOOLEAN, FLOAT, RANGE, + NUMBER, TIMESTAMP, LIST, MAP, + SCALAR_UNIT_SIZE, SCALAR_UNIT_FREQUENCY, SCALAR_UNIT_TIME, + VERSION, PORTDEF, PORTSPEC +) = ( + 'integer', 'string', 'boolean', 'float', 'range', + 'number', 'timestamp', 'list', 'map', + 'scalar-unit.size', 'scalar-unit.frequency', 'scalar-unit.time', + 'version', 'PortDef', PortSpec.SHORTNAME +) + +SCALAR_UNIT_SIZE_DEFAULT = 'B' +scalarUnitSizeDict = {'B': 1, 'KB': 1000, 'KIB': 1024, 'MB': 1000000, + 'MIB': 1048576, 'GB': 1000000000, + 'GIB': 1073741824, 'TB': 1000000000000, + 'TIB': 1099511627776} + +def __init__(self, name, schema_dict): + self.name = name + if not isinstance(schema_dict, collections.Mapping): + msg = (_('Schema definition of "%(pname)s" must be a dict.') + % dict(pname=name)) + ValidationIssueCollector.appendException(InvalidSchemaError(message=msg)) + + try: + schema_dict['type'] + except KeyError: + msg = (_('Schema definition of "%(pname)s" must have a "type" ' + 'attribute.') % dict(pname=name)) + ValidationIssueCollector.appendException(InvalidSchemaError(message=msg)) + + self.schema = schema_dict + self.len = None + self.constraints_list = [] + +@property +def type(self): + return 
self.schema[self.TYPE] + +@property +def required(self): + return self.schema.get(self.REQUIRED, True) + +@property +def description(self): + return self.schema.get(self.DESCRIPTION, '') + +@property +def default(self): + return self.schema.get(self.DEFAULT) + +@property +def status(self): + return self.schema.get(self.STATUS, '') + +@property +def constraints(self): + if not self.constraints_list: + constraint_schemata = self.schema.get(self.CONSTRAINTS) + if constraint_schemata: + self.constraints_list = [Constraint(self.name, + self.type, + cschema) + for cschema in constraint_schemata] + return self.constraints_list + +@property +def entry_schema(self): + return self.schema.get(self.ENTRYSCHEMA) + +def __getitem__(self, key): + return self.schema[key] + +def __iter__(self): + for k in self.KEYS: + try: + self.schema[k] + except KeyError: + pass + else: + yield k + +def __len__(self): + if self.len is None: + self.len = len(list(iter(self))) + return self.len +*/ diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/ValidValues.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/ValidValues.java new file mode 100644 index 0000000..c3a192d --- /dev/null +++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/ValidValues.java @@ -0,0 +1,99 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + +package org.onap.sdc.toscaparser.api.elements.constraints; + +import java.util.ArrayList; +import java.util.Collections; + +public class ValidValues extends Constraint { + + + protected void setValues() { + setConstraintKey(VALID_VALUES); + Collections.addAll(validPropTypes, Schema.PROPERTY_TYPES); + } + + + public ValidValues(String name, String type, Object c) { + super(name, type, c); + } + + @SuppressWarnings("unchecked") + protected boolean isValid(Object val) { + if (!(constraintValue instanceof ArrayList)) { + return false; + } + if (val instanceof ArrayList) { + boolean bAll = true; + for (Object v : (ArrayList) val) { + if (!((ArrayList) constraintValue).contains(v)) { + bAll = false; + break; + } + } + return bAll; + } + return ((ArrayList) constraintValue).contains(val); + } + + protected String errMsg(Object value) { + return String.format("The value \"%s\" of property \"%s\" is not valid. Expected a value from \"%s\"", + value.toString(), propertyName, constraintValue.toString()); + } + +} + +/*python + +class ValidValues(Constraint): +"""Constraint class for "valid_values" + +Constrains a property or parameter to a value that is in the list of +declared values. 
+""" +constraint_key = Constraint.VALID_VALUES + +valid_prop_types = Schema.PROPERTY_TYPES + +def __init__(self, property_name, property_type, constraint): + super(ValidValues, self).__init__(property_name, property_type, + constraint) + if not isinstance(self.constraint_value, collections.Sequence): + ValidationIsshueCollector.appendException( + InvalidSchemaError(message=_('The property "valid_values" ' + 'expects a list.'))) + +def _is_valid(self, value): + print '*** payton parser validating ',value,' in ',self.constraint_value#GGG + if isinstance(value, list): + return all(v in self.constraint_value for v in value) + return value in self.constraint_value + +def _err_msg(self, value): + allowed = '[%s]' % ', '.join(str(a) for a in self.constraint_value) + return (_('The value "%(pvalue)s" of property "%(pname)s" is not ' + 'valid. Expected a value from "%(cvalue)s".') % + dict(pname=self.property_name, + pvalue=value, + cvalue=allowed)) + + +*/ diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/enums/FileSize.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/enums/FileSize.java new file mode 100644 index 0000000..b07f7fa --- /dev/null +++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/enums/FileSize.java @@ -0,0 +1,32 @@ +/* +============LICENSE_START======================================================= + SDC + ================================================================================ + Copyright (C) 2019 Nokia. All rights reserved. + ================================================================================ + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
/**
 * Byte multipliers for TOSCA scalar-unit.size suffixes.
 *
 * Decimal (SI) units use powers of 1000; binary (IEC) units use powers
 * of 1024. Fix: KIB was wrongly defined as 1000 — a kibibyte is 1024
 * bytes (the Python toscaparser reference table also uses 'KIB': 1024).
 */
public final class FileSize {

    public static final long B = 1L;
    public static final long KB = 1000L;
    public static final long MB = 1000000L;
    public static final long GB = 1000000000L;
    public static final long TB = 1000000000000L;
    public static final long KIB = 1024L;          // was 1000L — 2^10
    public static final long MIB = 1048576L;       // 2^20
    public static final long GIB = 1073741824L;    // 2^30
    public static final long TIB = 1099511627776L; // 2^40

    // Constants holder — not instantiable.
    private FileSize() {
    }
}
/**
 * Well-known TOSCA template element keys, each paired with the exact
 * string used in the YAML representation.
 */
public enum ToscaElementNames {

    TYPE("type"),
    PROPERTIES("properties"),
    ANNOTATIONS("annotations"),
    SOURCE_TYPE("source_type");

    // The literal key as it appears in the template.
    private final String name;

    ToscaElementNames(String name) {
        this.name = name;
    }

    /** @return the YAML key string for this element. */
    public String getName() {
        return name;
    }

}
+ * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + +package org.onap.sdc.toscaparser.api.extensions; + +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; +import org.reflections.Reflections; +import org.reflections.scanners.ResourcesScanner; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.BufferedReader; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.charset.Charset; +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.Set; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +public class ExtTools { + + private static Logger log = LoggerFactory.getLogger(ExtTools.class.getName()); + + private static LinkedHashMap extensionInfo = new LinkedHashMap<>(); + + public ExtTools() { + extensionInfo = loadExtensions(); + } + + private LinkedHashMap loadExtensions() { + + LinkedHashMap extensions = new LinkedHashMap<>(); + + Reflections reflections = new Reflections("extensions", new ResourcesScanner()); + Set resourcePaths = reflections.getResources(Pattern.compile(".*\\.py$")); + + for (String resourcePath : resourcePaths) { + try (InputStream is = ExtTools.class.getClassLoader().getResourceAsStream(resourcePath); + InputStreamReader isr = new InputStreamReader(is, Charset.forName("UTF-8")); + BufferedReader br = new BufferedReader(isr);) { + String version = null; + ArrayList sections = null; + String defsFile = null; + String line; + + Pattern pattern = Pattern.compile("^([^#]\\S+)\\s*=\\s*(\\S.*)$"); + while ((line = br.readLine()) != null) { + line = line.replace("'", "\""); + Matcher matcher = pattern.matcher(line); + if (matcher.find()) { + if (matcher.group(1).equals("VERSION")) { + version = matcher.group(2); + if (version.startsWith("'") 
|| version.startsWith("\"")) { + version = version.substring(1, version.length() - 1); + } + } else if (matcher.group(1).equals("DEFS_FILE")) { + String fn = matcher.group(2); + if (fn.startsWith("'") || fn.startsWith("\"")) { + fn = fn.substring(1, fn.length() - 1); + } + defsFile = resourcePath.replaceFirst("\\w*.py$", fn); + } else if (matcher.group(1).equals("SECTIONS")) { + sections = new ArrayList<>(); + Pattern secpat = Pattern.compile("\"([^\"]+)\""); + Matcher secmat = secpat.matcher(matcher.group(2)); + while (secmat.find()) { + sections.add(secmat.group(1)); + } + } + } + } + + if (version != null && defsFile != null) { + LinkedHashMap ext = new LinkedHashMap<>(); + ext.put("defs_file", defsFile); + if (sections != null) { + ext.put("sections", sections); + } + extensions.put(version, ext); + } + } catch (Exception e) { + log.error("ExtTools - loadExtensions - {}", e); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue( + "JE281", "Failed to load extensions" + e.getMessage())); + } + } + return extensions; + } + + public ArrayList getVersions() { + return new ArrayList(extensionInfo.keySet()); + } + + public LinkedHashMap> getSections() { + LinkedHashMap> sections = new LinkedHashMap<>(); + for (String version : extensionInfo.keySet()) { + LinkedHashMap eiv = (LinkedHashMap) extensionInfo.get(version); + sections.put(version, (ArrayList) eiv.get("sections")); + } + return sections; + } + + public String getDefsFile(String version) { + LinkedHashMap eiv = (LinkedHashMap) extensionInfo.get(version); + return (String) eiv.get("defs_file"); + } + +} + +/*python + +from toscaparser.common.exception import ToscaExtAttributeError +from toscaparser.common.exception import ToscaExtImportError + +log = logging.getLogger("tosca.model") + +REQUIRED_ATTRIBUTES = ['VERSION', 'DEFS_FILE'] + + +class ExtTools(object): + def __init__(self): + self.extensionInfo = self._load_extensions() + + def _load_extensions(self): + '''Dynamically 
load all the extensions .''' + extensions = {} + + # Use the absolute path of the class path + abs_path = os.path.dirname(os.path.abspath(__file__)) + + extdirs = [e for e in os.listdir(abs_path) if + not e.startswith('tests') and + os.path.isdir(os.path.join(abs_path, e))] + + for e in extdirs: + log.info(e) + extpath = abs_path + '/' + e + # Grab all the extension files in the given path + ext_files = [f for f in os.listdir(extpath) if f.endswith('.py') + and not f.startswith('__init__')] + + # For each module, pick out the target translation class + for f in ext_files: + log.info(f) + ext_name = 'toscaparser/extensions/' + e + '/' + f.strip('.py') + ext_name = ext_name.replace('/', '.') + try: + extinfo = importlib.import_module(ext_name) + version = getattr(extinfo, 'VERSION') + defs_file = extpath + '/' + getattr(extinfo, 'DEFS_FILE') + + # Sections is an optional attribute + sections = getattr(extinfo, 'SECTIONS', ()) + + extensions[version] = {'sections': sections, + 'defs_file': defs_file} + except ImportError: + raise ToscaExtImportError(ext_name=ext_name) + except AttributeError: + attrs = ', '.join(REQUIRED_ATTRIBUTES) + raise ToscaExtAttributeError(ext_name=ext_name, + attrs=attrs) + + print 'Extensions ',extensions#GGG + return extensions + + def get_versions(self): + return self.extensionInfo.keys() + + def get_sections(self): + sections = {} + for version in self.extensionInfo.keys(): + sections[version] = self.extensionInfo[version]['sections'] + + return sections + + def get_defs_file(self, version): + versiondata = self.extensionInfo.get(version) + + if versiondata: + return versiondata.get('defs_file') + else: + return None +*/ diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/functions/Concat.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/functions/Concat.java new file mode 100644 index 0000000..4ebeba9 --- /dev/null +++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/functions/Concat.java @@ -0,0 +1,97 @@ +/*- + * 
============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + +package org.onap.sdc.toscaparser.api.functions; + +import org.onap.sdc.toscaparser.api.TopologyTemplate; +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +import java.util.ArrayList; + +public class Concat extends Function { + // Validate the function and provide an instance of the function + + // Concatenation of values are supposed to be produced at runtime and + // therefore its the responsibility of the TOSCA engine to implement the + // evaluation of Concat functions. 
+ + // Arguments: + + // * List of strings that needs to be concatenated + + // Example: + + // [ 'http://', + // get_attribute: [ server, public_address ], + // ':' , + // get_attribute: [ server, port ] ] + + + public Concat(TopologyTemplate ttpl, Object context, String name, ArrayList args) { + super(ttpl, context, name, args); + } + + @Override + public Object result() { + return this; + } + + @Override + void validate() { + if (args.size() < 1) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE145", + "ValueError: Invalid arguments for function \"concat\". " + + "Expected at least one argument")); + } + } + +} + +/*python + +class Concat(Function): +"""Validate the function and provide an instance of the function + +Concatenation of values are supposed to be produced at runtime and +therefore its the responsibility of the TOSCA engine to implement the +evaluation of Concat functions. + +Arguments: + +* List of strings that needs to be concatenated + +Example: + + [ 'http://', + get_attribute: [ server, public_address ], + ':' , + get_attribute: [ server, port ] ] +""" + +def validate(self): + if len(self.args) < 1: + ValidationIsshueCollector.appendException( + ValueError(_('Invalid arguments for function "{0}". Expected ' + 'at least one arguments.').format(CONCAT))) + +def result(self): + return self +*/ diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/functions/Function.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/functions/Function.java new file mode 100644 index 0000000..711a7ca --- /dev/null +++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/functions/Function.java @@ -0,0 +1,259 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. 
+ * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + +package org.onap.sdc.toscaparser.api.functions; + + +import org.onap.sdc.toscaparser.api.TopologyTemplate; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.Map; + +public abstract class Function { + + protected static final String GET_PROPERTY = "get_property"; + protected static final String GET_ATTRIBUTE = "get_attribute"; + protected static final String GET_INPUT = "get_input"; + protected static final String GET_OPERATION_OUTPUT = "get_operation_output"; + protected static final String CONCAT = "concat"; + protected static final String TOKEN = "token"; + + protected static final String SELF = "SELF"; + protected static final String HOST = "HOST"; + protected static final String TARGET = "TARGET"; + protected static final String SOURCE = "SOURCE"; + + protected static final String HOSTED_ON = "tosca.relationships.HostedOn"; + + protected static HashMap functionMappings = _getFunctionMappings(); + + private static HashMap _getFunctionMappings() { + HashMap map = new HashMap<>(); + map.put(GET_PROPERTY, "GetProperty"); + map.put(GET_INPUT, "GetInput"); + map.put(GET_ATTRIBUTE, "GetAttribute"); + map.put(GET_OPERATION_OUTPUT, "GetOperationOutput"); + map.put(CONCAT, "Concat"); + 
map.put(TOKEN, "Token"); + return map; + } + + protected TopologyTemplate toscaTpl; + protected Object context; + protected String name; + protected ArrayList args; + + + public Function(TopologyTemplate _toscaTpl, Object _context, String _name, ArrayList _args) { + toscaTpl = _toscaTpl; + context = _context; + name = _name; + args = _args; + validate(); + + } + + abstract Object result(); + + abstract void validate(); + + @SuppressWarnings("unchecked") + public static boolean isFunction(Object funcObj) { + // Returns True if the provided function is a Tosca intrinsic function. + // + //Examples: + // + //* "{ get_property: { SELF, port } }" + //* "{ get_input: db_name }" + //* Function instance + + //:param function: Function as string or a Function instance. + //:return: True if function is a Tosca intrinsic function, otherwise False. + // + + if (funcObj instanceof LinkedHashMap) { + LinkedHashMap function = (LinkedHashMap) funcObj; + if (function.size() == 1) { + String funcName = (new ArrayList(function.keySet())).get(0); + return functionMappings.keySet().contains(funcName); + } + } + return (funcObj instanceof Function); + } + + @SuppressWarnings("unchecked") + public static Object getFunction(TopologyTemplate ttpl, Object context, Object rawFunctionObj, boolean resolveGetInput) { + // Gets a Function instance representing the provided template function. + + // If the format provided raw_function format is not relevant for template + // functions or if the function name doesn't exist in function mapping the + // method returns the provided raw_function. + // + // :param tosca_tpl: The tosca template. + // :param node_template: The node template the function is specified for. + // :param raw_function: The raw function as dict. + // :return: Template function as Function instance or the raw_function if + // parsing was unsuccessful. 
+ + + // iterate over leaves of the properties's tree and convert function leaves to function object, + // support List and Map nested, + // assuming that leaf value of function is always map type contains 1 item (e.g. my_leaf: {get_input: xxx}). + + if (rawFunctionObj instanceof LinkedHashMap) { // In map type case + LinkedHashMap rawFunction = ((LinkedHashMap) rawFunctionObj); + if (rawFunction.size() == 1 && + !(rawFunction.values().iterator().next() instanceof LinkedHashMap)) { // End point + return getFunctionForObjectItem(ttpl, context, rawFunction, resolveGetInput); + } else { + return getFunctionForMap(ttpl, context, rawFunction, resolveGetInput); + } + } else if (rawFunctionObj instanceof ArrayList) { // In list type case + return getFunctionForList(ttpl, context, (ArrayList) rawFunctionObj, resolveGetInput); + } + + return rawFunctionObj; + } + + private static Object getFunctionForList(TopologyTemplate ttpl, Object context, ArrayList rawFunctionObj, boolean resolveGetInput) { + // iterate over list properties in recursion, convert leaves to function, + // and collect them in the same hierarchy as the original list. + ArrayList rawFunctionObjList = new ArrayList<>(); + for (Object rawFunctionObjItem : rawFunctionObj) { + rawFunctionObjList.add(getFunction(ttpl, context, rawFunctionObjItem, resolveGetInput)); + } + return rawFunctionObjList; + } + + private static Object getFunctionForMap(TopologyTemplate ttpl, Object context, LinkedHashMap rawFunction, boolean resolveGetInput) { + // iterate over map nested properties in recursion, convert leaves to function, + // and collect them in the same hierarchy as the original map. 
+ LinkedHashMap rawFunctionObjMap = new LinkedHashMap(); + for (Object rawFunctionObjItem : rawFunction.entrySet()) { + Object itemValue = getFunction(ttpl, context, ((Map.Entry) rawFunctionObjItem).getValue(), resolveGetInput); + rawFunctionObjMap.put(((Map.Entry) rawFunctionObjItem).getKey(), itemValue); + } + return rawFunctionObjMap; + } + + private static Object getFunctionForObjectItem(TopologyTemplate ttpl, Object context, Object rawFunctionObjItem, boolean resolveGetInput) { + if (isFunction(rawFunctionObjItem)) { + LinkedHashMap rawFunction = (LinkedHashMap) rawFunctionObjItem; + String funcName = (new ArrayList(rawFunction.keySet())).get(0); + if (functionMappings.keySet().contains(funcName)) { + String funcType = functionMappings.get(funcName); + Object oargs = (new ArrayList(rawFunction.values())).get(0); + ArrayList funcArgs; + if (oargs instanceof ArrayList) { + funcArgs = (ArrayList) oargs; + } else { + funcArgs = new ArrayList<>(); + funcArgs.add(oargs); + } + + switch (funcType) { + case "GetInput": + if (resolveGetInput) { + GetInput input = new GetInput(ttpl, context, funcName, funcArgs); + return input.result(); + } + return new GetInput(ttpl, context, funcName, funcArgs); + case "GetAttribute": + return new GetAttribute(ttpl, context, funcName, funcArgs); + case "GetProperty": + return new GetProperty(ttpl, context, funcName, funcArgs); + case "GetOperationOutput": + return new GetOperationOutput(ttpl, context, funcName, funcArgs); + case "Concat": + return new Concat(ttpl, context, funcName, funcArgs); + case "Token": + return new Token(ttpl, context, funcName, funcArgs); + } + } + } + + return rawFunctionObjItem; + } + + @Override + public String toString() { + String argsStr = args.size() > 1 ? 
args.toString() : args.get(0).toString(); + return name + ":" + argsStr; + } +} + +/*python + +from toscaparser.common.exception import ValidationIsshueCollector +from toscaparser.common.exception import UnknownInputError +from toscaparser.dataentity import DataEntity +from toscaparser.elements.constraints import Schema +from toscaparser.elements.datatype import DataType +from toscaparser.elements.entity_type import EntityType +from toscaparser.elements.relationshiptype import RelationshipType +from toscaparser.elements.statefulentitytype import StatefulEntityType +from toscaparser.utils.gettextutils import _ + + +GET_PROPERTY = 'get_property' +GET_ATTRIBUTE = 'get_attribute' +GET_INPUT = 'get_input' +GET_OPERATION_OUTPUT = 'get_operation_output' +CONCAT = 'concat' +TOKEN = 'token' + +SELF = 'SELF' +HOST = 'HOST' +TARGET = 'TARGET' +SOURCE = 'SOURCE' + +HOSTED_ON = 'tosca.relationships.HostedOn' + + +@six.add_metaclass(abc.ABCMeta) +class Function(object): + """An abstract type for representing a Tosca template function.""" + + def __init__(self, tosca_tpl, context, name, args): + self.tosca_tpl = tosca_tpl + self.context = context + self.name = name + self.args = args + self.validate() + + @abc.abstractmethod + def result(self): + """Invokes the function and returns its result + + Some methods invocation may only be relevant on runtime (for example, + getting runtime properties) and therefore its the responsibility of + the orchestrator/translator to take care of such functions invocation. + + :return: Function invocation result. 
+ """ + return {self.name: self.args} + + @abc.abstractmethod + def validate(self): + """Validates function arguments.""" + pass +*/ diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/functions/GetAttribute.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/functions/GetAttribute.java new file mode 100644 index 0000000..564d410 --- /dev/null +++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/functions/GetAttribute.java @@ -0,0 +1,544 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + +package org.onap.sdc.toscaparser.api.functions; + +import org.onap.sdc.toscaparser.api.*; +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +import java.util.ArrayList; +import java.util.LinkedHashMap; + +import org.onap.sdc.toscaparser.api.*; +import org.onap.sdc.toscaparser.api.elements.AttributeDef; +import org.onap.sdc.toscaparser.api.elements.CapabilityTypeDef; +import org.onap.sdc.toscaparser.api.elements.DataType; +import org.onap.sdc.toscaparser.api.elements.EntityType; +import org.onap.sdc.toscaparser.api.elements.NodeType; +import org.onap.sdc.toscaparser.api.elements.PropertyDef; +import org.onap.sdc.toscaparser.api.elements.RelationshipType; +import org.onap.sdc.toscaparser.api.elements.StatefulEntityType; +import org.onap.sdc.toscaparser.api.elements.constraints.Schema; + +public class GetAttribute extends Function { + // Get an attribute value of an entity defined in the service template + + // Node template attributes values are set in runtime and therefore its the + // responsibility of the Tosca engine to implement the evaluation of + // get_attribute functions. + + // Arguments: + + // * Node template name | HOST. + // * Attribute name. + + // If the HOST keyword is passed as the node template name argument the + // function will search each node template along the HostedOn relationship + // chain until a node which contains the attribute is found. 
+ + // Examples: + + // * { get_attribute: [ server, private_address ] } + // * { get_attribute: [ HOST, private_address ] } + // * { get_attribute: [ HOST, private_address, 0 ] } + // * { get_attribute: [ HOST, private_address, 0, some_prop] } + + public GetAttribute(TopologyTemplate ttpl, Object context, String name, ArrayList args) { + super(ttpl, context, name, args); + } + + @Override + void validate() { + if (args.size() < 2) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE146", + "ValueError: Illegal arguments for function \"get_attribute\". Expected arguments: \"node-template-name\", \"req-or-cap\" (optional), \"property name.\"")); + return; + } else if (args.size() == 2) { + _findNodeTemplateContainingAttribute(); + } else { + NodeTemplate nodeTpl = _findNodeTemplate((String) args.get(0)); + if (nodeTpl == null) { + return; + } + int index = 2; + AttributeDef attr = nodeTpl.getTypeDefinition().getAttributeDefValue((String) args.get(1)); + if (attr != null) { + // found + } else { + index = 3; + // then check the req or caps + if (!(args.get(1) instanceof String) || !(args.get(2) instanceof String)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE146", "ValueError: Illegal arguments for function \"get_attribute\". Expected a String argument")); + } + + attr = _findReqOrCapAttribute(args.get(1).toString(), args.get(2).toString()); + if (attr == null) { + return; + } + } + + + String valueType = (String) attr.getSchema().get("type"); + if (args.size() > index) { + for (Object elem : args.subList(index, args.size())) { + if (valueType.equals("list")) { + if (!(elem instanceof Integer)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE147", String.format( + "ValueError: Illegal arguments for function \"get_attribute\" \"%s\". 
Expected positive integer argument", + elem.toString()))); + } + Object ob = attr.getSchema().get("entry_schema"); + valueType = (String) + ((LinkedHashMap) ob).get("type"); + } else if (valueType.equals("map")) { + Object ob = attr.getSchema().get("entry_schema"); + valueType = (String) + ((LinkedHashMap) ob).get("type"); + } else { + boolean bFound = false; + for (String p : Schema.PROPERTY_TYPES) { + if (p.equals(valueType)) { + bFound = true; + break; + } + } + if (bFound) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE148", String.format( + "ValueError: 'Illegal arguments for function \"get_attribute\". Unexpected attribute/index value \"%s\"", + elem))); + return; + } else { // It is a complex type + DataType dataType = new DataType(valueType, null); + LinkedHashMap props = + dataType.getAllProperties(); + PropertyDef prop = props.get((String) elem); + if (prop != null) { + valueType = (String) prop.getSchema().get("type"); + } else { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE149", String.format( + "KeyError: Illegal arguments for function \"get_attribute\". Attribute name \"%s\" not found in \"%s\"", + elem, valueType))); + } + } + } + } + } + } + + @Override + public Object result() { + return this; + } + + private NodeTemplate getReferencedNodeTemplate() { + // Gets the NodeTemplate instance the get_attribute function refers to + + // If HOST keyword was used as the node template argument, the node + // template which contains the attribute along the HostedOn relationship + // chain will be returned.
+ + return _findNodeTemplateContainingAttribute(); + + } + + // Attributes can be explicitly created as part of the type definition + // or a property name can be implicitly used as an attribute name + private NodeTemplate _findNodeTemplateContainingAttribute() { + NodeTemplate nodeTpl = _findNodeTemplate((String) args.get(0)); + if (nodeTpl != null && + !_attributeExistsInType(nodeTpl.getTypeDefinition()) && + !nodeTpl.getProperties().keySet().contains(getAttributeName())) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE150", String.format( + "KeyError: Attribute \"%s\" was not found in node template \"%s\"", + getAttributeName(), nodeTpl.getName()))); + } + return nodeTpl; + } + + private boolean _attributeExistsInType(StatefulEntityType typeDefinition) { + LinkedHashMap attrsDef = typeDefinition.getAttributesDef(); + return attrsDef.get(getAttributeName()) != null; + } + + private NodeTemplate _findHostContainingAttribute(String nodeTemplateName) { + NodeTemplate nodeTemplate = _findNodeTemplate(nodeTemplateName); + if (nodeTemplate != null) { + LinkedHashMap hostedOnRel = + (LinkedHashMap) EntityType.TOSCA_DEF.get(HOSTED_ON); + for (RequirementAssignment r : nodeTemplate.getRequirements().getAll()) { + String targetName = r.getNodeTemplateName(); + NodeTemplate targetNode = _findNodeTemplate(targetName); + NodeType targetType = (NodeType) targetNode.getTypeDefinition(); + for (CapabilityTypeDef capability : targetType.getCapabilitiesObjects()) { +// if(((ArrayList)hostedOnRel.get("valid_target_types")).contains(capability.getType())) { + if (capability.inheritsFrom((ArrayList) hostedOnRel.get("valid_target_types"))) { + if (_attributeExistsInType(targetType)) { + return targetNode; + } + return _findHostContainingAttribute(targetName); + } + } + } + } + return null; + } + + + private NodeTemplate _findNodeTemplate(String nodeTemplateName) { + if (nodeTemplateName.equals(HOST)) { + // Currently this is the only way to 
tell whether the function + // is used within the outputs section of the TOSCA template. + if (context instanceof ArrayList) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE151", + "ValueError: \"get_attribute: [ HOST, ... ]\" is not allowed in \"outputs\" section of the TOSCA template")); + return null; + } + NodeTemplate nodeTpl = _findHostContainingAttribute(SELF); + if (nodeTpl == null) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE152", String.format( + "ValueError: \"get_attribute: [ HOST, ... ]\" was used in " + + "node template \"%s\" but \"%s\" was not found in " + + "the relationship chain", ((NodeTemplate) context).getName(), HOSTED_ON))); + return null; + } + return nodeTpl; + } + if (nodeTemplateName.equals(TARGET)) { + if (!(((EntityTemplate) context).getTypeDefinition() instanceof RelationshipType)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE153", + "KeyError: \"TARGET\" keyword can only be used in context " + + " to \"Relationships\" target node")); + return null; + } + return ((RelationshipTemplate) context).getTarget(); + } + if (nodeTemplateName.equals(SOURCE)) { + if (!(((EntityTemplate) context).getTypeDefinition() instanceof RelationshipType)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE154", + "KeyError: \"SOURCE\" keyword can only be used in context " + + " to \"Relationships\" source node")); + return null; + } + return ((RelationshipTemplate) context).getSource(); + } + String name; + if (nodeTemplateName.equals(SELF) && !(context instanceof ArrayList)) { + name = ((NodeTemplate) context).getName(); + } else { + name = nodeTemplateName; + } + for (NodeTemplate nt : toscaTpl.getNodeTemplates()) { + if (nt.getName().equals(name)) { + return nt; + } + } + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE155", String.format( + 
"KeyError: Node template \"%s\" was not found", nodeTemplateName))); + return null; + } + + public AttributeDef _findReqOrCapAttribute(String reqOrCap, String attrName) { + + NodeTemplate nodeTpl = _findNodeTemplate((String) args.get(0)); + // Find attribute in node template's requirements + for (RequirementAssignment r : nodeTpl.getRequirements().getAll()) { + String nodeName = r.getNodeTemplateName(); + if (r.getName().equals(reqOrCap)) { + NodeTemplate nodeTemplate = _findNodeTemplate(nodeName); + return _getCapabilityAttribute(nodeTemplate, r.getName(), attrName); + } + } + // If requirement was not found, look in node template's capabilities + return _getCapabilityAttribute(nodeTpl, reqOrCap, attrName); + } + + private AttributeDef _getCapabilityAttribute(NodeTemplate nodeTemplate, + String capabilityName, + String attrName) { + // Gets a node template capability attribute + CapabilityAssignment cap = nodeTemplate.getCapabilities().getCapabilityByName(capabilityName); + + if (cap != null) { + AttributeDef attribute = null; + LinkedHashMap attrs = + cap.getDefinition().getAttributesDef(); + if (attrs != null && attrs.keySet().contains(attrName)) { + attribute = attrs.get(attrName); + } + if (attribute == null) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE156", String.format( + "KeyError: Attribute \"%s\" was not found in capability \"%s\" of node template \"%s\" referenced from node template \"%s\"", + attrName, capabilityName, nodeTemplate.getName(), ((NodeTemplate) context).getName()))); + } + return attribute; + } + String msg = String.format( + "Requirement/CapabilityAssignment \"%s\" referenced from node template \"%s\" was not found in node template \"%s\"", + capabilityName, ((NodeTemplate) context).getName(), nodeTemplate.getName()); + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE157", "KeyError: " + msg)); + return null; + } + + String getNodeTemplateName() { + return 
(String) args.get(0); + } + + String getAttributeName() { + return (String) args.get(1); + } + +} + +/*python + +class GetAttribute(Function): +"""Get an attribute value of an entity defined in the service template + +Node template attributes values are set in runtime and therefore its the +responsibility of the Tosca engine to implement the evaluation of +get_attribute functions. + +Arguments: + +* Node template name | HOST. +* Attribute name. + +If the HOST keyword is passed as the node template name argument the +function will search each node template along the HostedOn relationship +chain until a node which contains the attribute is found. + +Examples: + +* { get_attribute: [ server, private_address ] } +* { get_attribute: [ HOST, private_address ] } +* { get_attribute: [ HOST, private_address, 0 ] } +* { get_attribute: [ HOST, private_address, 0, some_prop] } +""" + +def validate(self): + if len(self.args) < 2: + ValidationIssueCollector.appendException( + ValueError(_('Illegal arguments for function "{0}". Expected ' + 'arguments: "node-template-name", "req-or-cap"' + '(optional), "property name"' + ).format(GET_ATTRIBUTE))) + return + elif len(self.args) == 2: + self._find_node_template_containing_attribute() + else: + node_tpl = self._find_node_template(self.args[0]) + if node_tpl is None: + return + index = 2 + attrs = node_tpl.type_definition.get_attributes_def() + found = [attrs[self.args[1]]] if self.args[1] in attrs else [] + if found: + attr = found[0] + else: + index = 3 + # then check the req or caps + attr = self._find_req_or_cap_attribute(self.args[1], + self.args[2]) + + value_type = attr.schema['type'] + if len(self.args) > index: + for elem in self.args[index:]: + if value_type == "list": + if not isinstance(elem, int): + ValidationIssueCollector.appendException( + ValueError(_('Illegal arguments for function' + ' "{0}". 
"{1}" Expected positive' + ' integer argument' + ).format(GET_ATTRIBUTE, elem))) + value_type = attr.schema['entry_schema']['type'] + elif value_type == "map": + value_type = attr.schema['entry_schema']['type'] + elif value_type in Schema.PROPERTY_TYPES: + ValidationIssueCollector.appendException( + ValueError(_('Illegal arguments for function' + ' "{0}". Unexpected attribute/' + 'index value "{1}"' + ).format(GET_ATTRIBUTE, elem))) + return + else: # It is a complex type + data_type = DataType(value_type) + props = data_type.get_all_properties() + found = [props[elem]] if elem in props else [] + if found: + prop = found[0] + value_type = prop.schema['type'] + else: + ValidationIssueCollector.appendException( + KeyError(_('Illegal arguments for function' + ' "{0}". Attribute name "{1}" not' + ' found in "{2}"' + ).format(GET_ATTRIBUTE, + elem, + value_type))) + +def result(self): + return self + +def get_referenced_node_template(self): + """Gets the NodeTemplate instance the get_attribute function refers to. + + If HOST keyword was used as the node template argument, the node + template which contains the attribute along the HostedOn relationship + chain will be returned. 
+ """ + return self._find_node_template_containing_attribute() + +# Attributes can be explicitly created as part of the type definition +# or a property name can be implicitly used as an attribute name +def _find_node_template_containing_attribute(self): + node_tpl = self._find_node_template(self.args[0]) + if node_tpl and \ + not self._attribute_exists_in_type(node_tpl.type_definition) \ + and self.attribute_name not in node_tpl.get_properties(): + ValidationIssueCollector.appendException( + KeyError(_('Attribute "%(att)s" was not found in node ' + 'template "%(ntpl)s".') % + {'att': self.attribute_name, + 'ntpl': node_tpl.name})) + return node_tpl + +def _attribute_exists_in_type(self, type_definition): + attrs_def = type_definition.get_attributes_def() + found = [attrs_def[self.attribute_name]] \ + if self.attribute_name in attrs_def else [] + return len(found) == 1 + +def _find_host_containing_attribute(self, node_template_name=SELF): + node_template = self._find_node_template(node_template_name) + if node_template: + hosted_on_rel = EntityType.TOSCA_DEF[HOSTED_ON] + for r in node_template.requirements: + for requirement, target_name in r.items(): + target_node = self._find_node_template(target_name) + target_type = target_node.type_definition + for capability in target_type.get_capabilities_objects(): + if capability.type in \ + hosted_on_rel['valid_target_types']: + if self._attribute_exists_in_type(target_type): + return target_node + return self._find_host_containing_attribute( + target_name) + +def _find_node_template(self, node_template_name): + if node_template_name == HOST: + # Currently this is the only way to tell whether the function + # is used within the outputs section of the TOSCA template. + if isinstance(self.context, list): + ValidationIssueCollector.appendException( + ValueError(_( + '"get_attribute: [ HOST, ... 
]" is not allowed in ' + '"outputs" section of the TOSCA template.'))) + return + node_tpl = self._find_host_containing_attribute() + if not node_tpl: + ValidationIssueCollector.appendException( + ValueError(_( + '"get_attribute: [ HOST, ... ]" was used in node ' + 'template "{0}" but "{1}" was not found in ' + 'the relationship chain.').format(self.context.name, + HOSTED_ON))) + return + return node_tpl + if node_template_name == TARGET: + if not isinstance(self.context.type_definition, RelationshipType): + ValidationIssueCollector.appendException( + KeyError(_('"TARGET" keyword can only be used in context' + ' to "Relationships" target node'))) + return + return self.context.target + if node_template_name == SOURCE: + if not isinstance(self.context.type_definition, RelationshipType): + ValidationIssueCollector.appendException( + KeyError(_('"SOURCE" keyword can only be used in context' + ' to "Relationships" source node'))) + return + return self.context.source + name = self.context.name \ + if node_template_name == SELF and \ + not isinstance(self.context, list) \ + else node_template_name + for node_template in self.tosca_tpl.nodetemplates: + if node_template.name == name: + return node_template + ValidationIssueCollector.appendException( + KeyError(_( + 'Node template "{0}" was not found.' 
+ ).format(node_template_name))) + +def _find_req_or_cap_attribute(self, req_or_cap, attr_name): + node_tpl = self._find_node_template(self.args[0]) + # Find attribute in node template's requirements + for r in node_tpl.requirements: + for req, node_name in r.items(): + if req == req_or_cap: + node_template = self._find_node_template(node_name) + return self._get_capability_attribute( + node_template, + req, + attr_name) + # If requirement was not found, look in node template's capabilities + return self._get_capability_attribute(node_tpl, + req_or_cap, + attr_name) + +def _get_capability_attribute(self, + node_template, + capability_name, + attr_name): + """Gets a node template capability attribute.""" + caps = node_template.get_capabilities() + if caps and capability_name in caps.keys(): + cap = caps[capability_name] + attribute = None + attrs = cap.definition.get_attributes_def() + if attrs and attr_name in attrs.keys(): + attribute = attrs[attr_name] + if not attribute: + ValidationIssueCollector.appendException( + KeyError(_('Attribute "%(attr)s" was not found in ' + 'capability "%(cap)s" of node template ' + '"%(ntpl1)s" referenced from node template ' + '"%(ntpl2)s".') % {'attr': attr_name, + 'cap': capability_name, + 'ntpl1': node_template.name, + 'ntpl2': self.context.name})) + return attribute + msg = _('Requirement/CapabilityAssignment "{0}" referenced from node template ' + '"{1}" was not found in node template "{2}".').format( + capability_name, + self.context.name, + node_template.name) + ValidationIssueCollector.appendException(KeyError(msg)) + +@property +def node_template_name(self): + return self.args[0] + +@property +def attribute_name(self): + return self.args[1] +*/ diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/functions/GetInput.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/functions/GetInput.java new file mode 100644 index 0000000..ee5be17 --- /dev/null +++ 
b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/functions/GetInput.java @@ -0,0 +1,203 @@ +/*- + * ============LICENSE_START======================================================= + * Copyright (c) 2017 AT&T Intellectual Property. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * Modifications copyright (c) 2019 Fujitsu Limited. 
+ * ================================================================================ + */ +package org.onap.sdc.toscaparser.api.functions; + +import org.onap.sdc.toscaparser.api.DataEntity; +import org.onap.sdc.toscaparser.api.TopologyTemplate; +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; +import org.onap.sdc.toscaparser.api.parameters.Input; + +import java.util.ArrayList; +import java.util.LinkedHashMap; + +public class GetInput extends Function { + + public static final String INDEX = "INDEX"; + public static final String INPUTS = "inputs"; + public static final String TYPE = "type"; + public static final String PROPERTIES = "properties"; + public static final String ENTRY_SCHEMA = "entry_schema"; + + public GetInput(TopologyTemplate toscaTpl, Object context, String name, ArrayList _args) { + super(toscaTpl, context, name, _args); + + } + + @Override + void validate() { + +// if(args.size() != 1) { +// //PA - changed to WARNING from CRITICAL after talking to Renana, 22/05/2017 +// ThreadLocalsHolder.getCollector().appendWarning(String.format( +// "ValueError: Expected one argument for function \"get_input\" but received \"%s\"", +// args.toString())); +// } + boolean bFound = false; + for (Input inp : toscaTpl.getInputs()) { + if (inp.getName().equals(args.get(0))) { + bFound = true; + break; + } + } + if (!bFound) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE158", String.format( + "UnknownInputError: Unknown input \"%s\"", args.get(0)))); + } else if (args.size() > 2) { + LinkedHashMap inputs = (LinkedHashMap) toscaTpl.getTpl().get(INPUTS); + LinkedHashMap data = (LinkedHashMap) inputs.get(getInputName()); + String type; + + for (int argumentNumber = 1; argumentNumber < args.size(); argumentNumber++) { + String dataTypeName = ""; + bFound = false; + if (INDEX.equals(args.get(argumentNumber).toString()) || (args.get(argumentNumber) 
instanceof Integer)) { + bFound = true; + } else { + type = (String) data.get(TYPE); + //get type name + if (type.equals("list") || type.equals("map")) { + LinkedHashMap schema = (LinkedHashMap) data.get(ENTRY_SCHEMA); + dataTypeName = (String) schema.get(TYPE); + } else { + dataTypeName = type; + } + //check property name + LinkedHashMap dataType = (LinkedHashMap) toscaTpl.getCustomDefs().get(dataTypeName); + if (dataType != null) { + LinkedHashMap props = (LinkedHashMap) dataType.get(PROPERTIES); + data = (LinkedHashMap) props.get(args.get(argumentNumber).toString()); + if (data != null) { + bFound = true; + } + } + } + if (!bFound) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE282", String.format( + "UnknownDataType: Unknown data type \"%s\"", args.get(argumentNumber)))); + } + } + } + } + + public Object result() { + if (toscaTpl.getParsedParams() != null && + toscaTpl.getParsedParams().get(getInputName()) != null) { + LinkedHashMap ttinp = (LinkedHashMap) toscaTpl.getTpl().get(INPUTS); + LinkedHashMap ttinpinp = (LinkedHashMap) ttinp.get(getInputName()); + String type = (String) ttinpinp.get("type"); + + Object value = DataEntity.validateDatatype( + type, toscaTpl.getParsedParams().get(getInputName()), null, toscaTpl.getCustomDefs(), null); + //SDC resolving Get Input + if (value instanceof ArrayList) { + if (args.size() == 2 && args.get(1) instanceof Integer && ((ArrayList) value).size() > (Integer) args.get(1)) { + return ((ArrayList) value).get((Integer) args.get(1)); + } + /* commented out for network cloud (SDNC) + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE273",String.format( + "GetInputError: cannot resolve input name \"%s\", the expected structure is an argument with a name of input type list and a second argument with an index in the list", args.get(0)))); + return null; +*/ + } + return value; + } + + Input inputDef = null; + for (Input inpDef : toscaTpl.getInputs()) 
{ + if (getInputName().equals(inpDef.getName())) { + inputDef = inpDef; + break; + } + } + if (inputDef != null) { + if (args.size() == 2 && inputDef.getDefault() != null && inputDef.getDefault() instanceof ArrayList) { + if (args.get(1) instanceof Integer + && ((ArrayList) inputDef.getDefault()).size() > ((Integer) args.get(1)).intValue()) { + return ((ArrayList) inputDef.getDefault()).get(((Integer) args.get(1)).intValue()); + } +/* + commented out for network cloud (SDNC) + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE274",(String.format( + "GetInputError: cannot resolve input Def name \"%s\", the expected structure is an argument with a name of input type list and a second argument with an index in the list", args.get(0))))); + return null; +*/ + } + return inputDef.getDefault(); + } + return null; + } + + public String getInputName() { + return (String) args.get(0); + } + + public LinkedHashMap getEntrySchema() { + LinkedHashMap inputs = (LinkedHashMap) toscaTpl.getTpl().get(INPUTS); + LinkedHashMap inputValue = (LinkedHashMap) inputs.get(getInputName()); + return (LinkedHashMap) inputValue.get(ENTRY_SCHEMA); + } + + public ArrayList getArguments() { + return args; + } +} + +/*python + +class GetInput(Function): +"""Get a property value declared within the input of the service template. + +Arguments: + +* Input name. 
+ +Example: + +* get_input: port +""" + +def validate(self): + if len(self.args) != 1: + ValidationIssueCollector.appendException( + ValueError(_( + 'Expected one argument for function "get_input" but ' + 'received "%s".') % self.args)) + inputs = [input.name for input in self.tosca_tpl.inputs] + if self.args[0] not in inputs: + ValidationIssueCollector.appendException( + UnknownInputError(input_name=self.args[0])) + +def result(self): + if self.tosca_tpl.parsed_params and \ + self.input_name in self.tosca_tpl.parsed_params: + return DataEntity.validate_datatype( + self.tosca_tpl.tpl['inputs'][self.input_name]['type'], + self.tosca_tpl.parsed_params[self.input_name]) + + input = [input_def for input_def in self.tosca_tpl.inputs + if self.input_name == input_def.name][0] + return input.default + +@property +def input_name(self): + return self.args[0] + +*/ diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/functions/GetOperationOutput.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/functions/GetOperationOutput.java new file mode 100644 index 0000000..06a28d6 --- /dev/null +++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/functions/GetOperationOutput.java @@ -0,0 +1,243 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + +package org.onap.sdc.toscaparser.api.functions; + +import org.onap.sdc.toscaparser.api.EntityTemplate; +import org.onap.sdc.toscaparser.api.NodeTemplate; +import org.onap.sdc.toscaparser.api.RelationshipTemplate; +import org.onap.sdc.toscaparser.api.TopologyTemplate; +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.elements.InterfacesDef; +import org.onap.sdc.toscaparser.api.elements.RelationshipType; +import org.onap.sdc.toscaparser.api.elements.StatefulEntityType; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +import java.util.ArrayList; + + +public class GetOperationOutput extends Function { + + public GetOperationOutput(TopologyTemplate ttpl, Object context, String name, ArrayList args) { + super(ttpl, context, name, args); + } + + @Override + public void validate() { + if (args.size() == 4) { + _findNodeTemplate((String) args.get(0)); + String interfaceName = _findInterfaceName((String) args.get(1)); + _findOperationName(interfaceName, (String) args.get(2)); + } else { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE159", + "ValueError: Illegal arguments for function \"get_operation_output\". 
" + + "Expected arguments: \"template_name\",\"interface_name\"," + + "\"operation_name\",\"output_variable_name\"")); + } + } + + private String _findInterfaceName(String _interfaceName) { + boolean bFound = false; + for (String sect : InterfacesDef.SECTIONS) { + if (sect.equals(_interfaceName)) { + bFound = true; + break; + } + } + if (bFound) { + return _interfaceName; + } else { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE160", String.format( + "ValueError: invalid interface name \"%s\" in \"get_operation_output\"", + _interfaceName))); + return null; + } + } + + private String _findOperationName(String interfaceName, String operationName) { + + if (interfaceName.equals("Configure") || + interfaceName.equals("tosca.interfaces.node.relationship.Configure")) { + boolean bFound = false; + for (String sect : StatefulEntityType.INTERFACE_RELATIONSHIP_CONFIGURE_OPERATIONS) { + if (sect.equals(operationName)) { + bFound = true; + break; + } + } + if (bFound) { + return operationName; + } else { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE161", String.format( + "ValueError: Invalid operation of Configure interface \"%s\" in \"get_operation_output\"", + operationName))); + return null; + } + } + if (interfaceName.equals("Standard") || + interfaceName.equals("tosca.interfaces.node.lifecycle.Standard")) { + boolean bFound = false; + for (String sect : StatefulEntityType.INTERFACE_NODE_LIFECYCLE_OPERATIONS) { + if (sect.equals(operationName)) { + bFound = true; + break; + } + } + if (bFound) { + return operationName; + } else { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE162", String.format( + "ValueError: Invalid operation of Configure interface \"%s\" in \"get_operation_output\"", + operationName))); + return null; + } + } else { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE163", String.format( + 
"ValueError: Invalid interface name \"%s\" in \"get_operation_output\"", + interfaceName))); + return null; + } + } + + private NodeTemplate _findNodeTemplate(String nodeTemplateName) { + if (nodeTemplateName.equals(TARGET)) { + if (!(((EntityTemplate) context).getTypeDefinition() instanceof RelationshipType)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE164", + "KeyError: \"TARGET\" keyword can only be used in context " + + " to \"Relationships\" target node")); + return null; + } + return ((RelationshipTemplate) context).getTarget(); + } + if (nodeTemplateName.equals(SOURCE)) { + if (!(((EntityTemplate) context).getTypeDefinition() instanceof RelationshipType)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE165", + "KeyError: \"SOURCE\" keyword can only be used in context " + + " to \"Relationships\" source node")); + return null; + } + return ((RelationshipTemplate) context).getTarget(); + } + String name; + if (nodeTemplateName.equals(SELF) && !(context instanceof ArrayList)) { + name = ((NodeTemplate) context).getName(); + } else { + name = nodeTemplateName; + } + for (NodeTemplate nt : toscaTpl.getNodeTemplates()) { + if (nodeTemplateName.equals(name)) { + return nt; + } + } + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE166", String.format( + "KeyError: Node template \"%s\" was not found", nodeTemplateName))); + return null; + } + + @Override + public Object result() { + return this; + } + +} + +/*python + +class GetOperationOutput(Function): +def validate(self): + if len(self.args) == 4: + self._find_node_template(self.args[0]) + interface_name = self._find_interface_name(self.args[1]) + self._find_operation_name(interface_name, self.args[2]) + else: + ValidationIssueCollector.appendException( + ValueError(_('Illegal arguments for function "{0}". 
Expected ' + 'arguments: "template_name","interface_name",' + '"operation_name","output_variable_name"' + ).format(GET_OPERATION_OUTPUT))) + return + +def _find_interface_name(self, interface_name): + if interface_name in toscaparser.elements.interfaces.SECTIONS: + return interface_name + else: + ValidationIssueCollector.appendException( + ValueError(_('Enter a valid interface name' + ).format(GET_OPERATION_OUTPUT))) + return + +def _find_operation_name(self, interface_name, operation_name): + if(interface_name == 'Configure' or + interface_name == 'tosca.interfaces.node.relationship.Configure'): + if(operation_name in + StatefulEntityType. + interfaces_relationship_configure_operations): + return operation_name + else: + ValidationIssueCollector.appendException( + ValueError(_('Enter an operation of Configure interface' + ).format(GET_OPERATION_OUTPUT))) + return + elif(interface_name == 'Standard' or + interface_name == 'tosca.interfaces.node.lifecycle.Standard'): + if(operation_name in + StatefulEntityType.interfaces_node_lifecycle_operations): + return operation_name + else: + ValidationIssueCollector.appendException( + ValueError(_('Enter an operation of Standard interface' + ).format(GET_OPERATION_OUTPUT))) + return + else: + ValidationIssueCollector.appendException( + ValueError(_('Enter a valid operation name' + ).format(GET_OPERATION_OUTPUT))) + return + +def _find_node_template(self, node_template_name): + if node_template_name == TARGET: + if not isinstance(self.context.type_definition, RelationshipType): + ValidationIssueCollector.appendException( + KeyError(_('"TARGET" keyword can only be used in context' + ' to "Relationships" target node'))) + return + return self.context.target + if node_template_name == SOURCE: + if not isinstance(self.context.type_definition, RelationshipType): + ValidationIssueCollector.appendException( + KeyError(_('"SOURCE" keyword can only be used in context' + ' to "Relationships" source node'))) + return + return 
self.context.source + name = self.context.name \ + if node_template_name == SELF and \ + not isinstance(self.context, list) \ + else node_template_name + for node_template in self.tosca_tpl.nodetemplates: + if node_template.name == name: + return node_template + ValidationIssueCollector.appendException( + KeyError(_( + 'Node template "{0}" was not found.' + ).format(node_template_name))) + +def result(self): + return self +*/ diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/functions/GetProperty.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/functions/GetProperty.java new file mode 100644 index 0000000..90e0a8e --- /dev/null +++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/functions/GetProperty.java @@ -0,0 +1,639 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + +package org.onap.sdc.toscaparser.api.functions; + +import org.onap.sdc.toscaparser.api.CapabilityAssignment; +import org.onap.sdc.toscaparser.api.NodeTemplate; +import org.onap.sdc.toscaparser.api.Property; +import org.onap.sdc.toscaparser.api.RelationshipTemplate; +import org.onap.sdc.toscaparser.api.RequirementAssignment; +import org.onap.sdc.toscaparser.api.TopologyTemplate; +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.elements.CapabilityTypeDef; +import org.onap.sdc.toscaparser.api.elements.EntityType; +import org.onap.sdc.toscaparser.api.elements.NodeType; +import org.onap.sdc.toscaparser.api.elements.PropertyDef; +import org.onap.sdc.toscaparser.api.elements.RelationshipType; +import org.onap.sdc.toscaparser.api.elements.StatefulEntityType; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +import java.util.ArrayList; +import java.util.LinkedHashMap; + +public class GetProperty extends Function { + // Get a property value of an entity defined in the same service template + + // Arguments: + + // * Node template name | SELF | HOST | SOURCE | TARGET. + // * Requirement or capability name (optional). + // * Property name. + + // If requirement or capability name is specified, the behavior is as follows: + // The req or cap name is first looked up in the specified node template's + // requirements. + // If found, it would search for a matching capability + // of an other node template and get its property as specified in function + // arguments. + // Otherwise, the req or cap name would be looked up in the specified + // node template's capabilities and if found, it would return the property of + // the capability as specified in function arguments. 
+
+ // Examples:
+
+ // * { get_property: [ mysql_server, port ] }
+ // * { get_property: [ SELF, db_port ] }
+ // * { get_property: [ SELF, database_endpoint, port ] }
+ // * { get_property: [ SELF, database_endpoint, port, 1 ] }
+
+
+ public GetProperty(TopologyTemplate ttpl, Object context, String name, ArrayList args) {
+ super(ttpl, context, name, args);
+ }
+
+ // Validates the argument list: at least a node reference and a property name; for the
+ // three-or-more argument form, walks the optional index/attribute path as well.
+ @Override
+ void validate() {
+ if (args.size() < 2) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE167",
+ "ValueError: Illegal arguments for function \"get_property\". Expected arguments: \"node-template-name\", \"req-or-cap\" (optional), \"property name.\""));
+ return;
+ }
+ if (args.size() == 2) {
+ Property foundProp = _findProperty((String) args.get(1));
+ if (foundProp == null) {
+ return;
+ }
+ Object prop = foundProp.getValue();
+ // NOTE(review): the Python reference calls get_function only when prop is NOT already
+ // a Function ('if not isinstance(prop, Function)'); this condition looks inverted —
+ // confirm intent before changing, as it affects nested-function validation.
+ if (prop instanceof Function) {
+ getFunction(toscaTpl, context, prop, toscaTpl.getResolveGetInput());
+ }
+ } else if (args.size() >= 3) {
+ // do not use _find_property to avoid raise KeyError
+ // if the prop is not found
+ // First check if there is property with this name
+ NodeTemplate nodeTpl = _findNodeTemplate((String) args.get(0));
+ LinkedHashMap props;
+ if (nodeTpl != null) {
+ props = nodeTpl.getProperties();
+ } else {
+ props = new LinkedHashMap<>();
+ }
+ int index = 2;
+ Object propertyValue;
+ if (props.get(args.get(1)) != null) {
+ propertyValue = ((Property) props.get(args.get(1))).getValue();
+ } else {
+ index = 3;
+ // then check the req or caps
+ propertyValue = _findReqOrCapProperty((String) args.get(1), (String) args.get(2));
+ }
+
+ if (args.size() > index) {
+ // Fixed off-by-one: iterate to the end of args (Python reference: self.args[index:]);
+ // subList(index, args.size() - 1) silently skipped the last index/attribute element.
+ for (Object elem : args.subList(index, args.size())) {
+ if (propertyValue instanceof ArrayList) {
+ int intElem = (int) elem;
+ propertyValue = _getIndexValue(propertyValue, intElem);
+ } else {
+ propertyValue = _getAttributeValue(propertyValue, (String) elem);
+ }
+ }
+ }
+ }
+ }
+
+ @SuppressWarnings("unchecked")
+ private Object 
_findReqOrCapProperty(String reqOrCap, String propertyName) { + NodeTemplate nodeTpl = _findNodeTemplate((String) args.get(0)); + if (nodeTpl == null) { + return null; + } + // look for property in node template's requirements + for (RequirementAssignment req : nodeTpl.getRequirements().getAll()) { + String nodeName = req.getNodeTemplateName(); + if (req.getName().equals(reqOrCap)) { + NodeTemplate nodeTemplate = _findNodeTemplate(nodeName); + return _getCapabilityProperty(nodeTemplate, req.getName(), propertyName, true); + } + } + // If requirement was not found, look in node template's capabilities + return _getCapabilityProperty(nodeTpl, reqOrCap, propertyName, true); + } + + private Object _getCapabilityProperty(NodeTemplate nodeTemplate, + String capabilityName, + String propertyName, + boolean throwErrors) { + + // Gets a node template capability property + Object property = null; + CapabilityAssignment cap = nodeTemplate.getCapabilities().getCapabilityByName(capabilityName); + if (cap != null) { + LinkedHashMap props = cap.getProperties(); + if (props != null && props.get(propertyName) != null) { + property = ((Property) props.get(propertyName)).getValue(); + } + if (property == null && throwErrors) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE168", String.format( + "KeyError: Property \"%s\" was not found in capability \"%s\" of node template \"%s\" referenced from node template \"%s\"", + propertyName, capabilityName, nodeTemplate.getName(), ((NodeTemplate) context).getName()))); + } + return property; + } + if (throwErrors) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE169", String.format( + "KeyError: Requirement/CapabilityAssignment \"%s\" referenced from node template \"%s\" was not found in node template \"%s\"", + capabilityName, ((NodeTemplate) context).getName(), nodeTemplate.getName()))); + } + + return null; + } + + private Property _findProperty(String 
propertyName) {
+ NodeTemplate nodeTpl = _findNodeTemplate((String) args.get(0));
+ if (nodeTpl == null) {
+ return null;
+ }
+ LinkedHashMap props = nodeTpl.getProperties();
+ Property found = props.get(propertyName);
+ if (found == null) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE170", String.format(
+ "KeyError: Property \"%s\" was not found in node template \"%s\"",
+ propertyName, nodeTpl.getName())));
+ }
+ return found;
+ }
+
+ // Resolves the first get_property argument to a NodeTemplate, honoring the
+ // SELF/HOST/TARGET/SOURCE keywords; logs JE171-JE174 and returns null on failure.
+ private NodeTemplate _findNodeTemplate(String nodeTemplateName) {
+ if (nodeTemplateName.equals(SELF)) {
+ return (NodeTemplate) context;
+ }
+ // enable the HOST value in the function
+ if (nodeTemplateName.equals(HOST)) {
+ NodeTemplate node = _findHostContainingProperty(null);
+ if (node == null) {
+ // Fixed: the format string had four %s placeholders but only three arguments, so
+ // String.format threw MissingFormatArgumentException instead of logging the issue.
+ // NOTE(review): args.get(2) assumes the three-argument form; for a two-argument
+ // HOST call this would throw IndexOutOfBoundsException — confirm against callers.
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE171", String.format(
+ "KeyError: Property \"%s\" was not found in capability \"%s\" referenced from node template \"%s\"",
+ (String) args.get(2), (String) args.get(1), ((NodeTemplate) context).getName())));
+ return null;
+ }
+ return node;
+ }
+ if (nodeTemplateName.equals(TARGET)) {
+ if (!(((RelationshipTemplate) context).getTypeDefinition() instanceof RelationshipType)) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE172",
+ "KeyError: \"TARGET\" keyword can only be used in context to \"Relationships\" target node"));
+ return null;
+ }
+ return ((RelationshipTemplate) context).getTarget();
+ }
+ if (nodeTemplateName.equals(SOURCE)) {
+ if (!(((RelationshipTemplate) context).getTypeDefinition() instanceof RelationshipType)) {
+ // Fixed copy-paste error in the message: SOURCE refers to the relationship's source
+ // node (Python reference: '"Relationships" source node').
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE173",
+ "KeyError: \"SOURCE\" keyword can only be used in context to \"Relationships\" source node"));
+ return null;
+ }
+ return ((RelationshipTemplate) context).getSource();
+ }
+ if (toscaTpl.getNodeTemplates() == null) {
+ 
return null; + } + for (NodeTemplate nodeTemplate : toscaTpl.getNodeTemplates()) { + if (nodeTemplate.getName().equals(nodeTemplateName)) { + return nodeTemplate; + } + } + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE174", String.format( + "KeyError: Node template \"%s\" was not found. Referenced from Node Template \"%s\"", + nodeTemplateName, ((NodeTemplate) context).getName()))); + + return null; + } + + @SuppressWarnings("rawtypes") + private Object _getIndexValue(Object value, int index) { + if (value instanceof ArrayList) { + if (index < ((ArrayList) value).size()) { + return ((ArrayList) value).get(index); + } else { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE175", String.format( + "KeyError: Property \"%s\" found in capability \"%s\" referenced from node template \"%s\" must have an element with index %d", + args.get(2), args.get(1), ((NodeTemplate) context).getName(), index))); + + } + } else { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE176", String.format( + "KeyError: Property \"%s\" found in capability \"%s\" referenced from node template \"%s\" must be a list", + args.get(2), args.get(1), ((NodeTemplate) context).getName()))); + } + return null; + } + + @SuppressWarnings("unchecked") + private Object _getAttributeValue(Object value, String attribute) { + if (value instanceof LinkedHashMap) { + Object ov = ((LinkedHashMap) value).get(attribute); + if (ov != null) { + return ov; + } else { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE177", String.format( + "KeyError: Property \"%s\" found in capability \"%s\" referenced from node template \"%s\" must have an attribute named \"%s\"", + args.get(2), args.get(1), ((NodeTemplate) context).getName(), attribute))); + } + } else { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE178", String.format( + 
"KeyError: Property \"%s\" found in capability \"%s\" referenced from node template \"%s\" must be a dict", + args.get(2), args.get(1), ((NodeTemplate) context).getName()))); + } + return null; + } + + // Add this functions similar to get_attribute case + private NodeTemplate _findHostContainingProperty(String nodeTemplateName) { + if (nodeTemplateName == null) { + nodeTemplateName = SELF; + } + NodeTemplate nodeTemplate = _findNodeTemplate(nodeTemplateName); + LinkedHashMap hostedOnRel = (LinkedHashMap) + EntityType.TOSCA_DEF.get(HOSTED_ON); + for (RequirementAssignment requirement : nodeTemplate.getRequirements().getAll()) { + String targetName = requirement.getNodeTemplateName(); + NodeTemplate targetNode = _findNodeTemplate(targetName); + NodeType targetType = (NodeType) targetNode.getTypeDefinition(); + for (CapabilityTypeDef capDef : targetType.getCapabilitiesObjects()) { + if (capDef.inheritsFrom((ArrayList) hostedOnRel.get("valid_target_types"))) { + if (_propertyExistsInType(targetType)) { + return targetNode; + } + // If requirement was not found, look in node + // template's capabilities + if (args.size() > 2 && + _getCapabilityProperty(targetNode, (String) args.get(1), (String) args.get(2), false) != null) { + return targetNode; + } + + return _findHostContainingProperty(targetName); + } + } + + } + return null; + } + + private boolean _propertyExistsInType(StatefulEntityType typeDefinition) { + LinkedHashMap propsDef = typeDefinition.getPropertiesDef(); + return propsDef.keySet().contains((String) args.get(1)); + } + + @Override + public Object result() { + Object propertyValue; + if (args.size() >= 3) { + // First check if there is property with this name + NodeTemplate nodeTpl = _findNodeTemplate((String) args.get(0)); + LinkedHashMap props; + if (nodeTpl != null) { + props = nodeTpl.getProperties(); + } else { + props = new LinkedHashMap<>(); + } + int index = 2; + if (props.get(args.get(1)) != null) { + propertyValue = ((Property) 
props.get(args.get(1))).getValue(); + } else { + index = 3; + // then check the req or caps + propertyValue = _findReqOrCapProperty((String) args.get(1), (String) args.get(2)); + } + + if (args.size() > index) { + for (Object elem : args.subList(index, args.size() - 1)) { + if (propertyValue instanceof ArrayList) { + int intElem = (int) elem; + propertyValue = _getIndexValue(propertyValue, intElem); + } else { + propertyValue = _getAttributeValue(propertyValue, (String) elem); + } + } + } + } else { + propertyValue = _findProperty((String) args.get(1)).getValue(); + } + if (propertyValue instanceof Function) { + return ((Function) propertyValue).result(); + } + return getFunction(toscaTpl, context, propertyValue, toscaTpl.getResolveGetInput()); + } + + public String getNodeTemplateName() { + return (String) args.get(0); + } + + public String getPropertyName() { + if (args.size() > 2) { + return (String) args.get(2); + } + return (String) args.get(1); + } + + public String getReqorCap() { + if (args.size() > 2) { + return (String) args.get(1); + } + return null; + } + +} + +/*python + +class GetProperty(Function): +"""Get a property value of an entity defined in the same service template. + +Arguments: + +* Node template name | SELF | HOST | SOURCE | TARGET. +* Requirement or capability name (optional). +* Property name. + +If requirement or capability name is specified, the behavior is as follows: +The req or cap name is first looked up in the specified node template's +requirements. +If found, it would search for a matching capability +of an other node template and get its property as specified in function +arguments. +Otherwise, the req or cap name would be looked up in the specified +node template's capabilities and if found, it would return the property of +the capability as specified in function arguments. 
+ +Examples: + +* { get_property: [ mysql_server, port ] } +* { get_property: [ SELF, db_port ] } +* { get_property: [ SELF, database_endpoint, port ] } +* { get_property: [ SELF, database_endpoint, port, 1 ] } +""" + +def validate(self): + if len(self.args) < 2: + ValidationIssueCollector.appendException( + ValueError(_( + 'Expected arguments: "node-template-name", "req-or-cap" ' + '(optional), "property name".'))) + return + if len(self.args) == 2: + found_prop = self._find_property(self.args[1]) + if not found_prop: + return + prop = found_prop.value + if not isinstance(prop, Function): + get_function(self.tosca_tpl, self.context, prop) + elif len(self.args) >= 3: + # do not use _find_property to avoid raise KeyError + # if the prop is not found + # First check if there is property with this name + node_tpl = self._find_node_template(self.args[0]) + props = node_tpl.get_properties() if node_tpl else [] + index = 2 + found = [props[self.args[1]]] if self.args[1] in props else [] + if found: + property_value = found[0].value + else: + index = 3 + # then check the req or caps + property_value = self._find_req_or_cap_property(self.args[1], + self.args[2]) + if len(self.args) > index: + for elem in self.args[index:]: + if isinstance(property_value, list): + int_elem = int(elem) + property_value = self._get_index_value(property_value, + int_elem) + else: + property_value = self._get_attribute_value( + property_value, + elem) + +def _find_req_or_cap_property(self, req_or_cap, property_name): + node_tpl = self._find_node_template(self.args[0]) + # Find property in node template's requirements + for r in node_tpl.requirements: + for req, node_name in r.items(): + if req == req_or_cap: + node_template = self._find_node_template(node_name) + return self._get_capability_property( + node_template, + req, + property_name) + # If requirement was not found, look in node template's capabilities + return self._get_capability_property(node_tpl, + req_or_cap, + property_name) + 
+def _get_capability_property(self, + node_template, + capability_name, + property_name): + """Gets a node template capability property.""" + caps = node_template.get_capabilities() + if caps and capability_name in caps.keys(): + cap = caps[capability_name] + property = None + props = cap.get_properties() + if props and property_name in props.keys(): + property = props[property_name].value + if not property: + ValidationIssueCollector.appendException( + KeyError(_('Property "%(prop)s" was not found in ' + 'capability "%(cap)s" of node template ' + '"%(ntpl1)s" referenced from node template ' + '"%(ntpl2)s".') % {'prop': property_name, + 'cap': capability_name, + 'ntpl1': node_template.name, + 'ntpl2': self.context.name})) + return property + msg = _('Requirement/CapabilityAssignment "{0}" referenced from node template ' + '"{1}" was not found in node template "{2}".').format( + capability_name, + self.context.name, + node_template.name) + ValidationIssueCollector.appendException(KeyError(msg)) + +def _find_property(self, property_name): + node_tpl = self._find_node_template(self.args[0]) + if not node_tpl: + return + props = node_tpl.get_properties() + found = [props[property_name]] if property_name in props else [] + if len(found) == 0: + ValidationIssueCollector.appendException( + KeyError(_('Property "%(prop)s" was not found in node ' + 'template "%(ntpl)s".') % + {'prop': property_name, + 'ntpl': node_tpl.name})) + return None + return found[0] + +def _find_node_template(self, node_template_name): + if node_template_name == SELF: + return self.context + # enable the HOST value in the function + if node_template_name == HOST: + return self._find_host_containing_property() + if node_template_name == TARGET: + if not isinstance(self.context.type_definition, RelationshipType): + ValidationIssueCollector.appendException( + KeyError(_('"TARGET" keyword can only be used in context' + ' to "Relationships" target node'))) + return + return self.context.target + if 
node_template_name == SOURCE: + if not isinstance(self.context.type_definition, RelationshipType): + ValidationIssueCollector.appendException( + KeyError(_('"SOURCE" keyword can only be used in context' + ' to "Relationships" source node'))) + return + return self.context.source + if not hasattr(self.tosca_tpl, 'nodetemplates'): + return + for node_template in self.tosca_tpl.nodetemplates: + if node_template.name == node_template_name: + return node_template + ValidationIssueCollector.appendException( + KeyError(_( + 'Node template "{0}" was not found.' + ).format(node_template_name))) + +def _get_index_value(self, value, index): + if isinstance(value, list): + if index < len(value): + return value[index] + else: + ValidationIssueCollector.appendException( + KeyError(_( + "Property '{0}' found in capability '{1}'" + " referenced from node template {2}" + " must have an element with index {3}."). + format(self.args[2], + self.args[1], + self.context.name, + index))) + else: + ValidationIssueCollector.appendException( + KeyError(_( + "Property '{0}' found in capability '{1}'" + " referenced from node template {2}" + " must be a list.").format(self.args[2], + self.args[1], + self.context.name))) + +def _get_attribute_value(self, value, attibute): + if isinstance(value, dict): + if attibute in value: + return value[attibute] + else: + ValidationIssueCollector.appendException( + KeyError(_( + "Property '{0}' found in capability '{1}'" + " referenced from node template {2}" + " must have an attribute named {3}."). 
+ format(self.args[2], + self.args[1], + self.context.name, + attibute))) + else: + ValidationIssueCollector.appendException( + KeyError(_( + "Property '{0}' found in capability '{1}'" + " referenced from node template {2}" + " must be a dict.").format(self.args[2], + self.args[1], + self.context.name))) + +# Add this functions similar to get_attribute case +def _find_host_containing_property(self, node_template_name=SELF): + node_template = self._find_node_template(node_template_name) + hosted_on_rel = EntityType.TOSCA_DEF[HOSTED_ON] + for r in node_template.requirements: + for requirement, target_name in r.items(): + target_node = self._find_node_template(target_name) + target_type = target_node.type_definition + for capability in target_type.get_capabilities_objects(): + if capability.type in hosted_on_rel['valid_target_types']: + if self._property_exists_in_type(target_type): + return target_node + return self._find_host_containing_property( + target_name) + return None + +def _property_exists_in_type(self, type_definition): + props_def = type_definition.get_properties_def() + found = [props_def[self.args[1]]] \ + if self.args[1] in props_def else [] + return len(found) == 1 + +def result(self): + if len(self.args) >= 3: + # First check if there is property with this name + node_tpl = self._find_node_template(self.args[0]) + props = node_tpl.get_properties() if node_tpl else [] + index = 2 + found = [props[self.args[1]]] if self.args[1] in props else [] + if found: + property_value = found[0].value + else: + index = 3 + # then check the req or caps + property_value = self._find_req_or_cap_property(self.args[1], + self.args[2]) + if len(self.args) > index: + for elem in self.args[index:]: + if isinstance(property_value, list): + int_elem = int(elem) + property_value = self._get_index_value(property_value, + int_elem) + else: + property_value = self._get_attribute_value( + property_value, + elem) + else: + property_value = self._find_property(self.args[1]).value 
+ if isinstance(property_value, Function): + return property_value.result() + return get_function(self.tosca_tpl, + self.context, + property_value) + +@property +def node_template_name(self): + return self.args[0] + +@property +def property_name(self): + if len(self.args) > 2: + return self.args[2] + return self.args[1] + +@property +def req_or_cap(self): + if len(self.args) > 2: + return self.args[1] + return None +*/ diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/functions/Token.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/functions/Token.java new file mode 100644 index 0000000..240ce85 --- /dev/null +++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/functions/Token.java @@ -0,0 +1,130 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + +package org.onap.sdc.toscaparser.api.functions; + +import org.onap.sdc.toscaparser.api.TopologyTemplate; +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; + +import java.util.ArrayList; + +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +public class Token extends Function { + // Validate the function and provide an instance of the function + + //The token function is used within a TOSCA service template on a string to + //parse out (tokenize) substrings separated by one or more token characters + //within a larger string. + + //Arguments: + + //* The composite string that contains one or more substrings separated by + // token characters. + //* The string that contains one or more token characters that separate + // substrings within the composite string. + //* The integer indicates the index of the substring to return from the + // composite string. Note that the first substring is denoted by using + // the '0' (zero) integer value. + + //Example: + + // [ get_attribute: [ my_server, data_endpoint, ip_address ], ':', 1 ] + + + public Token(TopologyTemplate ttpl, Object context, String name, ArrayList args) { + super(ttpl, context, name, args); + } + + @Override + public Object result() { + return this; + } + + @Override + void validate() { + if (args.size() < 3) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE180", + "ValueError: Invalid arguments for function \"token\". " + + "Expected at least three arguments")); + } else { + if (!(args.get(1) instanceof String) || + ((String) args.get(1)).length() != 1) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE181", + "ValueError: Invalid arguments for function \"token\". 
" + + "Expected single char value as second argument")); + } + if (!(args.get(2) instanceof Integer)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE182", + "ValueError: Invalid arguments for function \"token\"" + + "Expected integer value as third argument")); + } + } + } + +} + +/*python + +class Token(Function): +"""Validate the function and provide an instance of the function + +The token function is used within a TOSCA service template on a string to +parse out (tokenize) substrings separated by one or more token characters +within a larger string. + + +Arguments: + +* The composite string that contains one or more substrings separated by + token characters. +* The string that contains one or more token characters that separate + substrings within the composite string. +* The integer indicates the index of the substring to return from the + composite string. Note that the first substring is denoted by using + the '0' (zero) integer value. + +Example: + + [ get_attribute: [ my_server, data_endpoint, ip_address ], ':', 1 ] + +""" + +def validate(self): + if len(self.args) < 3: + ValidationIssueCollector.appendException( + ValueError(_('Invalid arguments for function "{0}". Expected ' + 'at least three arguments.').format(TOKEN))) + else: + if not isinstance(self.args[1], str) or len(self.args[1]) != 1: + ValidationIssueCollector.appendException( + ValueError(_('Invalid arguments for function "{0}". ' + 'Expected single char value as second ' + 'argument.').format(TOKEN))) + + if not isinstance(self.args[2], int): + ValidationIssueCollector.appendException( + ValueError(_('Invalid arguments for function "{0}". 
' + 'Expected integer value as third ' + 'argument.').format(TOKEN))) + +def result(self): + return self +*/ diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/parameters/Annotation.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/parameters/Annotation.java new file mode 100644 index 0000000..a34ebb5 --- /dev/null +++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/parameters/Annotation.java @@ -0,0 +1,98 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + +package org.onap.sdc.toscaparser.api.parameters; + +import org.onap.sdc.toscaparser.api.Property; +import org.onap.sdc.toscaparser.api.elements.enums.ToscaElementNames; + +import java.util.ArrayList; +import java.util.Map; +import java.util.Optional; +import java.util.stream.Collectors; + +public class Annotation { + + private static final String HEAT = "HEAT"; + private String name; + private String type; + private ArrayList properties; + + + public Annotation() { + } + + @SuppressWarnings("unchecked") + public Annotation(Map.Entry annotationEntry) { + if (annotationEntry != null) { + name = annotationEntry.getKey(); + Map annValue = (Map) annotationEntry.getValue(); + type = (String) annValue.get(ToscaElementNames.TYPE.getName()); + properties = fetchProperties((Map) annValue.get(ToscaElementNames.PROPERTIES.getName())); + } + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String getType() { + return type; + } + + public void setType(String type) { + this.type = type; + } + + public ArrayList getProperties() { + return properties; + } + + public void setProperties(ArrayList properties) { + this.properties = properties; + } + + private ArrayList fetchProperties(Map properties) { + if (properties != null) { + return (ArrayList) properties.entrySet().stream() + .map(Property::new) + .collect(Collectors.toList()); + } + return null; + } + + public boolean isHeatSourceType() { + if (properties == null) { + return false; + } + Optional sourceType = properties.stream() + .filter(p -> p.getName().equals(ToscaElementNames.SOURCE_TYPE.getName())) + .findFirst(); + if (!sourceType.isPresent()) { + return false; + } + return sourceType.get().getValue() != null && ((String) sourceType.get().getValue()).equals(HEAT); + } + +} diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/parameters/Input.java 
b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/parameters/Input.java new file mode 100644 index 0000000..5d3ecb4 --- /dev/null +++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/parameters/Input.java @@ -0,0 +1,199 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + +package org.onap.sdc.toscaparser.api.parameters; + +import org.onap.sdc.toscaparser.api.DataEntity; +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.elements.EntityType; +import org.onap.sdc.toscaparser.api.elements.constraints.Constraint; +import org.onap.sdc.toscaparser.api.elements.constraints.Schema; +import org.onap.sdc.toscaparser.api.elements.enums.ToscaElementNames; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.LinkedHashMap; +import java.util.Map; +import java.util.stream.Collectors; + +public class Input { + + private static final String TYPE = "type"; + private static final String DESCRIPTION = "description"; + private static final String DEFAULT = "default"; + private static final String CONSTRAINTS = "constraints"; + private static final String REQUIRED = "required"; + private static final String STATUS = "status"; + private static final String ENTRY_SCHEMA = "entry_schema"; + + public static final String INTEGER = "integer"; + public static final String STRING = "string"; + public static final String BOOLEAN = "boolean"; + public static final String FLOAT = "float"; + public static final String LIST = "list"; + public static final String MAP = "map"; + public static final String JSON = "json"; + + private static String[] inputField = { + TYPE, DESCRIPTION, DEFAULT, CONSTRAINTS, REQUIRED, STATUS, ENTRY_SCHEMA + }; + + private static String[] primitiveTypes = { + INTEGER, STRING, BOOLEAN, FLOAT, LIST, MAP, JSON + }; + + private String name; + private Schema schema; + private LinkedHashMap customDefs; + private Map annotations; + + public Input() { + } + + public Input(String name, LinkedHashMap schema, LinkedHashMap customDefinitions) { + this.name = name; + this.schema = new Schema(name, schema); + customDefs = 
customDefinitions; + } + + @SuppressWarnings("unchecked") + public void parseAnnotations() { + if (schema.getSchema() != null) { + LinkedHashMap annotations = (LinkedHashMap) schema.getSchema().get(ToscaElementNames.ANNOTATIONS.getName()); + if (annotations != null) { + setAnnotations(annotations.entrySet().stream() + .map(Annotation::new) + .filter(Annotation::isHeatSourceType) + .collect(Collectors.toMap(Annotation::getName, a -> a))); + } + } + } + + public String getName() { + return name; + } + + public String getType() { + return schema.getType(); + } + + public String getDescription() { + return schema.getDescription(); + } + + public boolean isRequired() { + return schema.isRequired(); + } + + public Object getDefault() { + return schema.getDefault(); + } + + public ArrayList getConstraints() { + return schema.getConstraints(); + } + + public void validate(Object value) { + validateField(); + validateType(getType()); + if (value != null) { + validateValue(value); + } + } + + private void validateField() { + for (String key : schema.getSchema().keySet()) { + boolean bFound = false; + for (String ifld : inputField) { + if (key.equals(ifld)) { + bFound = true; + break; + } + } + if (!bFound) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE214", String.format( + "UnknownFieldError: Input \"%s\" contains unknown field \"%s\"", + name, key))); + } + } + } + + private void validateType(String inputType) { + boolean bFound = false; + for (String pt : Schema.PROPERTY_TYPES) { + if (pt.equals(inputType)) { + bFound = true; + break; + } + } + + if (!bFound) { + if (customDefs.get(inputType) != null) { + bFound = true; + } + } + + if (!bFound) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE215", String.format( + "ValueError: Invalid type \"%s\"", inputType))); + } + } + + @SuppressWarnings("unchecked") + private void validateValue(Object value) { + Object datatype; + if 
(EntityType.TOSCA_DEF.get(getType()) != null) { + datatype = EntityType.TOSCA_DEF.get(getType()); + } else if (EntityType.TOSCA_DEF.get(EntityType.DATATYPE_NETWORK_PREFIX + getType()) != null) { + datatype = EntityType.TOSCA_DEF.get(EntityType.DATATYPE_NETWORK_PREFIX + getType()); + } + + String type = getType(); + // if it's one of the basic types DON'T look in customDefs + if (Arrays.asList(primitiveTypes).contains(type)) { + DataEntity.validateDatatype(getType(), value, null, customDefs, null); + return; + } else if (customDefs.get(getType()) != null) { + datatype = customDefs.get(getType()); + DataEntity.validateDatatype(getType(), value, (LinkedHashMap) datatype, customDefs, null); + return; + } + + DataEntity.validateDatatype(getType(), value, null, customDefs, null); + } + + public Map getAnnotations() { + return annotations; + } + + private void setAnnotations(Map annotations) { + this.annotations = annotations; + } + + public void resetAnnotaions() { + annotations = null; + } + + public LinkedHashMap getEntrySchema() { + return schema.getEntrySchema(); + } + +} diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/parameters/Output.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/parameters/Output.java new file mode 100644 index 0000000..8ef82b3 --- /dev/null +++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/parameters/Output.java @@ -0,0 +1,129 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + +package org.onap.sdc.toscaparser.api.parameters; + +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +import java.util.LinkedHashMap; + +public class Output { + + private static final String DESCRIPTION = "description"; + public static final String VALUE = "value"; + private static final String[] OUTPUT_FIELD = {DESCRIPTION, VALUE}; + + private String name; + private LinkedHashMap attributes; + + public Output(String name, LinkedHashMap attributes) { + this.name = name; + this.attributes = attributes; + } + + public String getDescription() { + return (String) attributes.get(DESCRIPTION); + } + + public Object getValue() { + return attributes.get(VALUE); + } + + public void validate() { + validateField(); + } + + private void validateField() { + if (attributes == null) { + //TODO wrong error message... + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE216", String.format( + "ValidationError: Output \"%s\" has wrong type. 
Expecting a dict", + name))); + } + + if (getValue() == null) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE217", String.format( + "MissingRequiredFieldError: Output \"%s\" is missing required \"%s\"", + name, VALUE))); + } + for (String key : attributes.keySet()) { + boolean bFound = false; + for (String of : OUTPUT_FIELD) { + if (key.equals(of)) { + bFound = true; + break; + } + } + if (!bFound) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE218", String.format( + "UnknownFieldError: Output \"%s\" contains unknown field \"%s\"", + name, key))); + } + } + } + + // getter/setter + + public String getName() { + return name; + } + + public void setAttr(String name, Object value) { + attributes.put(name, value); + } +} + +/*python + +class Output(object): + + OUTPUT_FIELD = (DESCRIPTION, VALUE) = ('description', 'value') + + def __init__(self, name, attributes): + self.name = name + self.attributes = attributes + + @property + def description(self): + return self.attributes.get(self.DESCRIPTION) + + @property + def value(self): + return self.attributes.get(self.VALUE) + + def validate(self): + self._validate_field() + + def _validate_field(self): + if not isinstance(self.attributes, dict): + ValidationIssueCollector.appendException( + MissingRequiredFieldError(what='Output "%s"' % self.name, + required=self.VALUE)) + if self.value is None: + ValidationIssueCollector.appendException( + MissingRequiredFieldError(what='Output "%s"' % self.name, + required=self.VALUE)) + for name in self.attributes: + if name not in self.OUTPUT_FIELD: + ValidationIssueCollector.appendException( + UnknownFieldError(what='Output "%s"' % self.name, + field=name)) +*/ diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/prereq/CSAR.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/prereq/CSAR.java new file mode 100644 index 0000000..4ada267 --- /dev/null +++ 
b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/prereq/CSAR.java @@ -0,0 +1,790 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + +package org.onap.sdc.toscaparser.api.prereq; + +import org.onap.sdc.toscaparser.api.ImportsLoader; +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; +import org.onap.sdc.toscaparser.api.utils.UrlUtils; + +import java.io.BufferedOutputStream; +import java.io.File; +import java.io.FileInputStream; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.RandomAccessFile; +import java.net.URL; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.nio.file.StandardCopyOption; +import java.util.*; +import java.util.zip.ZipEntry; +import java.util.zip.ZipFile; +import java.util.zip.ZipInputStream; + +import org.onap.sdc.toscaparser.api.common.JToscaException; +import org.onap.sdc.toscaparser.api.utils.JToscaErrorCodes; +import 
org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.yaml.snakeyaml.Yaml; + +public class CSAR { + + private static Logger log = LoggerFactory.getLogger(CSAR.class.getName()); + private static final ArrayList META_PROPERTIES_FILES = new ArrayList<>(Arrays.asList("TOSCA-Metadata/TOSCA.meta", "csar.meta")); + + private String path; + private boolean isFile; + private boolean isValidated; + private boolean errorCaught; + private String csar; + private String tempDir; + // private Metadata metaData; + private File tempFile; + private LinkedHashMap> metaProperties; + + public CSAR(String csarPath, boolean aFile) { + path = csarPath; + isFile = aFile; + isValidated = false; + errorCaught = false; + csar = null; + tempDir = null; + tempFile = null; + metaProperties = new LinkedHashMap<>(); + } + + public boolean validate() throws JToscaException { + isValidated = true; + + //validate that the file or URL exists + + if (isFile) { + File f = new File(path); + if (!f.isFile()) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE220", String.format("\"%s\" is not a file", path))); + return false; + } else { + this.csar = path; + } + } else { + if (!UrlUtils.validateUrl(path)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE221", String.format("ImportError: \"%s\" does not exist", path))); + return false; + } + // get it to a local file + try { + File tempFile = File.createTempFile("csartmp", ".csar"); + Path ptf = Paths.get(tempFile.getPath()); + URL webfile = new URL(path); + InputStream in = webfile.openStream(); + Files.copy(in, ptf, StandardCopyOption.REPLACE_EXISTING); + } catch (Exception e) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE222", "ImportError: failed to load CSAR from " + path)); + return false; + } + + log.debug("CSAR - validate - currently only files are supported"); + return false; + } + + _parseAndValidateMetaProperties(); + 
+ if (errorCaught) { + return false; + } + + // validate that external references in the main template actually exist and are accessible + _validateExternalReferences(); + + return !errorCaught; + + } + + private void _parseAndValidateMetaProperties() throws JToscaException { + + ZipFile zf = null; + + try { + + // validate that it is a valid zip file + RandomAccessFile raf = new RandomAccessFile(csar, "r"); + long n = raf.readInt(); + raf.close(); + // check if Zip's magic number + if (n != 0x504B0304) { + String errorString = String.format("\"%s\" is not a valid zip file", csar); + log.error(errorString); + throw new JToscaException(errorString, JToscaErrorCodes.INVALID_CSAR_FORMAT.getValue()); + } + + // validate that it contains the metadata file in the correct location + zf = new ZipFile(csar); + ZipEntry ze = zf.getEntry("TOSCA-Metadata/TOSCA.meta"); + if (ze == null) { + + String errorString = String.format( + "\"%s\" is not a valid CSAR as it does not contain the " + + "required file \"TOSCA.meta\" in the folder \"TOSCA-Metadata\"", csar); + log.error(errorString); + throw new JToscaException(errorString, JToscaErrorCodes.MISSING_META_FILE.getValue()); + } + + //Going over expected metadata files and parsing them + for (String metaFile : META_PROPERTIES_FILES) { + + byte ba[] = new byte[4096]; + ze = zf.getEntry(metaFile); + if (ze != null) { + InputStream inputStream = zf.getInputStream(ze); + n = inputStream.read(ba, 0, 4096); + String md = new String(ba); + md = md.substring(0, (int) n); + + String errorString = String.format( + "The file \"%s\" in the" + + " CSAR \"%s\" does not contain valid YAML content", ze.getName(), csar); + + try { + Yaml yaml = new Yaml(); + Object mdo = yaml.load(md); + if (!(mdo instanceof LinkedHashMap)) { + log.error(errorString); + throw new JToscaException(errorString, JToscaErrorCodes.INVALID_META_YAML_CONTENT.getValue()); + } + + String[] split = ze.getName().split("/"); + String fileName = split[split.length - 1]; + + if 
(!metaProperties.containsKey(fileName)) { + metaProperties.put(fileName, (LinkedHashMap) mdo); + } + } catch (Exception e) { + log.error(errorString); + throw new JToscaException(errorString, JToscaErrorCodes.INVALID_META_YAML_CONTENT.getValue()); + } + } + } + + // verify it has "Entry-Definition" + String edf = _getMetadata("Entry-Definitions"); + if (edf == null) { + String errorString = String.format( + "The CSAR \"%s\" is missing the required metadata " + + "\"Entry-Definitions\" in \"TOSCA-Metadata/TOSCA.meta\"", csar); + log.error(errorString); + throw new JToscaException(errorString, JToscaErrorCodes.ENTRY_DEFINITION_NOT_DEFINED.getValue()); + } + + //validate that "Entry-Definitions' metadata value points to an existing file in the CSAR + boolean foundEDF = false; + Enumeration entries = zf.entries(); + while (entries.hasMoreElements()) { + ze = entries.nextElement(); + if (ze.getName().equals(edf)) { + foundEDF = true; + break; + } + } + if (!foundEDF) { + String errorString = String.format( + "The \"Entry-Definitions\" file defined in the CSAR \"%s\" does not exist", csar); + log.error(errorString); + throw new JToscaException(errorString, JToscaErrorCodes.MISSING_ENTRY_DEFINITION_FILE.getValue()); + } + } catch (JToscaException e) { + //ThreadLocalsHolder.getCollector().appendCriticalException(e.getMessage()); + throw e; + } catch (Exception e) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE223", "ValidationError: " + e.getMessage())); + errorCaught = true; + } + + try { + if (zf != null) { + zf.close(); + } + } catch (IOException e) { + } + } + + public void cleanup() { + try { + if (tempFile != null) { + tempFile.delete(); + } + } catch (Exception e) { + } + } + + private String _getMetadata(String key) throws JToscaException { + if (!isValidated) { + validate(); + } + Object value = _getMetaProperty("TOSCA.meta").get(key); + return value != null ? 
value.toString() : null; + } + + public String getAuthor() throws JToscaException { + return _getMetadata("Created-By"); + } + + public String getVersion() throws JToscaException { + return _getMetadata("CSAR-Version"); + } + + public LinkedHashMap> getMetaProperties() { + return metaProperties; + } + + private LinkedHashMap _getMetaProperty(String propertiesFile) { + return metaProperties.get(propertiesFile); + } + + public String getMainTemplate() throws JToscaException { + String entryDef = _getMetadata("Entry-Definitions"); + ZipFile zf; + boolean ok = false; + try { + zf = new ZipFile(path); + ok = (zf.getEntry(entryDef) != null); + zf.close(); + } catch (IOException e) { + if (!ok) { + log.error("CSAR - getMainTemplate - failed to open {}", path); + } + } + if (ok) { + return entryDef; + } else { + return null; + } + } + + @SuppressWarnings("unchecked") + public LinkedHashMap getMainTemplateYaml() throws JToscaException { + String mainTemplate = tempDir + File.separator + getMainTemplate(); + if (mainTemplate != null) { + try (InputStream input = new FileInputStream(new File(mainTemplate));) { + Yaml yaml = new Yaml(); + Object data = yaml.load(input); + if (!(data instanceof LinkedHashMap)) { + throw new IOException(); + } + return (LinkedHashMap) data; + } catch (Exception e) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE224", String.format( + "The file \"%s\" in the CSAR \"%s\" does not " + + "contain valid TOSCA YAML content", + mainTemplate, csar))); + } + } + return null; + } + + public String getDescription() throws JToscaException { + String desc = _getMetadata("Description"); + if (desc != null) { + return desc; + } + + Map metaData = metaProperties.get("TOSCA.meta"); + metaData.put("Description", getMainTemplateYaml().get("description")); + return _getMetadata("Description"); + } + + public String getTempDir() { + return tempDir; + } + + public void decompress() throws IOException, JToscaException { + if 
(!isValidated) { + validate(); + } + + if (tempDir == null || tempDir.isEmpty()) { + tempDir = Files.createTempDirectory("JTP").toString(); + unzip(path, tempDir); + } + } + + private void _validateExternalReferences() throws JToscaException { + // Extracts files referenced in the main template + // These references are currently supported: + // * imports + // * interface implementations + // * artifacts + try { + decompress(); + String mainTplFile = getMainTemplate(); + if (mainTplFile == null) { + return; + } + + LinkedHashMap mainTpl = getMainTemplateYaml(); + if (mainTpl.get("imports") != null) { + // this loads the imports + ImportsLoader il = new ImportsLoader((ArrayList) mainTpl.get("imports"), + tempDir + File.separator + mainTplFile, + (Object) null, + (LinkedHashMap) null); + } + + if (mainTpl.get("topology_template") != null) { + LinkedHashMap topologyTemplate = + (LinkedHashMap) mainTpl.get("topology_template"); + + if (topologyTemplate.get("node_templates") != null) { + LinkedHashMap nodeTemplates = + (LinkedHashMap) topologyTemplate.get("node_templates"); + for (String nodeTemplateKey : nodeTemplates.keySet()) { + LinkedHashMap nodeTemplate = + (LinkedHashMap) nodeTemplates.get(nodeTemplateKey); + if (nodeTemplate.get("artifacts") != null) { + LinkedHashMap artifacts = + (LinkedHashMap) nodeTemplate.get("artifacts"); + for (String artifactKey : artifacts.keySet()) { + Object artifact = artifacts.get(artifactKey); + if (artifact instanceof String) { + _validateExternalReference(mainTplFile, (String) artifact, true); + } else if (artifact instanceof LinkedHashMap) { + String file = (String) ((LinkedHashMap) artifact).get("file"); + if (file != null) { + _validateExternalReference(mainTplFile, file, true); + } + } else { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE225", String.format( + "ValueError: Unexpected artifact definition for \"%s\"", + artifactKey))); + errorCaught = true; + } + } + } + if 
(nodeTemplate.get("interfaces") != null) { + LinkedHashMap interfaces = + (LinkedHashMap) nodeTemplate.get("interfaces"); + for (String interfaceKey : interfaces.keySet()) { + LinkedHashMap _interface = + (LinkedHashMap) interfaces.get(interfaceKey); + for (String operationKey : _interface.keySet()) { + Object operation = _interface.get(operationKey); + if (operation instanceof String) { + _validateExternalReference(mainTplFile, (String) operation, false); + } else if (operation instanceof LinkedHashMap) { + String imp = (String) ((LinkedHashMap) operation).get("implementation"); + if (imp != null) { + _validateExternalReference(mainTplFile, imp, true); + } + } + } + } + } + } + } + } + } catch (IOException e) { + errorCaught = true; + } finally { + // delete tempDir (only here?!?) + File fdir = new File(tempDir); + deleteDir(fdir); + tempDir = null; + } + } + + public static void deleteDir(File fdir) { + try { + if (fdir.isDirectory()) { + for (File c : fdir.listFiles()) + deleteDir(c); + } + fdir.delete(); + } catch (Exception e) { + } + } + + private void _validateExternalReference(String tplFile, String resourceFile, boolean raiseExc) { + // Verify that the external resource exists + + // If resource_file is a URL verify that the URL is valid. + // If resource_file is a relative path verify that the path is valid + // considering base folder (self.temp_dir) and tpl_file. + // Note that in a CSAR resource_file cannot be an absolute path. 
+ if (UrlUtils.validateUrl(resourceFile)) { + String msg = String.format("URLException: The resource at \"%s\" cannot be accessed", resourceFile); + try { + if (UrlUtils.isUrlAccessible(resourceFile)) { + return; + } else { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE226", msg)); + errorCaught = true; + } + } catch (Exception e) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE227", msg)); + } + } + + String dirPath = Paths.get(tplFile).getParent().toString(); + String filePath = tempDir + File.separator + dirPath + File.separator + resourceFile; + File f = new File(filePath); + if (f.isFile()) { + return; + } + + if (raiseExc) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE228", String.format( + "ValueError: The resource \"%s\" does not exist", resourceFile))); + } + errorCaught = true; + } + + private void unzip(String zipFilePath, String destDirectory) throws IOException { + File destDir = new File(destDirectory); + if (!destDir.exists()) { + destDir.mkdir(); + } + + try (ZipInputStream zipIn = new ZipInputStream(new FileInputStream(zipFilePath));) { + ZipEntry entry = zipIn.getNextEntry(); + // iterates over entries in the zip file + while (entry != null) { + // create all directories needed for nested items + String[] parts = entry.getName().split("/"); + String s = destDirectory + File.separator; + for (int i = 0; i < parts.length - 1; i++) { + s += parts[i]; + File idir = new File(s); + if (!idir.exists()) { + idir.mkdir(); + } + s += File.separator; + } + String filePath = destDirectory + File.separator + entry.getName(); + if (!entry.isDirectory()) { + // if the entry is a file, extracts it + extractFile(zipIn, filePath); + } else { + // if the entry is a directory, make the directory + File dir = new File(filePath); + dir.mkdir(); + } + zipIn.closeEntry(); + entry = zipIn.getNextEntry(); + } + } + } + + /** + * Extracts a zip entry 
(file entry) + * + * @param zipIn + * @param filePath + * @throws IOException + */ + private static final int BUFFER_SIZE = 4096; + + private void extractFile(ZipInputStream zipIn, String filePath) throws IOException { + //BufferedOutputStream bos = new BufferedOutputStream(new FileOutputStream(filePath)); + try (FileOutputStream fos = new FileOutputStream(filePath); + BufferedOutputStream bos = new BufferedOutputStream(fos);) { + byte[] bytesIn = new byte[BUFFER_SIZE]; + int read = 0; + while ((read = zipIn.read(bytesIn)) != -1) { + bos.write(bytesIn, 0, read); + } + } + } + +} + +/*python + +from toscaparser.common.exception import ValidationIssueCollector +from toscaparser.common.exception import URLException +from toscaparser.common.exception import ValidationError +from toscaparser.imports import ImportsLoader +from toscaparser.utils.gettextutils import _ +from toscaparser.utils.urlutils import UrlUtils + +try: # Python 2.x + from BytesIO import BytesIO +except ImportError: # Python 3.x + from io import BytesIO + + +class CSAR(object): + + def __init__(self, csar_file, a_file=True): + self.path = csar_file + self.a_file = a_file + self.is_validated = False + self.error_caught = False + self.csar = None + self.temp_dir = None + + def validate(self): + """Validate the provided CSAR file.""" + + self.is_validated = True + + # validate that the file or URL exists + missing_err_msg = (_('"%s" does not exist.') % self.path) + if self.a_file: + if not os.path.isfile(self.path): + ValidationIssueCollector.appendException( + ValidationError(message=missing_err_msg)) + return False + else: + self.csar = self.path + else: # a URL + if not UrlUtils.validate_url(self.path): + ValidationIssueCollector.appendException( + ValidationError(message=missing_err_msg)) + return False + else: + response = requests.get(self.path) + self.csar = BytesIO(response.content) + + # validate that it is a valid zip file + if not zipfile.is_zipfile(self.csar): + err_msg = (_('"%s" is not a 
valid zip file.') % self.path) + ValidationIssueCollector.appendException( + ValidationError(message=err_msg)) + return False + + # validate that it contains the metadata file in the correct location + self.zfile = zipfile.ZipFile(self.csar, 'r') + filelist = self.zfile.namelist() + if 'TOSCA-Metadata/TOSCA.meta' not in filelist: + err_msg = (_('"%s" is not a valid CSAR as it does not contain the ' + 'required file "TOSCA.meta" in the folder ' + '"TOSCA-Metadata".') % self.path) + ValidationIssueCollector.appendException( + ValidationError(message=err_msg)) + return False + + # validate that 'Entry-Definitions' property exists in TOSCA.meta + data = self.zfile.read('TOSCA-Metadata/TOSCA.meta') + invalid_yaml_err_msg = (_('The file "TOSCA-Metadata/TOSCA.meta" in ' + 'the CSAR "%s" does not contain valid YAML ' + 'content.') % self.path) + try: + meta = yaml.load(data) + if type(meta) is dict: + self.metadata = meta + else: + ValidationIssueCollector.appendException( + ValidationError(message=invalid_yaml_err_msg)) + return False + except yaml.YAMLError: + ValidationIssueCollector.appendException( + ValidationError(message=invalid_yaml_err_msg)) + return False + + if 'Entry-Definitions' not in self.metadata: + err_msg = (_('The CSAR "%s" is missing the required metadata ' + '"Entry-Definitions" in ' + '"TOSCA-Metadata/TOSCA.meta".') + % self.path) + ValidationIssueCollector.appendException( + ValidationError(message=err_msg)) + return False + + # validate that 'Entry-Definitions' metadata value points to an + # existing file in the CSAR + entry = self.metadata.get('Entry-Definitions') + if entry and entry not in filelist: + err_msg = (_('The "Entry-Definitions" file defined in the ' + 'CSAR "%s" does not exist.') % self.path) + ValidationIssueCollector.appendException( + ValidationError(message=err_msg)) + return False + + # validate that external references in the main template actually + # exist and are accessible + self._validate_external_references() + return not 
self.error_caught + + def get_metadata(self): + """Return the metadata dictionary.""" + + # validate the csar if not already validated + if not self.is_validated: + self.validate() + + # return a copy to avoid changes overwrite the original + return dict(self.metadata) if self.metadata else None + + def _get_metadata(self, key): + if not self.is_validated: + self.validate() + return self.metadata.get(key) + + def get_author(self): + return self._get_metadata('Created-By') + + def get_version(self): + return self._get_metadata('CSAR-Version') + + def get_main_template(self): + entry_def = self._get_metadata('Entry-Definitions') + if entry_def in self.zfile.namelist(): + return entry_def + + def get_main_template_yaml(self): + main_template = self.get_main_template() + if main_template: + data = self.zfile.read(main_template) + invalid_tosca_yaml_err_msg = ( + _('The file "%(template)s" in the CSAR "%(csar)s" does not ' + 'contain valid TOSCA YAML content.') % + {'template': main_template, 'csar': self.path}) + try: + tosca_yaml = yaml.load(data) + if type(tosca_yaml) is not dict: + ValidationIssueCollector.appendException( + ValidationError(message=invalid_tosca_yaml_err_msg)) + return tosca_yaml + except Exception: + ValidationIssueCollector.appendException( + ValidationError(message=invalid_tosca_yaml_err_msg)) + + def get_description(self): + desc = self._get_metadata('Description') + if desc is not None: + return desc + + self.metadata['Description'] = \ + self.get_main_template_yaml().get('description') + return self.metadata['Description'] + + def decompress(self): + if not self.is_validated: + self.validate() + self.temp_dir = tempfile.NamedTemporaryFile().name + with zipfile.ZipFile(self.csar, "r") as zf: + zf.extractall(self.temp_dir) + + def _validate_external_references(self): + """Extracts files referenced in the main template + + These references are currently supported: + * imports + * interface implementations + * artifacts + """ + try: + 
self.decompress() + main_tpl_file = self.get_main_template() + if not main_tpl_file: + return + main_tpl = self.get_main_template_yaml() + + if 'imports' in main_tpl: + ImportsLoader(main_tpl['imports'], + os.path.join(self.temp_dir, main_tpl_file)) + + if 'topology_template' in main_tpl: + topology_template = main_tpl['topology_template'] + + if 'node_templates' in topology_template: + node_templates = topology_template['node_templates'] + + for node_template_key in node_templates: + node_template = node_templates[node_template_key] + if 'artifacts' in node_template: + artifacts = node_template['artifacts'] + for artifact_key in artifacts: + artifact = artifacts[artifact_key] + if isinstance(artifact, six.string_types): + self._validate_external_reference( + main_tpl_file, + artifact) + elif isinstance(artifact, dict): + if 'file' in artifact: + self._validate_external_reference( + main_tpl_file, + artifact['file']) + else: + ValidationIssueCollector.appendException( + ValueError(_('Unexpected artifact ' + 'definition for "%s".') + % artifact_key)) + self.error_caught = True + if 'interfaces' in node_template: + interfaces = node_template['interfaces'] + for interface_key in interfaces: + interface = interfaces[interface_key] + for opertation_key in interface: + operation = interface[opertation_key] + if isinstance(operation, six.string_types): + self._validate_external_reference( + main_tpl_file, + operation, + False) + elif isinstance(operation, dict): + if 'implementation' in operation: + self._validate_external_reference( + main_tpl_file, + operation['implementation']) + finally: + if self.temp_dir: + shutil.rmtree(self.temp_dir) + + def _validate_external_reference(self, tpl_file, resource_file, + raise_exc=True): + """Verify that the external resource exists + + If resource_file is a URL verify that the URL is valid. + If resource_file is a relative path verify that the path is valid + considering base folder (self.temp_dir) and tpl_file. 
+ Note that in a CSAR resource_file cannot be an absolute path. + """ + if UrlUtils.validate_url(resource_file): + msg = (_('The resource at "%s" cannot be accessed.') % + resource_file) + try: + if UrlUtils.url_accessible(resource_file): + return + else: + ValidationIssueCollector.appendException( + URLException(what=msg)) + self.error_caught = True + except Exception: + ValidationIssueCollector.appendException( + URLException(what=msg)) + self.error_caught = True + + if os.path.isfile(os.path.join(self.temp_dir, + os.path.dirname(tpl_file), + resource_file)): + return + + if raise_exc: + ValidationIssueCollector.appendException( + ValueError(_('The resource "%s" does not exist.') + % resource_file)) + self.error_caught = True +*/ + + diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/utils/CopyUtils.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/utils/CopyUtils.java new file mode 100644 index 0000000..237b738 --- /dev/null +++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/utils/CopyUtils.java @@ -0,0 +1,50 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.sdc.toscaparser.api.utils;
+
+import java.util.ArrayList;
+import java.util.LinkedHashMap;
+import java.util.Map;
+
+public class CopyUtils {
+
+    private CopyUtils() {
+    }
+
+    @SuppressWarnings("unchecked")
+    public static Object copyLhmOrAl(Object src) {
+        if (src instanceof LinkedHashMap) {
+            LinkedHashMap<String, Object> dst = new LinkedHashMap<String, Object>();
+            for (Map.Entry<String, Object> me : ((LinkedHashMap<String, Object>) src).entrySet()) {
+                dst.put(me.getKey(), me.getValue());
+            }
+            return dst;
+        } else if (src instanceof ArrayList) {
+            ArrayList<Object> dst = new ArrayList<Object>();
+            for (Object o : (ArrayList<Object>) src) {
+                dst.add(o);
+            }
+            return dst;
+        } else {
+            return null;
+        }
+    }
+}
diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/utils/DumpUtils.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/utils/DumpUtils.java
new file mode 100644
index 0000000..158a3e1
--- /dev/null
+++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/utils/DumpUtils.java
@@ -0,0 +1,68 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END========================================================= + */ + +package org.onap.sdc.toscaparser.api.utils; + +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.Map; + +public class DumpUtils { + + @SuppressWarnings("unchecked") + private static void dumpYaml(Object yo, int level) { + final String indent = " "; + try { + if (yo == null) { + System.out.println(""); + return; + } + String cname = yo.getClass().getSimpleName(); + System.out.print(cname); + if (cname.equals("LinkedHashMap")) { + LinkedHashMap lhm = (LinkedHashMap) yo; + System.out.println(); + for (Map.Entry me : lhm.entrySet()) { + System.out.print(indent.substring(0, level) + me.getKey() + ": "); + dumpYaml(me.getValue(), level + 2); + } + } else if (cname.equals("ArrayList")) { + ArrayList al = (ArrayList) yo; + System.out.println(); + for (int i = 0; i < al.size(); i++) { + System.out.format("%s[%d] ", indent.substring(0, level), i); + dumpYaml(al.get(i), level + 2); + } + } else if (cname.equals("String")) { + System.out.println(" ==> \"" + (String) yo + "\""); + } else if (cname.equals("Integer")) { + System.out.println(" ==> " + (int) yo); + } else if (cname.equals("Boolean")) { + System.out.println(" ==> " + (boolean) yo); + } else if (cname.equals("Double")) { + System.out.println(" ==> " + (double) yo); + } else { + System.out.println(" !! unexpected type"); + } + } catch (Exception e) { + System.out.println("Exception!! 
" + e.getMessage()); + } + } +} diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/utils/JToscaErrorCodes.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/utils/JToscaErrorCodes.java new file mode 100644 index 0000000..3849ce0 --- /dev/null +++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/utils/JToscaErrorCodes.java @@ -0,0 +1,52 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + +package org.onap.sdc.toscaparser.api.utils; + + +public enum JToscaErrorCodes { + MISSING_META_FILE("JE1001"), + INVALID_META_YAML_CONTENT("JE1002"), + ENTRY_DEFINITION_NOT_DEFINED("JE1003"), + MISSING_ENTRY_DEFINITION_FILE("JE1004"), + GENERAL_ERROR("JE1005"), + PATH_NOT_VALID("JE1006"), + CSAR_TOSCA_VALIDATION_ERROR("JE1007"), + INVALID_CSAR_FORMAT("JE1008"); + + private String value; + + JToscaErrorCodes(String value) { + this.value = value; + } + + public String getValue() { + return value; + } + + public static JToscaErrorCodes getByCode(String code) { + for (JToscaErrorCodes v : values()) { + if (v.getValue().equals(code)) { + return v; + } + } + return null; + } +} diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/utils/TOSCAVersionProperty.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/utils/TOSCAVersionProperty.java new file mode 100644 index 0000000..a753d62 --- /dev/null +++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/utils/TOSCAVersionProperty.java @@ -0,0 +1,209 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.sdc.toscaparser.api.utils;
+
+import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue;
+
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+// test with functions/test_concat.yaml
+public class TOSCAVersionProperty {
+
+    private String version;
+
+    private static final String VERSION_RE =
+            "^(?<gMajorVersion>([0-9][0-9]*))"
+                    + "(\\.(?<gMinorVersion>([0-9][0-9]*)))?"
+                    + "(\\.(?<gFixVersion>([0-9][0-9]*)))?"
+                    + "(\\.(?<gQualifier>([0-9A-Za-z]+)))?"
+                    + "(\\-(?<gBuildVersion>[0-9])*)?$";
+
+    private String minorVersion = null;
+    private String majorVersion = null;
+    private String fixVersion = null;
+    private String qualifier = null;
+    private String buildVersion = null;
+
+
+    public TOSCAVersionProperty(String version) {
+
+        if (version.equals("0") || version.equals("0.0") || version.equals("0.0.0")) {
+            return;
+        }
+
+        Pattern pattern = Pattern.compile(VERSION_RE);
+        Matcher matcher = pattern.matcher(version);
+        if (!matcher.find()) {
+            ThreadLocalsHolder.getCollector().appendValidationIssue(
+                    new JToscaValidationIssue(
+                            "JE252",
+                            "InvalidTOSCAVersionPropertyException: "
+                                    + "Value of TOSCA version property \"" + version + "\" is invalid"
+                    ));
+            return;
+        }
+        minorVersion = matcher.group("gMinorVersion");
+        majorVersion = matcher.group("gMajorVersion");
+        fixVersion = matcher.group("gFixVersion");
+        qualifier = validateQualifier(matcher.group("gQualifier"));
+        buildVersion = validateBuild(matcher.group("gBuildVersion"));
+        validateMajorVersion(majorVersion);
+
+        this.version = version;
+
+    }
+
+    private String validateMajorVersion(String value) {
+        // Validate major version
+
+        // Checks if only major version is provided and assumes
+        // minor version as 0.
+        // Eg: If version = 18, then it returns version = '18.0'
+
+        if (minorVersion == null && buildVersion == null && !value.equals("0")) {
+            //log.warning(_('Minor version assumed "0".'))
+            version = version + ".0";
+        }
+        return value;
+    }
+
+    private String validateQualifier(String value) {
+        // Validate qualifier
+
+        // TOSCA version is invalid if a qualifier is present without the
+        // fix version or with all of major, minor and fix version 0s.
+
+        // For example, the following versions are invalid
+        // 18.0.abc
+        // 0.0.0.abc
+
+        if ((fixVersion == null && value != null) || (minorVersion.equals("0") && majorVersion.equals("0")
+                && fixVersion.equals("0") && value != null)) {
+            ThreadLocalsHolder.getCollector().appendValidationIssue(
+                    new JToscaValidationIssue(
+                            "JE253",
+                            "InvalidTOSCAVersionPropertyException: Value of TOSCA version property \""
+                                    + version
+                                    + "\" is invalid"
+                    ));
+        }
+        return value;
+    }
+
+    private String validateBuild(String value) {
+        // Validate build version
+
+        // TOSCA version is invalid if build version is present without the qualifier.
+        // Eg: version = 18.0.0-1 is invalid.
+
+        if (qualifier == null && value != null) {
+            ThreadLocalsHolder.getCollector().appendValidationIssue(
+                    new JToscaValidationIssue(
+                            "JE254",
+                            "InvalidTOSCAVersionPropertyException: "
+                                    + "Value of TOSCA version property \"" + version + "\" is invalid"
+                    )
+            );
+        }
+        return value;
+    }
+
+    public Object getVersion() {
+        return version;
+    }
+
+}
+
+/*python
+
+class TOSCAVersionProperty(object):
+
+    VERSION_RE = re.compile('^(?P<major_version>([0-9][0-9]*))'
+                            '(\.(?P<minor_version>([0-9][0-9]*)))?'
+                            '(\.(?P<fix_version>([0-9][0-9]*)))?'
+                            '(\.(?P<qualifier>([0-9A-Za-z]+)))?'
+ '(\-(?P[0-9])*)?$') + + def __init__(self, version): + self.version = str(version) + match = self.VERSION_RE.match(self.version) + if not match: + ValidationIssueCollector.appendException( + InvalidTOSCAVersionPropertyException(what=(self.version))) + return + ver = match.groupdict() + if self.version in ['0', '0.0', '0.0.0']: + log.warning(_('Version assumed as not provided')) + self.version = None + self.minor_version = ver['minor_version'] + self.major_version = ver['major_version'] + self.fix_version = ver['fix_version'] + self.qualifier = self._validate_qualifier(ver['qualifier']) + self.build_version = self._validate_build(ver['build_version']) + self._validate_major_version(self.major_version) + + def _validate_major_version(self, value): + """Validate major version + + Checks if only major version is provided and assumes + minor version as 0. + Eg: If version = 18, then it returns version = '18.0' + """ + + if self.minor_version is None and self.build_version is None and \ + value != '0': + log.warning(_('Minor version assumed "0".')) + self.version = '.'.join([value, '0']) + return value + + def _validate_qualifier(self, value): + """Validate qualifier + + TOSCA version is invalid if a qualifier is present without the + fix version or with all of major, minor and fix version 0s. + + For example, the following versions are invalid + 18.0.abc + 0.0.0.abc + """ + if (self.fix_version is None and value) or \ + (self.minor_version == self.major_version == + self.fix_version == '0' and value): + ValidationIssueCollector.appendException( + InvalidTOSCAVersionPropertyException(what=(self.version))) + return value + + def _validate_build(self, value): + """Validate build version + + TOSCA version is invalid if build version is present without the + qualifier. + Eg: version = 18.0.0-1 is invalid. 
+ """ + if not self.qualifier and value: + ValidationIssueCollector.appendException( + InvalidTOSCAVersionPropertyException(what=(self.version))) + return value + + def get_version(self): + return self.version +*/ diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/utils/ThreadLocalsHolder.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/utils/ThreadLocalsHolder.java new file mode 100644 index 0000000..4c4581b --- /dev/null +++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/utils/ThreadLocalsHolder.java @@ -0,0 +1,45 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.sdc.toscaparser.api.utils;
+
+import org.onap.sdc.toscaparser.api.common.ValidationIssueCollector;
+
+public class ThreadLocalsHolder {
+
+    private static final ThreadLocal<ValidationIssueCollector> EXCEPTION_COLLECTOR_THREAD_LOCAL = new ThreadLocal<>();
+
+    private ThreadLocalsHolder() {
+    }
+
+    public static ValidationIssueCollector getCollector() {
+        return EXCEPTION_COLLECTOR_THREAD_LOCAL.get();
+    }
+
+    public static void setCollector(ValidationIssueCollector validationIssueCollector) {
+        cleanup();
+        EXCEPTION_COLLECTOR_THREAD_LOCAL.set(validationIssueCollector);
+    }
+
+    public static void cleanup() {
+        EXCEPTION_COLLECTOR_THREAD_LOCAL.remove();
+    }
+
+}
diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/utils/UrlUtils.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/utils/UrlUtils.java
new file mode 100644
index 0000000..d081d91
--- /dev/null
+++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/utils/UrlUtils.java
@@ -0,0 +1,145 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END========================================================= + */ + +package org.onap.sdc.toscaparser.api.utils; + +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; + +import java.io.IOException; +import java.net.HttpURLConnection; +import java.net.MalformedURLException; +import java.net.URL; + +public class UrlUtils { + + private static final int HTTP_STATUS_OK = 200; + + private UrlUtils() { + } + + public static boolean validateUrl(String sUrl) { + // Validates whether the given path is a URL or not + + // If the given path includes a scheme (http, https, ftp, ...) and a net + // location (a domain name such as www.github.com) it is validated as a URL + try { + URL url = new URL(sUrl); + if (url.getProtocol().equals("file")) { + return true; + } + return url.getAuthority() != null; + } catch (MalformedURLException e) { + return false; + } + } + + public static String joinUrl(String sUrl, String relativePath) { + // Builds a new URL from the given URL and the relative path + + // Example: + // url: http://www.githib.com/openstack/heat + // relative_path: heat-translator + // - joined: http://www.githib.com/openstack/heat-translator + if (!validateUrl(sUrl)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE255", String.format( + "ValueError: The URL \"%s\" is malformed", sUrl))); + } + try { + URL base = new URL(sUrl); + return (new URL(base, relativePath)).toString(); + } catch (MalformedURLException e) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE256", String.format( + "ValueError: Joining URL \"%s\" and relative path \"%s\" caused an exception", sUrl, relativePath))); + return sUrl; + } + } + + public static boolean isUrlAccessible(String sUrl) { + // Validates whether the given URL is accessible + + // Returns true if the get call returns a 200 response code. + // Otherwise, returns false. 
+ try { + HttpURLConnection connection = (HttpURLConnection) new URL(sUrl).openConnection(); + connection.setRequestMethod("HEAD"); + int responseCode = connection.getResponseCode(); + return responseCode == HTTP_STATUS_OK; + } catch (IOException e) { + return false; + } + } + +} + +/*python + +from six.moves.urllib.parse import urljoin +from six.moves.urllib.parse import urlparse +from toscaparser.common.exception import ValidationIssueCollector +from toscaparser.utils.gettextutils import _ + +try: + # Python 3.x + import urllib.request as urllib2 +except ImportError: + # Python 2.x + import urllib2 + + +class UrlUtils(object): + + @staticmethod + def validate_url(path): + """Validates whether the given path is a URL or not. + + If the given path includes a scheme (http, https, ftp, ...) and a net + location (a domain name such as www.github.com) it is validated as a + URL. + """ + parsed = urlparse(path) + if parsed.scheme == 'file': + # If the url uses the file scheme netloc will be "" + return True + else: + return bool(parsed.scheme) and bool(parsed.netloc) + + @staticmethod + def join_url(url, relative_path): + """Builds a new URL from the given URL and the relative path. + + Example: + url: http://www.githib.com/openstack/heat + relative_path: heat-translator + - joined: http://www.githib.com/openstack/heat-translator + """ + if not UrlUtils.validate_url(url): + ValidationIssueCollector.appendException( + ValueError(_('"%s" is not a valid URL.') % url)) + return urljoin(url, relative_path) + + @staticmethod + def url_accessible(url): + """Validates whether the given URL is accessible. + + Returns true if the get call returns a 200 response code. + Otherwise, returns false. 
+ """ + return urllib2.urlopen(url).getcode() == 200 +*/ diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/utils/ValidateUtils.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/utils/ValidateUtils.java new file mode 100644 index 0000000..b90d882 --- /dev/null +++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/utils/ValidateUtils.java @@ -0,0 +1,439 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + +package org.onap.sdc.toscaparser.api.utils; + +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; + +import java.util.ArrayList; +import java.util.Date; +import java.util.LinkedHashMap; + +public class ValidateUtils { + + private static final String RANGE_UNBOUNDED = "UNBOUNDED"; + + private ValidateUtils() { + } + + public static Object strToNum(Object value) { + // Convert a string representation of a number into a numeric type + // TODO(TBD) we should not allow numeric values in, input should be str + if (value instanceof Number) { + return value; + } + try { + return Integer.parseInt((String) value); + } catch (NumberFormatException e) { + } + try { + return Float.parseFloat((String) value); + } catch (Exception e) { + } + return null; + } + + public static Object validateNumeric(Object value) { + if (value != null) { + if (!(value instanceof Number)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE257", String.format( + "ValueError: \"%s\" is not a numeric", value.toString()))); + } + } + return value; + } + + public static Object validateInteger(Object value) { + if (value != null) { + if (!(value instanceof Integer)) { + // allow "true" and "false" + if (value instanceof Boolean) { + return (Boolean) value ? 
1 : 0; + } + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE258", String.format( + "ValueError: \"%s\" is not an integer", value.toString()))); + } + } + return value; + } + + public static Object validateFloat(Object value) { + if (value != null) { + if (!(value instanceof Float || value instanceof Double)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE259", String.format( + "ValueError: \"%s\" is not a float", value.toString()))); + } + } + return value; + } + + public static Object validateString(Object value) { + if (value != null) { + if (!(value instanceof String)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE260", String.format( + "ValueError: \'%s\' is not a string", value.toString()))); + } + } + return value; + } + + public static Object validateList(Object value) { + if (value != null) { + if (!(value instanceof ArrayList)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE261", String.format( + "ValueError: \"%s\" is not a list", value.toString()))); + } + } + return value; + } + + + @SuppressWarnings("unchecked") + public static Object validateRange(Object range) { + // list class check + validateList(range); + // validate range list has a min and max + if (range instanceof ArrayList && ((ArrayList) range).size() != 2) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE262", String.format( + "ValueError: \"%s\" is not a valid range", range.toString()))); + // too dangerous to continue... 
+ return range; + } + // validate min and max are numerics or the keyword UNBOUNDED + boolean minTest = false; + boolean maxTest = false; + Object r0 = ((ArrayList) range).get(0); + Object r1 = ((ArrayList) range).get(1); + + if (!(r0 instanceof Integer) && !(r0 instanceof Float) + || !(r1 instanceof Integer) && !(r1 instanceof Float)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE263", String.format( + "ValueError: \"%s\" is not a valid range", range.toString()))); + // too dangerous to continue... + return range; + } + + Float min = 0.0F; + Float max = 0.0F; + if (r0 instanceof String && ((String) r0).equals(RANGE_UNBOUNDED)) { + minTest = true; + } else { + min = r0 instanceof Integer ? ((Integer) r0).floatValue() : (Float) r0; + } + if (r1 instanceof String && ((String) r1).equals(RANGE_UNBOUNDED)) { + maxTest = true; + } else { + max = r1 instanceof Integer ? ((Integer) r1).floatValue() : (Float) r1; + } + + // validate the max > min (account for UNBOUNDED) + if (!minTest && !maxTest) { + // Note: min == max is allowed + if (min > max) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE264", String.format( + "ValueError:\"%s\" is not a valid range", range.toString()))); + } + } + return range; + } + + @SuppressWarnings("unchecked") + public static Object validateValueInRange(Object value, Object range, String propName) { + // verify all 3 are numeric and convert to Floats + if (!(value instanceof Integer || value instanceof Float)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE265", String.format( + "ValueError: validateInRange: \"%s\" is not a number", range.toString()))); + return value; + } + Float fval = value instanceof Integer ? ((Integer) value).floatValue() : (Float) value; + + ////////////////////////// + //"validateRange(range);" + ////////////////////////// + // better safe than sorry... 
+ // validate that range list has a min and max + if (range instanceof ArrayList && ((ArrayList) range).size() != 2) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE266", String.format( + "ValueError: \"%s\" is not a valid range", range.toString()))); + // too dangerous to continue... + return value; + } + // validate min and max are numerics or the keyword UNBOUNDED + boolean minTest = false; + boolean maxTest = false; + Object r0 = ((ArrayList) range).get(0); + Object r1 = ((ArrayList) range).get(1); + + if (!(r0 instanceof Integer) && !(r0 instanceof Float) + || !(r1 instanceof Integer) && !(r1 instanceof Float)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE267", String.format( + "ValueError: \"%s\" is not a valid range", range.toString()))); + // too dangerous to continue... + return value; + } + + Float min = 0.0F; + Float max = 0.0F; + if (r0 instanceof String && ((String) r0).equals(RANGE_UNBOUNDED)) { + minTest = true; + } else { + min = r0 instanceof Integer ? ((Integer) r0).floatValue() : (Float) r0; + } + if (r1 instanceof String && ((String) r1).equals(RANGE_UNBOUNDED)) { + maxTest = true; + } else { + max = r1 instanceof Integer ? ((Integer) r1).floatValue() : (Float) r1; + } + + // validate the max > min (account for UNBOUNDED) + if (!minTest && !maxTest) { + // Note: min == max is allowed + if (min > max) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE268", String.format( + "ValueError:\"%s\" is not a valid range", range.toString()))); + } + } + // finally... 
+ boolean bError = false; + //Note: value is valid if equal to min + if (!minTest) { + if (fval < min) { + bError = true; + } + } + // Note: value is valid if equal to max + if (!maxTest) { + if (fval > max) { + bError = true; + } + } + if (bError) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE269", String.format( + "RangeValueError: Property \"%s\", \"%s\" not in range [\"%s\" - \"%s\"", + propName, value.toString(), r0.toString(), r1.toString()))); + } + return value; + } + + public static Object validateMap(Object ob) { + if (ob != null) { + if (!(ob instanceof LinkedHashMap)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE270", String.format( + "ValueError\"%s\" is not a map.", ob.toString()))); + } + } + return ob; + } + + public static Object validateBoolean(Object value) { + if (value != null) { + if (value instanceof Boolean) { + return value; + } + if (value instanceof String) { + String normalized = ((String) value).toLowerCase(); + if (normalized.equals("true") || normalized.equals("false")) { + return normalized.equals("true"); + } + } + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE271", String.format( + "ValueError: \"%s\" is not a boolean", value.toString()))); + } + return value; + } + + public static Object validateTimestamp(Object value) { + + /* + try: + # Note: we must return our own exception message + # as dateutil's parser returns different types / values on + # different systems. OSX, for example, returns a tuple + # containing a different error message than Linux + dateutil.parser.parse(value) + except Exception as e: + original_err_msg = str(e) + log.error(original_err_msg) + ValidationIssueCollector.appendException( + ValueError(_('"%(val)s" is not a valid timestamp. 
"%(msg)s"') % + {'val': value, 'msg': original_err_msg})) + */ + // timestamps are loaded as Date objects by the YAML parser + if (value != null) { + if (!(value instanceof Date)) { + ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE272", String.format( + "ValueError: \"%s\" is not a valid timestamp", + value.toString()))); + + } + } + return value; + } + +} + +/*python + +from toscaparser.elements import constraints +from toscaparser.common.exception import ValidationIssueCollector +from toscaparser.common.exception import InvalidTOSCAVersionPropertyException +from toscaparser.common.exception import RangeValueError +from toscaparser.utils.gettextutils import _ + +log = logging.getLogger('tosca') + +RANGE_UNBOUNDED = 'UNBOUNDED' + + +def str_to_num(value): + '''Convert a string representation of a number into a numeric type.''' + # tODO(TBD) we should not allow numeric values in, input should be str + if isinstance(value, numbers.Number): + return value + try: + return int(value) + except ValueError: + return float(value) + + +def validate_numeric(value): + if not isinstance(value, numbers.Number): + ValidationIssueCollector.appendException( + ValueError(_('"%s" is not a numeric.') % value)) + return value + + +def validate_integer(value): + if not isinstance(value, int): + try: + value = int(value) + except Exception: + ValidationIssueCollector.appendException( + ValueError(_('"%s" is not an integer.') % value)) + return value + + +def validate_float(value): + if not isinstance(value, float): + ValidationIssueCollector.appendException( + ValueError(_('"%s" is not a float.') % value)) + return value + + +def validate_string(value): + if not isinstance(value, six.string_types): + ValidationIssueCollector.appendException( + ValueError(_('"%s" is not a string.') % value)) + return value + + +def validate_list(value): + if not isinstance(value, list): + ValidationIssueCollector.appendException( + ValueError(_('"%s" is not a list.') 
% value)) + return value + + +def validate_range(range): + # list class check + validate_list(range) + # validate range list has a min and max + if len(range) != 2: + ValidationIssueCollector.appendException( + ValueError(_('"%s" is not a valid range.') % range)) + # validate min and max are numerics or the keyword UNBOUNDED + min_test = max_test = False + if not range[0] == RANGE_UNBOUNDED: + min = validate_numeric(range[0]) + else: + min_test = True + if not range[1] == RANGE_UNBOUNDED: + max = validate_numeric(range[1]) + else: + max_test = True + # validate the max > min (account for UNBOUNDED) + if not min_test and not max_test: + # Note: min == max is allowed + if min > max: + ValidationIssueCollector.appendException( + ValueError(_('"%s" is not a valid range.') % range)) + + return range + + +def validate_value_in_range(value, range, prop_name): + validate_numeric(value) + validate_range(range) + + # Note: value is valid if equal to min + if range[0] != RANGE_UNBOUNDED: + if value < range[0]: + ValidationIssueCollector.appendException( + RangeValueError(pname=prop_name, + pvalue=value, + vmin=range[0], + vmax=range[1])) + # Note: value is valid if equal to max + if range[1] != RANGE_UNBOUNDED: + if value > range[1]: + ValidationIssueCollector.appendException( + RangeValueError(pname=prop_name, + pvalue=value, + vmin=range[0], + vmax=range[1])) + return value + + +def validate_map(value): + if not isinstance(value, collections.Mapping): + ValidationIssueCollector.appendException( + ValueError(_('"%s" is not a map.') % value)) + return value + + +def validate_boolean(value): + if isinstance(value, bool): + return value + + if isinstance(value, str): + normalised = value.lower() + if normalised in ['true', 'false']: + return normalised == 'true' + + ValidationIssueCollector.appendException( + ValueError(_('"%s" is not a boolean.') % value)) + + +def validate_timestamp(value): + try: + # Note: we must return our own exception message + # as dateutil's parser 
returns different types / values on + # different systems. OSX, for example, returns a tuple + # containing a different error message than Linux + dateutil.parser.parse(value) + except Exception as e: + original_err_msg = str(e) + log.error(original_err_msg) + ValidationIssueCollector.appendException( + ValueError(_('"%(val)s" is not a valid timestamp. "%(msg)s"') % + {'val': value, 'msg': original_err_msg})) + return + +*/ diff --git a/jtosca/src/main/resources/TOSCA_definition_1_0.yaml b/jtosca/src/main/resources/TOSCA_definition_1_0.yaml new file mode 100644 index 0000000..d80ed17 --- /dev/null +++ b/jtosca/src/main/resources/TOSCA_definition_1_0.yaml @@ -0,0 +1,971 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +########################################################################## +# The content of this file reflects TOSCA Simple Profile in YAML version +# 1.0.0. It describes the definition for TOSCA types including Node Type, +# Relationship Type, CapabilityAssignment Type and Interfaces. +########################################################################## +tosca_definitions_version: tosca_simple_yaml_1_0 + +########################################################################## +# Node Type. +# A Node Type is a reusable entity that defines the type of one or more +# Node Templates. 
+########################################################################## +node_types: + tosca.nodes.Root: + description: > + The TOSCA root node all other TOSCA base node types derive from. + attributes: + tosca_id: + type: string + tosca_name: + type: string + state: + type: string + capabilities: + feature: + type: tosca.capabilities.Node + requirements: + - dependency: + capability: tosca.capabilities.Node + node: tosca.nodes.Root + relationship: tosca.relationships.DependsOn + occurrences: [ 0, UNBOUNDED ] + interfaces: + Standard: + type: tosca.interfaces.node.lifecycle.Standard + + tosca.nodes.Compute: + derived_from: tosca.nodes.Root + attributes: + private_address: + type: string + public_address: + type: string + networks: + type: map + entry_schema: + type: tosca.datatypes.network.NetworkInfo + ports: + type: map + entry_schema: + type: tosca.datatypes.network.PortInfo + capabilities: + host: + type: tosca.capabilities.Container + binding: + type: tosca.capabilities.network.Bindable + os: + type: tosca.capabilities.OperatingSystem + scalable: + type: tosca.capabilities.Scalable + endpoint: + type: tosca.capabilities.Endpoint.Admin + requirements: + - local_storage: + capability: tosca.capabilities.Attachment + node: tosca.nodes.BlockStorage + relationship: tosca.relationships.AttachesTo + occurrences: [0, UNBOUNDED] + + tosca.nodes.SoftwareComponent: + derived_from: tosca.nodes.Root + properties: + # domain-specific software component version + component_version: + type: version + required: false + description: > + Software component version. 
+ admin_credential: + type: tosca.datatypes.Credential + required: false + requirements: + - host: + capability: tosca.capabilities.Container + node: tosca.nodes.Compute + relationship: tosca.relationships.HostedOn + + tosca.nodes.DBMS: + derived_from: tosca.nodes.SoftwareComponent + properties: + port: + required: false + type: integer + description: > + The port the DBMS service will listen to for data and requests. + root_password: + required: false + type: string + description: > + The root password for the DBMS service. + capabilities: + host: + type: tosca.capabilities.Container + valid_source_types: [tosca.nodes.Database] + + tosca.nodes.Database: + derived_from: tosca.nodes.Root + properties: + user: + required: false + type: string + description: > + User account name for DB administration + port: + required: false + type: integer + description: > + The port the database service will use to listen for incoming data and + requests. + name: + required: false + type: string + description: > + The name of the database. 
+ password: + required: false + type: string + description: > + The password for the DB user account + requirements: + - host: + capability: tosca.capabilities.Container + node: tosca.nodes.DBMS + relationship: tosca.relationships.HostedOn + capabilities: + database_endpoint: + type: tosca.capabilities.Endpoint.Database + + tosca.nodes.WebServer: + derived_from: tosca.nodes.SoftwareComponent + capabilities: + data_endpoint: + type: tosca.capabilities.Endpoint + admin_endpoint: + type: tosca.capabilities.Endpoint.Admin + host: + type: tosca.capabilities.Container + valid_source_types: [tosca.nodes.WebApplication] + + tosca.nodes.WebApplication: + derived_from: tosca.nodes.Root + properties: + context_root: + type: string + required: false + requirements: + - host: + capability: tosca.capabilities.Container + node: tosca.nodes.WebServer + relationship: tosca.relationships.HostedOn + capabilities: + app_endpoint: + type: tosca.capabilities.Endpoint + + tosca.nodes.BlockStorage: + derived_from: tosca.nodes.Root + properties: + size: + type: scalar-unit.size + constraints: + - greater_or_equal: 1 MB + volume_id: + type: string + required: false + snapshot_id: + type: string + required: false + attributes: + volume_id: + type: string + capabilities: + attachment: + type: tosca.capabilities.Attachment + + tosca.nodes.network.Network: + derived_from: tosca.nodes.Root + description: > + The TOSCA Network node represents a simple, logical network service. + properties: + ip_version: + type: integer + required: false + default: 4 + constraints: + - valid_values: [ 4, 6 ] + description: > + The IP version of the requested network. Valid values are 4 for ipv4 + or 6 for ipv6. + cidr: + type: string + required: false + description: > + The cidr block of the requested network. + start_ip: + type: string + required: false + description: > + The IP address to be used as the start of a pool of addresses within + the full IP range derived from the cidr block. 
+ end_ip: + type: string + required: false + description: > + The IP address to be used as the end of a pool of addresses within + the full IP range derived from the cidr block. + gateway_ip: + type: string + required: false + description: > + The gateway IP address. + network_name: + type: string + required: false + description: > + An identifier that represents an existing Network instance in the + underlying cloud infrastructure or can be used as the name of the + newly created network. If network_name is provided and no other + properties are provided (with exception of network_id), then an + existing network instance will be used. If network_name is provided + alongside with more properties then a new network with this name will + be created. + network_id: + type: string + required: false + description: > + An identifier that represents an existing Network instance in the + underlying cloud infrastructure. This property is mutually exclusive + with all other properties except network_name. This can be used alone + or together with network_name to identify an existing network. + segmentation_id: + type: string + required: false + description: > + A segmentation identifier in the underlying cloud infrastructure. + E.g. VLAN ID, GRE tunnel ID, etc.. + network_type: + type: string + required: false + description: > + It specifies the nature of the physical network in the underlying + cloud infrastructure. Examples are flat, vlan, gre or vxlan. + For flat and vlan types, physical_network should be provided too. + physical_network: + type: string + required: false + description: > + It identifies the physical network on top of which the network is + implemented, e.g. physnet1. This property is required if network_type + is flat or vlan. + dhcp_enabled: + type: boolean + required: false + default: true + description: > + Indicates should DHCP service be enabled on the network or not. 
+ capabilities: + link: + type: tosca.capabilities.network.Linkable + + tosca.nodes.network.Port: + derived_from: tosca.nodes.Root + description: > + The TOSCA Port node represents a logical entity that associates between + Compute and Network normative types. The Port node type effectively + represents a single virtual NIC on the Compute node instance. + properties: + ip_address: + type: string + required: false + description: > + Allow the user to set a static IP. + order: + type: integer + required: false + default: 0 + constraints: + - greater_or_equal: 0 + description: > + The order of the NIC on the compute instance (e.g. eth2). + is_default: + type: boolean + required: false + default: false + description: > + If is_default=true this port will be used for the default gateway + route. Only one port that is associated to single compute node can + set as is_default=true. + ip_range_start: + type: string + required: false + description: > + Defines the starting IP of a range to be allocated for the compute + instances that are associated with this Port. + ip_range_end: + type: string + required: false + description: > + Defines the ending IP of a range to be allocated for the compute + instances that are associated with this Port. + attributes: + ip_address: + type: string + requirements: + - binding: + description: > + Binding requirement expresses the relationship between Port and + Compute nodes. Effectively it indicates that the Port will be + attached to specific Compute node instance + capability: tosca.capabilities.network.Bindable + relationship: tosca.relationships.network.BindsTo + node: tosca.nodes.Compute + - link: + description: > + Link requirement expresses the relationship between Port and Network + nodes. It indicates which network this port will connect to. 
+ capability: tosca.capabilities.network.Linkable + relationship: tosca.relationships.network.LinksTo + node: tosca.nodes.network.Network + + tosca.nodes.network.FloatingIP: + derived_from: tosca.nodes.Root + description: > + The TOSCA FloatingIP node represents a floating IP that can associate to a Port. + properties: + floating_network: + type: string + required: true + floating_ip_address: + type: string + required: false + port_id: + type: string + required: false + requirements: + - link: + capability: tosca.capabilities.network.Linkable + relationship: tosca.relationships.network.LinksTo + node: tosca.nodes.network.Port + + tosca.nodes.ObjectStorage: + derived_from: tosca.nodes.Root + description: > + The TOSCA ObjectStorage node represents storage that provides the ability + to store data as objects (or BLOBs of data) without consideration for the + underlying filesystem or devices + properties: + name: + type: string + required: true + description: > + The logical name of the object store (or container). + size: + type: scalar-unit.size + required: false + constraints: + - greater_or_equal: 0 GB + description: > + The requested initial storage size. + maxsize: + type: scalar-unit.size + required: false + constraints: + - greater_or_equal: 0 GB + description: > + The requested maximum storage size. 
+ capabilities: + storage_endpoint: + type: tosca.capabilities.Endpoint + + tosca.nodes.LoadBalancer: + derived_from: tosca.nodes.Root + properties: + algorithm: + type: string + required: false + status: experimental + capabilities: + client: + type: tosca.capabilities.Endpoint.Public + occurrences: [0, UNBOUNDED] + description: the Floating (IP) client’s on the public network can connect to + requirements: + - application: + capability: tosca.capabilities.Endpoint + relationship: tosca.relationships.RoutesTo + occurrences: [0, UNBOUNDED] + description: Connection to one or more load balanced applications + + tosca.nodes.Container.Application: + derived_from: tosca.nodes.Root + requirements: + - host: + capability: tosca.capabilities.Container + node: tosca.nodes.Container.Runtime + relationship: tosca.relationships.HostedOn + + tosca.nodes.Container.Runtime: + derived_from: tosca.nodes.SoftwareComponent + capabilities: + host: + type: tosca.capabilities.Container + scalable: + type: tosca.capabilities.Scalable + + tosca.nodes.Container.Application.Docker: + derived_from: tosca.nodes.Container.Application + requirements: + - host: + capability: tosca.capabilities.Container.Docker + +########################################################################## +# Relationship Type. +# A Relationship Type is a reusable entity that defines the type of one +# or more relationships between Node Types or Node Templates. +########################################################################## +relationship_types: + tosca.relationships.Root: + description: > + The TOSCA root Relationship Type all other TOSCA base Relationship Types + derive from. 
+ attributes: + tosca_id: + type: string + tosca_name: + type: string + interfaces: + Configure: + type: tosca.interfaces.relationship.Configure + + tosca.relationships.DependsOn: + derived_from: tosca.relationships.Root + + tosca.relationships.HostedOn: + derived_from: tosca.relationships.Root + valid_target_types: [ tosca.capabilities.Container ] + + tosca.relationships.ConnectsTo: + derived_from: tosca.relationships.Root + valid_target_types: [ tosca.capabilities.Endpoint ] + credential: + type: tosca.datatypes.Credential + required: false + + tosca.relationships.AttachesTo: + derived_from: tosca.relationships.Root + valid_target_types: [ tosca.capabilities.Attachment ] + properties: + location: + required: true + type: string + constraints: + - min_length: 1 + device: + required: false + type: string + + tosca.relationships.RoutesTo: + derived_from: tosca.relationships.ConnectsTo + valid_target_types: [ tosca.capabilities.Endpoint ] + + tosca.relationships.network.LinksTo: + derived_from: tosca.relationships.DependsOn + valid_target_types: [ tosca.capabilities.network.Linkable ] + + tosca.relationships.network.BindsTo: + derived_from: tosca.relationships.DependsOn + valid_target_types: [ tosca.capabilities.network.Bindable ] + +########################################################################## +# CapabilityAssignment Type. +# A CapabilityAssignment Type is a reusable entity that describes a kind of +# capability that a Node Type can declare to expose. +########################################################################## +capability_types: + tosca.capabilities.Root: + description: > + The TOSCA root Capability Type all other TOSCA base Capability Types + derive from. 
+ + tosca.capabilities.Node: + derived_from: tosca.capabilities.Root + + tosca.capabilities.Container: + derived_from: tosca.capabilities.Root + properties: + num_cpus: + required: false + type: integer + constraints: + - greater_or_equal: 1 + cpu_frequency: + required: false + type: scalar-unit.frequency + constraints: + - greater_or_equal: 0.1 GHz + disk_size: + required: false + type: scalar-unit.size + constraints: + - greater_or_equal: 0 MB + mem_size: + required: false + type: scalar-unit.size + constraints: + - greater_or_equal: 0 MB + + tosca.capabilities.Endpoint: + derived_from: tosca.capabilities.Root + properties: + protocol: + type: string + required: true + default: tcp + port: + type: tosca.datatypes.network.PortDef + required: false + secure: + type: boolean + required: false + default: false + url_path: + type: string + required: false + port_name: + type: string + required: false + network_name: + type: string + required: false + default: PRIVATE + initiator: + type: string + required: false + default: source + constraints: + - valid_values: [source, target, peer] + ports: + type: map + required: false + constraints: + - min_length: 1 + entry_schema: + type: tosca.datatypes.network.PortSpec + attributes: + ip_address: + type: string + + tosca.capabilities.Endpoint.Admin: + derived_from: tosca.capabilities.Endpoint + properties: + secure: + type: boolean + default: true + constraints: + - equal: true + + tosca.capabilities.Endpoint.Public: + derived_from: tosca.capabilities.Endpoint + properties: + # Change the default network_name to use the first public network found + network_name: + type: string + default: PUBLIC + constraints: + - equal: PUBLIC + floating: + description: > + Indicates that the public address should be allocated from a pool of + floating IPs that are associated with the network. 
+ type: boolean + default: false + status: experimental + dns_name: + description: The optional name to register with DNS + type: string + required: false + status: experimental + + tosca.capabilities.Scalable: + derived_from: tosca.capabilities.Root + properties: + min_instances: + type: integer + required: true + default: 1 + description: > + This property is used to indicate the minimum number of instances + that should be created for the associated TOSCA Node Template by + a TOSCA orchestrator. + max_instances: + type: integer + required: true + default: 1 + description: > + This property is used to indicate the maximum number of instances + that should be created for the associated TOSCA Node Template by + a TOSCA orchestrator. + default_instances: + type: integer + required: false + description: > + An optional property that indicates the requested default number + of instances that should be the starting number of instances a + TOSCA orchestrator should attempt to allocate. + The value for this property MUST be in the range between the values + set for min_instances and max_instances properties. + + tosca.capabilities.Endpoint.Database: + derived_from: tosca.capabilities.Endpoint + + tosca.capabilities.Attachment: + derived_from: tosca.capabilities.Root + + tosca.capabilities.network.Linkable: + derived_from: tosca.capabilities.Root + description: > + A node type that includes the Linkable capability indicates that it can + be pointed by tosca.relationships.network.LinksTo relationship type, which + represents an association relationship between Port and Network node types. + + tosca.capabilities.network.Bindable: + derived_from: tosca.capabilities.Root + description: > + A node type that includes the Bindable capability indicates that it can + be pointed by tosca.relationships.network.BindsTo relationship type, which + represents a network association relationship between Port and Compute node + types. 
+ + tosca.capabilities.OperatingSystem: + derived_from: tosca.capabilities.Root + properties: + architecture: + required: false + type: string + description: > + The host Operating System (OS) architecture. + type: + required: false + type: string + description: > + The host Operating System (OS) type. + distribution: + required: false + type: string + description: > + The host Operating System (OS) distribution. Examples of valid values + for an “type” of “Linux” would include: + debian, fedora, rhel and ubuntu. + version: + required: false + type: version + description: > + The host Operating System version. + + tosca.capabilities.Container.Docker: + derived_from: tosca.capabilities.Container + properties: + version: + type: list + required: false + entry_schema: + type: version + description: > + The Docker version capability. + publish_all: + type: boolean + default: false + required: false + description: > + Indicates that all ports (ranges) listed in the dockerfile + using the EXPOSE keyword be published. + publish_ports: + type: list + entry_schema: + type: tosca.datatypes.network.PortSpec + required: false + description: > + List of ports mappings from source (Docker container) + to target (host) ports to publish. + expose_ports: + type: list + entry_schema: + type: tosca.datatypes.network.PortSpec + required: false + description: > + List of ports mappings from source (Docker container) to expose + to other Docker containers (not accessible outside host). + volumes: + type: list + entry_schema: + type: string + required: false + description: > + The dockerfile VOLUME command which is used to enable access + from the Docker container to a directory on the host machine. + host_id: + type: string + required: false + description: > + The optional identifier of an existing host resource + that should be used to run this container on. 
+ volume_id: + type: string + required: false + description: > + The optional identifier of an existing storage volume (resource) + that should be used to create the container's mount point(s) on. + +########################################################################## + # Interfaces Type. + # The Interfaces element describes a list of one or more interface + # definitions for a modelable entity (e.g., a Node or Relationship Type) + # as defined within the TOSCA Simple Profile specification. +########################################################################## +interface_types: + tosca.interfaces.node.lifecycle.Standard: + create: + description: Standard lifecycle create operation. + configure: + description: Standard lifecycle configure operation. + start: + description: Standard lifecycle start operation. + stop: + description: Standard lifecycle stop operation. + delete: + description: Standard lifecycle delete operation. + + tosca.interfaces.relationship.Configure: + pre_configure_source: + description: Operation to pre-configure the source endpoint. + pre_configure_target: + description: Operation to pre-configure the target endpoint. + post_configure_source: + description: Operation to post-configure the source endpoint. + post_configure_target: + description: Operation to post-configure the target endpoint. + add_target: + description: Operation to add a target node. + remove_target: + description: Operation to remove a target node. + add_source: > + description: Operation to notify the target node of a source node which + is now available via a relationship. + description: + target_changed: > + description: Operation to notify source some property or attribute of the + target changed + +########################################################################## + # Data Type. + # A Datatype is a complex data type declaration which contains other + # complex or simple data types. 
+########################################################################## +data_types: + tosca.datatypes.Root: + description: > + The TOSCA root Data Type all other TOSCA base Data Types derive from + + tosca.datatypes.network.NetworkInfo: + derived_from: tosca.datatypes.Root + properties: + network_name: + type: string + network_id: + type: string + addresses: + type: list + entry_schema: + type: string + + tosca.datatypes.network.PortInfo: + derived_from: tosca.datatypes.Root + properties: + port_name: + type: string + port_id: + type: string + network_id: + type: string + mac_address: + type: string + addresses: + type: list + entry_schema: + type: string + + tosca.datatypes.network.PortDef: + derived_from: tosca.datatypes.Root + type: integer + constraints: + - in_range: [ 1, 65535 ] + + tosca.datatypes.network.PortSpec: + derived_from: tosca.datatypes.Root + properties: + protocol: + type: string + required: true + default: tcp + constraints: + - valid_values: [ udp, tcp, igmp ] + target: + type: tosca.datatypes.network.PortDef + required: false + target_range: + type: range + required: false + constraints: + - in_range: [ 1, 65535 ] + source: + type: tosca.datatypes.network.PortDef + required: false + source_range: + type: range + required: false + constraints: + - in_range: [ 1, 65535 ] + + tosca.datatypes.Credential: + derived_from: tosca.datatypes.Root + properties: + protocol: + type: string + required: false + token_type: + type: string + default: password + required: true + token: + type: string + required: true + keys: + type: map + entry_schema: + type: string + required: false + user: + type: string + required: false + +########################################################################## + # Artifact Type. + # An Artifact Type is a reusable entity that defines the type of one or more + # files which Node Types or Node Templates can have dependent relationships + # and used during operations such as during installation or deployment. 
+########################################################################## +artifact_types: + tosca.artifacts.Root: + description: > + The TOSCA Artifact Type all other TOSCA Artifact Types derive from + properties: + version: version + + tosca.artifacts.File: + derived_from: tosca.artifacts.Root + + tosca.artifacts.Deployment: + derived_from: tosca.artifacts.Root + description: TOSCA base type for deployment artifacts + + tosca.artifacts.Deployment.Image: + derived_from: tosca.artifacts.Deployment + + tosca.artifacts.Deployment.Image.VM: + derived_from: tosca.artifacts.Deployment.Image + + tosca.artifacts.Implementation: + derived_from: tosca.artifacts.Root + description: TOSCA base type for implementation artifacts + + tosca.artifacts.Implementation.Bash: + derived_from: tosca.artifacts.Implementation + description: Script artifact for the Unix Bash shell + mime_type: application/x-sh + file_ext: [ sh ] + + tosca.artifacts.Implementation.Python: + derived_from: tosca.artifacts.Implementation + description: Artifact for the interpreted Python language + mime_type: application/x-python + file_ext: [ py ] + + tosca.artifacts.Deployment.Image.Container.Docker: + derived_from: tosca.artifacts.Deployment.Image + description: Docker container image + + tosca.artifacts.Deployment.Image.VM.ISO: + derived_from: tosca.artifacts.Deployment.Image + description: Virtual Machine (VM) image in ISO disk format + mime_type: application/octet-stream + file_ext: [ iso ] + + tosca.artifacts.Deployment.Image.VM.QCOW2: + derived_from: tosca.artifacts.Deployment.Image + description: Virtual Machine (VM) image in QCOW v2 standard disk format + mime_type: application/octet-stream + file_ext: [ qcow2 ] + +########################################################################## + # Policy Type. + # TOSCA Policy Types represent logical grouping of TOSCA nodes that have + # an implied relationship and need to be orchestrated or managed together + # to achieve some result. 
+########################################################################## +policy_types: + tosca.policies.Root: + description: The TOSCA Policy Type all other TOSCA Policy Types derive from. + + tosca.policies.Placement: + derived_from: tosca.policies.Root + description: The TOSCA Policy Type definition that is used to govern + placement of TOSCA nodes or groups of nodes. + + tosca.policies.Scaling: + derived_from: tosca.policies.Root + description: The TOSCA Policy Type definition that is used to govern + scaling of TOSCA nodes or groups of nodes. + + tosca.policies.Monitoring: + derived_from: tosca.policies.Root + description: The TOSCA Policy Type definition that is used to govern + monitoring of TOSCA nodes or groups of nodes. + + tosca.policies.Update: + derived_from: tosca.policies.Root + description: The TOSCA Policy Type definition that is used to govern + update of TOSCA nodes or groups of nodes. + + tosca.policies.Performance: + derived_from: tosca.policies.Root + description: The TOSCA Policy Type definition that is used to declare + performance requirements for TOSCA nodes or groups of nodes. + + onap.policies.Monitoring: + derived_from: tosca.policies.Root + description: The ONAP Policy Type definition for DCAE uS component monitoring policies. + +########################################################################## + # Group Type. + # Group Type represents logical grouping of TOSCA nodes that have an + # implied membership relationship and may need to be orchestrated or + # managed together to achieve some result. 
+########################################################################## +group_types: + tosca.groups.Root: + description: The TOSCA Group Type all other TOSCA Group Types derive from + interfaces: + Standard: + type: tosca.interfaces.node.lifecycle.Standard diff --git a/jtosca/src/main/resources/extensions/TOSCA_simple_yaml_definition_1_0_0/TOSCA_simple_yaml_definition_1_0_0.py b/jtosca/src/main/resources/extensions/TOSCA_simple_yaml_definition_1_0_0/TOSCA_simple_yaml_definition_1_0_0.py new file mode 100644 index 0000000..a5bda4a --- /dev/null +++ b/jtosca/src/main/resources/extensions/TOSCA_simple_yaml_definition_1_0_0/TOSCA_simple_yaml_definition_1_0_0.py @@ -0,0 +1,19 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +# VERSION and DEFS_FILE are required for all extensions + +VERSION = 'tosca_simple_yaml_1_0_0' + +DEFS_FILE = "TOSCA_simple_yaml_definition_1_0_0.yaml" + +SECTIONS = ('metadata') diff --git a/jtosca/src/main/resources/extensions/TOSCA_simple_yaml_definition_1_0_0/TOSCA_simple_yaml_definition_1_0_0.yaml b/jtosca/src/main/resources/extensions/TOSCA_simple_yaml_definition_1_0_0/TOSCA_simple_yaml_definition_1_0_0.yaml new file mode 100644 index 0000000..c645e27 --- /dev/null +++ b/jtosca/src/main/resources/extensions/TOSCA_simple_yaml_definition_1_0_0/TOSCA_simple_yaml_definition_1_0_0.yaml @@ -0,0 +1,240 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +########################################################################## +# The content of this file reflects TOSCA NFV Profile in YAML version +# 1.0.0. It describes the definition for TOSCA NFV types including Node Type, +# Relationship Type, CapabilityAssignment Type and Interfaces. +########################################################################## +tosca_definitions_version: tosca_simple_yaml_1_0_0 + +########################################################################## +# Node Type. +# A Node Type is a reusable entity that defines the type of one or more +# Node Templates. +########################################################################## +node_types: + tosca.nodes.nfv.VNF: + derived_from: tosca.nodes.Root # Or should this be its own top - level type? 
+ properties: + id: + type: string + description: ID of this VNF + vendor: + type: string + description: name of the vendor who generate this VNF + version: + type: version + description: version of the software for this VNF + requirements: + - virtualLink: + capability: tosca.capabilities.nfv.VirtualLinkable + relationship: tosca.relationships.nfv.VirtualLinksTo + node: tosca.nodes.nfv.VL + + tosca.nodes.nfv.VDU: + derived_from: tosca.nodes.Compute + capabilities: + high_availability: + type: tosca.capabilities.nfv.HA + virtualbinding: + type: tosca.capabilities.nfv.VirtualBindable + monitoring_parameter: + type: tosca.capabilities.nfv.Metric + requirements: + - high_availability: + capability: tosca.capabilities.nfv.HA + relationship: tosca.relationships.nfv.HA + node: tosca.nodes.nfv.VDU + occurrences: [ 0, 1 ] + + tosca.nodes.nfv.CP: + derived_from: tosca.nodes.network.Port + properties: + type: + type: string + required: false + requirements: + - virtualLink: + capability: tosca.capabilities.nfv.VirtualLinkable + relationship: tosca.relationships.nfv.VirtualLinksTo + node: tosca.nodes.nfv.VL + - virtualBinding: + capability: tosca.capabilities.nfv.VirtualBindable + relationship: tosca.relationships.nfv.VirtualBindsTo + node: tosca.nodes.nfv.VDU + attributes: + address: + type: string + + tosca.nodes.nfv.VL: + derived_from: tosca.nodes.network.Network + properties: + vendor: + type: string + required: true + description: name of the vendor who generate this VL + capabilities: + virtual_linkable: + type: tosca.capabilities.nfv.VirtualLinkable + + tosca.nodes.nfv.VL.ELine: + derived_from: tosca.nodes.nfv.VL + capabilities: + virtual_linkable: + occurrences: 2 + + tosca.nodes.nfv.VL.ELAN: + derived_from: tosca.nodes.nfv.VL + + tosca.nodes.nfv.VL.ETree: + derived_from: tosca.nodes.nfv.VL + + tosca.nodes.nfv.FP: + derived_from: tosca.nodes.Root + properties: + policy: + type: string + required: false + description: name of the vendor who generate this VL + 
requirements:
+      - forwarder:
+          capability: tosca.capabilities.nfv.Forwarder
+          relationship: tosca.relationships.nfv.ForwardsTo
+
+##########################################################################
+# Relationship Type.
+# A Relationship Type is a reusable entity that defines the type of one
+# or more relationships between Node Types or Node Templates.
+##########################################################################
+
+relationship_types:
+  tosca.relationships.nfv.VirtualLinksTo:
+    derived_from: tosca.relationships.network.LinksTo
+    valid_target_types: [ tosca.capabilities.nfv.VirtualLinkable ]
+
+  tosca.relationships.nfv.VirtualBindsTo:
+    derived_from: tosca.relationships.network.BindsTo
+    valid_target_types: [ tosca.capabilities.nfv.VirtualBindable ]
+
+  tosca.relationships.nfv.HA:
+    derived_from: tosca.relationships.Root
+    valid_target_types: [ tosca.capabilities.nfv.HA ]
+
+  tosca.relationships.nfv.Monitor:
+    derived_from: tosca.relationships.ConnectsTo
+    valid_target_types: [ tosca.capabilities.nfv.Metric ]
+
+  tosca.relationships.nfv.ForwardsTo:
+    derived_from: tosca.relationships.Root
+    valid_target_types: [ tosca.capabilities.nfv.Forwarder ]
+
+##########################################################################
+# CapabilityAssignment Type.
+# A CapabilityAssignment Type is a reusable entity that describes a kind of
+# capability that a Node Type can declare to expose.
+########################################################################## + +capability_types: + tosca.capabilities.nfv.VirtualLinkable: + derived_from: tosca.capabilities.network.Linkable + + tosca.capabilities.nfv.VirtualBindable: + derived_from: tosca.capabilities.network.Bindable + + tosca.capabilities.nfv.HA: + derived_from: tosca.capabilities.Root + valid_source_types: [ tosca.nodes.nfv.VDU ] + + tosca.capabilities.nfv.HA.ActiveActive: + derived_from: tosca.capabilities.nfv.HA + + tosca.capabilities.nfv.HA.ActivePassive: + derived_from: tosca.capabilities.nfv.HA + + tosca.capabilities.nfv.Metric: + derived_from: tosca.capabilities.Root + + tosca.capabilities.nfv.Forwarder: + derived_from: tosca.capabilities.Root + +########################################################################## + # Interfaces Type. + # The Interfaces element describes a list of one or more interface + # definitions for a modelable entity (e.g., a Node or Relationship Type) + # as defined within the TOSCA Simple Profile specification. +########################################################################## + +########################################################################## + # Data Type. + # A Datatype is a complex data type declaration which contains other + # complex or simple data types. +########################################################################## + +########################################################################## + # Artifact Type. + # An Artifact Type is a reusable entity that defines the type of one or more + # files which Node Types or Node Templates can have dependent relationships + # and used during operations such as during installation or deployment. +########################################################################## + +########################################################################## + # Policy Type. 
+ # TOSCA Policy Types represent logical grouping of TOSCA nodes that have + # an implied relationship and need to be orchestrated or managed together + # to achieve some result. +########################################################################## + +########################################################################## + # Group Type + # +########################################################################## +group_types: + tosca.groups.nfv.VNFFG: + derived_from: tosca.groups.Root + + properties: + vendor: + type: string + required: true + description: name of the vendor who generate this VNFFG + + version: + type: string + required: true + description: version of this VNFFG + + number_of_endpoints: + type: integer + required: true + description: count of the external endpoints included in this VNFFG + + dependent_virtual_link: + type: list + entry_schema: + type: string + required: true + description: Reference to a VLD used in this Forwarding Graph + + connection_point: + type: list + entry_schema: + type: string + required: true + description: Reference to Connection Points forming the VNFFG + + constituent_vnfs: + type: list + entry_schema: + type: string + required: true + description: Reference to a list of VNFD used in this VNF Forwarding Graph diff --git a/jtosca/src/main/resources/extensions/nfv/TOSCA_nfv_definition_1_0.yaml b/jtosca/src/main/resources/extensions/nfv/TOSCA_nfv_definition_1_0.yaml new file mode 100644 index 0000000..8b08837 --- /dev/null +++ b/jtosca/src/main/resources/extensions/nfv/TOSCA_nfv_definition_1_0.yaml @@ -0,0 +1,240 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. + +########################################################################## +# The content of this file reflects TOSCA NFV Profile in YAML version +# 1.0.0. It describes the definition for TOSCA NFV types including Node Type, +# Relationship Type, CapabilityAssignment Type and Interfaces. +########################################################################## +tosca_definitions_version: tosca_simple_profile_for_nfv_1_0_0 + +########################################################################## +# Node Type. +# A Node Type is a reusable entity that defines the type of one or more +# Node Templates. +########################################################################## +node_types: + tosca.nodes.nfv.VNF: + derived_from: tosca.nodes.Root # Or should this be its own top - level type? 
+ properties: + id: + type: string + description: ID of this VNF + vendor: + type: string + description: name of the vendor who generate this VNF + version: + type: version + description: version of the software for this VNF + requirements: + - virtualLink: + capability: tosca.capabilities.nfv.VirtualLinkable + relationship: tosca.relationships.nfv.VirtualLinksTo + node: tosca.nodes.nfv.VL + + tosca.nodes.nfv.VDU: + derived_from: tosca.nodes.Compute + capabilities: + high_availability: + type: tosca.capabilities.nfv.HA + virtualbinding: + type: tosca.capabilities.nfv.VirtualBindable + monitoring_parameter: + type: tosca.capabilities.nfv.Metric + requirements: + - high_availability: + capability: tosca.capabilities.nfv.HA + relationship: tosca.relationships.nfv.HA + node: tosca.nodes.nfv.VDU + occurrences: [ 0, 1 ] + + tosca.nodes.nfv.CP: + derived_from: tosca.nodes.network.Port + properties: + type: + type: string + required: false + requirements: + - virtualLink: + capability: tosca.capabilities.nfv.VirtualLinkable + relationship: tosca.relationships.nfv.VirtualLinksTo + node: tosca.nodes.nfv.VL + - virtualBinding: + capability: tosca.capabilities.nfv.VirtualBindable + relationship: tosca.relationships.nfv.VirtualBindsTo + node: tosca.nodes.nfv.VDU + attributes: + address: + type: string + + tosca.nodes.nfv.VL: + derived_from: tosca.nodes.network.Network + properties: + vendor: + type: string + required: true + description: name of the vendor who generate this VL + capabilities: + virtual_linkable: + type: tosca.capabilities.nfv.VirtualLinkable + + tosca.nodes.nfv.VL.ELine: + derived_from: tosca.nodes.nfv.VL + capabilities: + virtual_linkable: + occurrences: 2 + + tosca.nodes.nfv.VL.ELAN: + derived_from: tosca.nodes.nfv.VL + + tosca.nodes.nfv.VL.ETree: + derived_from: tosca.nodes.nfv.VL + + tosca.nodes.nfv.FP: + derived_from: tosca.nodes.Root + properties: + policy: + type: string + required: false + description: name of the vendor who generate this VL + 
requirements:
+      - forwarder:
+          capability: tosca.capabilities.nfv.Forwarder
+          relationship: tosca.relationships.nfv.ForwardsTo
+
+##########################################################################
+# Relationship Type.
+# A Relationship Type is a reusable entity that defines the type of one
+# or more relationships between Node Types or Node Templates.
+##########################################################################
+
+relationship_types:
+  tosca.relationships.nfv.VirtualLinksTo:
+    derived_from: tosca.relationships.network.LinksTo
+    valid_target_types: [ tosca.capabilities.nfv.VirtualLinkable ]
+
+  tosca.relationships.nfv.VirtualBindsTo:
+    derived_from: tosca.relationships.network.BindsTo
+    valid_target_types: [ tosca.capabilities.nfv.VirtualBindable ]
+
+  tosca.relationships.nfv.HA:
+    derived_from: tosca.relationships.Root
+    valid_target_types: [ tosca.capabilities.nfv.HA ]
+
+  tosca.relationships.nfv.Monitor:
+    derived_from: tosca.relationships.ConnectsTo
+    valid_target_types: [ tosca.capabilities.nfv.Metric ]
+
+  tosca.relationships.nfv.ForwardsTo:
+    derived_from: tosca.relationships.Root
+    valid_target_types: [ tosca.capabilities.nfv.Forwarder ]
+
+##########################################################################
+# CapabilityAssignment Type.
+# A CapabilityAssignment Type is a reusable entity that describes a kind of
+# capability that a Node Type can declare to expose.
+########################################################################## + +capability_types: + tosca.capabilities.nfv.VirtualLinkable: + derived_from: tosca.capabilities.network.Linkable + + tosca.capabilities.nfv.VirtualBindable: + derived_from: tosca.capabilities.network.Bindable + + tosca.capabilities.nfv.HA: + derived_from: tosca.capabilities.Root + valid_source_types: [ tosca.nodes.nfv.VDU ] + + tosca.capabilities.nfv.HA.ActiveActive: + derived_from: tosca.capabilities.nfv.HA + + tosca.capabilities.nfv.HA.ActivePassive: + derived_from: tosca.capabilities.nfv.HA + + tosca.capabilities.nfv.Metric: + derived_from: tosca.capabilities.Root + + tosca.capabilities.nfv.Forwarder: + derived_from: tosca.capabilities.Root + +########################################################################## + # Interfaces Type. + # The Interfaces element describes a list of one or more interface + # definitions for a modelable entity (e.g., a Node or Relationship Type) + # as defined within the TOSCA Simple Profile specification. +########################################################################## + +########################################################################## + # Data Type. + # A Datatype is a complex data type declaration which contains other + # complex or simple data types. +########################################################################## + +########################################################################## + # Artifact Type. + # An Artifact Type is a reusable entity that defines the type of one or more + # files which Node Types or Node Templates can have dependent relationships + # and used during operations such as during installation or deployment. +########################################################################## + +########################################################################## + # Policy Type. 
+ # TOSCA Policy Types represent logical grouping of TOSCA nodes that have + # an implied relationship and need to be orchestrated or managed together + # to achieve some result. +########################################################################## + +########################################################################## + # Group Type + # +########################################################################## +group_types: + tosca.groups.nfv.VNFFG: + derived_from: tosca.groups.Root + + properties: + vendor: + type: string + required: true + description: name of the vendor who generate this VNFFG + + version: + type: string + required: true + description: version of this VNFFG + + number_of_endpoints: + type: integer + required: true + description: count of the external endpoints included in this VNFFG + + dependent_virtual_link: + type: list + entry_schema: + type: string + required: true + description: Reference to a VLD used in this Forwarding Graph + + connection_point: + type: list + entry_schema: + type: string + required: true + description: Reference to Connection Points forming the VNFFG + + constituent_vnfs: + type: list + entry_schema: + type: string + required: true + description: Reference to a list of VNFD used in this VNF Forwarding Graph diff --git a/jtosca/src/main/resources/extensions/nfv/nfv.py b/jtosca/src/main/resources/extensions/nfv/nfv.py new file mode 100644 index 0000000..0c7c2b9 --- /dev/null +++ b/jtosca/src/main/resources/extensions/nfv/nfv.py @@ -0,0 +1,19 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the +# License for the specific language governing permissions and limitations +# under the License. + +# VERSION and DEFS_FILE are required for all extensions + +VERSION = 'tosca_simple_profile_for_nfv_1_0_0' + +DEFS_FILE = "TOSCA_nfv_definition_1_0.yaml" + +SECTIONS = ('metadata') diff --git a/jtosca/src/test/java/org/onap/sdc/toscaparser/api/GetValidationIssues.java b/jtosca/src/test/java/org/onap/sdc/toscaparser/api/GetValidationIssues.java new file mode 100644 index 0000000..140a6e9 --- /dev/null +++ b/jtosca/src/test/java/org/onap/sdc/toscaparser/api/GetValidationIssues.java @@ -0,0 +1,100 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ + +package org.onap.sdc.toscaparser.api; + +import com.opencsv.CSVWriter; + +import java.io.File; +import java.io.FileWriter; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Scanner; + +//Generate excel file, include all validation issues errors in jtosca +//the error java code, the line number and file name for each error. +public class GetValidationIssues { + + public static CSVWriter fileWriter = null; + public static List data = new ArrayList<>(); + + public static void main(String[] args) { + System.out.println("GetAllValidationIssues - path to project files Directory is " + Arrays.toString(args)); + File jtoscaFiles = new File(args[0] + "\\jtosca\\src\\main\\java\\org\\onap\\sdc\\toscaparser\\api"); + + try { + printFiles(jtoscaFiles); + fileWriter = new CSVWriter(new FileWriter(args[1] + "\\JToscaValidationIssues_" + System.currentTimeMillis() + ".csv"), '\t'); + fileWriter.writeNext(new String[]{"Error Message", "Class Name", "Line No."}, false); + fileWriter.writeAll(data, false); + } catch (IOException e) { + e.printStackTrace(); + } finally { + try { + fileWriter.flush(); + fileWriter.close(); + } catch (IOException e) { + System.out.println("Error while flushing/closing fileWriter !!!"); + e.printStackTrace(); + } + } + } + + private static void printFiles(File dir) { + if (dir != null && dir.exists()) { + for (File file : dir.listFiles()) { + if (file.isDirectory()) + printFiles(file); + else { + Scanner scanner = null; + try { + scanner = new Scanner(file); + + int lineNum = 0; + while (scanner.hasNextLine()) { + String line = scanner.nextLine(); + lineNum++; + if (line.startsWith("/*python")) + break; + + if (!line.trim().startsWith("//") && !line.trim().startsWith("#") && line.contains("ThreadLocalsHolder.getCollector().appendValidationIssue")) { + String errMsg = line.trim(); + if 
(!errMsg.contains(";")) { + String nextLine = null; + while (scanner.hasNextLine() && (nextLine == null || !nextLine.contains(";"))) { + nextLine = scanner.nextLine(); + errMsg += nextLine.trim(); + } + } + + data.add(new String[]{errMsg, file.getName(), String.valueOf(lineNum)}); + } + } + } catch (IOException e) { + e.printStackTrace(); + } + } + } + } + } +} + diff --git a/jtosca/src/test/java/org/onap/sdc/toscaparser/api/JToscaImportTest.java b/jtosca/src/test/java/org/onap/sdc/toscaparser/api/JToscaImportTest.java new file mode 100644 index 0000000..5876ac7 --- /dev/null +++ b/jtosca/src/test/java/org/onap/sdc/toscaparser/api/JToscaImportTest.java @@ -0,0 +1,309 @@ +/*- + * ============LICENSE_START======================================================= + * Copyright (c) 2017 AT&T Intellectual Property. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + * Modifications copyright (c) 2019 Fujitsu Limited. 
+ * ================================================================================ + */ +package org.onap.sdc.toscaparser.api; + +import org.junit.Test; +import org.onap.sdc.toscaparser.api.common.JToscaException; +import org.onap.sdc.toscaparser.api.elements.DataType; +import org.onap.sdc.toscaparser.api.elements.PropertyDef; +import org.onap.sdc.toscaparser.api.elements.constraints.Schema; +import org.onap.sdc.toscaparser.api.parameters.Annotation; +import org.onap.sdc.toscaparser.api.parameters.Input; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +import java.io.File; +import java.util.*; +import java.util.stream.Collectors; + +import static org.hamcrest.CoreMatchers.containsString; +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.core.IsNull.notNullValue; +import static org.junit.Assert.*; + +public class JToscaImportTest { + + @Test + public void testNoMissingTypeValidationError() throws JToscaException { + String fileStr = JToscaImportTest.class.getClassLoader().getResource("csars/sdc-onboarding_csar.csar") + .getFile(); + File file = new File(fileStr); + new ToscaTemplate(file.getAbsolutePath(), null, true, null); + List missingTypeErrors = ThreadLocalsHolder.getCollector().getValidationIssueReport().stream() + .filter(s -> s.contains("JE136")).collect(Collectors.toList()); + assertEquals(0, missingTypeErrors.size()); + } + + @Test + public void testNoStackOverFlowError() { + Exception jte = null; + try { + String fileStr = JToscaImportTest.class.getClassLoader().getResource("csars/sdc-onboarding_csar.csar") + .getFile(); + File file = new File(fileStr); + new ToscaTemplate(file.getAbsolutePath(), null, true, null); + } catch (Exception e) { + jte = e; + } + assertEquals(null, jte); + } + + @Test + public void testNoInvalidImports() throws JToscaException { + List fileNames = new ArrayList<>(); + fileNames.add("csars/tmpCSAR_Huawei_vSPGW_fixed.csar"); + fileNames.add("csars/sdc-onboarding_csar.csar"); + 
fileNames.add("csars/resource-Spgw-csar-ZTE.csar"); + + for (String fileName : fileNames) { + String fileStr = JToscaImportTest.class.getClassLoader().getResource(fileName).getFile(); + File file = new File(fileStr); + new ToscaTemplate(file.getAbsolutePath(), null, true, null); + List invalidImportErrors = ThreadLocalsHolder.getCollector().getValidationIssueReport().stream() + .filter(s -> s.contains("JE195")).collect(Collectors.toList()); + assertEquals(0, invalidImportErrors.size()); + } + } + + @Test + public void testParseAnnotations() throws JToscaException { + + String fileStr = JToscaImportTest.class.getClassLoader().getResource("csars/service-AdiodVmxVpeBvService-csar.csar").getFile(); + File file = new File(fileStr); + ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); + + List inputs = toscaTemplate.getInputs(); + assertNotNull(inputs); + assertTrue(inputs.stream().filter(i -> i.getAnnotations() != null).collect(Collectors.toList()).isEmpty()); + + inputs.forEach(Input::parseAnnotations); + assertTrue(!inputs.stream().filter(i -> i.getAnnotations() != null).collect(Collectors.toList()).isEmpty()); + } + + @Test + public void testGetInputsWithAndWithoutAnnotations() throws JToscaException { + + String fileStr = JToscaImportTest.class.getClassLoader().getResource("csars/service-AdiodVmxVpeBvService-csar.csar").getFile(); + File file = new File(fileStr); + ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); + List inputs = toscaTemplate.getInputs(); + assertNotNull(inputs); + assertTrue(inputs.stream().filter(i -> i.getAnnotations() != null).collect(Collectors.toList()).isEmpty()); + + inputs = toscaTemplate.getInputs(true); + assertNotNull(inputs); + validateInputsAnnotations(inputs); + + inputs = toscaTemplate.getInputs(false); + assertNotNull(inputs); + assertTrue(inputs.stream().filter(i -> i.getAnnotations() != null).collect(Collectors.toList()).isEmpty()); + } + + @Test + 
public void testGetPropertyNameTest() throws JToscaException { + + String fileStr = JToscaImportTest.class.getClassLoader().getResource("csars/service-AdiodVmxVpeBvService-csar.csar").getFile(); + File file = new File(fileStr); + ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); + NodeTemplate nodeTemplate = toscaTemplate.getNodeTemplates().get(0); + + ArrayList valueList = (ArrayList) nodeTemplate.getPropertyValueFromTemplatesByName("vmxvpfe_sriov41_0_port_vlanfilter"); + assertEquals(4, valueList.size()); + + assertEquals("vPE", (String) nodeTemplate.getPropertyValueFromTemplatesByName("nf_role")); + + assertNull(nodeTemplate.getPropertyValueFromTemplatesByName("test")); + } + + @Test + public void testGetParentNodeTemplateTest() throws JToscaException { + + String fileStr = JToscaImportTest.class.getClassLoader().getResource("csars/service-AdiodVmxVpeBvService-csar.csar").getFile(); + File file = new File(fileStr); + ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); + NodeTemplate nodeTemplate = toscaTemplate.getNodeTemplates().get(0); + //parent of this VF is service (null) + assertNull(nodeTemplate.getParentNodeTemplate()); + List children = nodeTemplate.getSubMappingToscaTemplate().getNodeTemplates(); + assertFalse(children.isEmpty()); + NodeTemplate cVFC = children.get(4); + //parent is the VF above + assertEquals("2017-488_ADIOD-vPE 0", cVFC.getParentNodeTemplate().getName()); + List children1 = cVFC.getSubMappingToscaTemplate().getNodeTemplates(); + assertFalse(children1.isEmpty()); + //parent is the CVFC above + assertEquals(cVFC, children1.get(0).getParentNodeTemplate()); + +/* + + TopologyTemplate tt = nodeTemplate.getOriginComponentTemplate(); + List groups = tt.getGroups(); + List policies = tt.getPolicies(); + + TopologyTemplate tt1 = cVFC.getOriginComponentTemplate(); + groups = tt.getGroups(); + policies = tt.getPolicies(); +*/ + + } + + @Test + public void 
testNullValueHasNoNullPointerException() throws JToscaException { + + String fileStr = JToscaImportTest.class.getClassLoader().getResource("csars/service-JennyVtsbcKarunaSvc-csar.csar").getFile(); + File file = new File(fileStr); + ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); + List inputs = toscaTemplate.getInputs(); + assertNotNull(inputs); + } + + @Test + public void testGetPolicyMetadata() throws JToscaException { + String fileStr = JToscaImportTest.class.getClassLoader().getResource("csars/service-NetworkCloudVnfServiceMock-csar.csar").getFile(); + File file = new File(fileStr); + ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); + ArrayList policies = toscaTemplate.getPolicies(); + assertNotNull(policies); + assertEquals(1, policies.size()); + assertEquals("org.openecomp.policies.External", policies.get(0).getType()); + assertEquals("adf03496-bf87-43cf-b20a-450e47cb44bd", policies.get(0).getMetaData().getOrDefault("UUID", "").toString()); + assertTrue(policies.get(0).getMetaData().getOrDefault("UUID_test", "").toString().isEmpty()); + } + + @Test + public void testGetPolicyMetadataObj() throws JToscaException { + String fileStr = JToscaImportTest.class.getClassLoader().getResource("csars/service-NetworkCloudVnfServiceMock-csar.csar").getFile(); + File file = new File(fileStr); + ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); + ArrayList policies = toscaTemplate.getPolicies(); + assertNotNull(policies); + assertEquals(1, policies.size()); + assertEquals("adf03496-bf87-43cf-b20a-450e47cb44bd", policies.get(0).getMetaDataObj().getAllProperties().getOrDefault("UUID", "").toString()); + assertTrue(policies.get(0).getMetaDataObj().getAllProperties().getOrDefault("name_test", "").toString().isEmpty()); + } + + private void validateInputsAnnotations(List inputs) { + List inputsWithAnnotations = inputs.stream().filter(i -> 
i.getAnnotations() != null) + .collect(Collectors.toList()); + assertTrue(!inputs.isEmpty()); + inputsWithAnnotations.stream().forEach(i -> validateAnnotations(i)); + } + + private void validateAnnotations(Input input) { + assertNotNull(input.getAnnotations()); + assertEquals(input.getAnnotations().size(), 1); + Annotation annotation = input.getAnnotations().get("source"); + assertEquals(annotation.getName(), "source"); + assertEquals(annotation.getType().toLowerCase(), "org.openecomp.annotations.source"); + assertNotNull(annotation.getProperties()); + Optional source_type = annotation.getProperties().stream() + .filter(p -> p.getName().equals("source_type")).findFirst(); + assertTrue(source_type.isPresent()); + assertEquals(source_type.get().getValue(), "HEAT"); + } + + private static final String TEST_DATATYPE_FILENAME = "csars/dataTypes-test-service.csar"; + private static final String TEST_DATATYPE_TEST1 = "TestType1"; + private static final String TEST_DATATYPE_TEST2 = "TestType2"; + private static final String TEST_DATATYPE_PROPERTY_STR = "strdata"; + private static final String TEST_DATATYPE_PROPERTY_INT = "intdata"; + private static final String TEST_DATATYPE_PROPERTY_LIST = "listdata"; + private static final String TEST_DATATYPE_PROPERTY_TYPE = "type"; + private static final String TEST_DATATYPE_PROPERTY_ENTRY_SCHEMA = "entry_schema"; + private static final String TEST_DATATYPE_TOSTRING = "data_types="; + + @Test + public void testGetDataType() throws JToscaException { + String fileStr = JToscaImportTest.class.getClassLoader().getResource(TEST_DATATYPE_FILENAME).getFile(); + File file = new File(fileStr); + ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); + HashSet dataTypes = toscaTemplate.getDataTypes(); + assertThat(dataTypes, notNullValue()); + assertThat(dataTypes.size(), is(2)); + + for (DataType dataType : dataTypes) { + LinkedHashMap properties; + PropertyDef property; + if 
(dataType.getType().equals(TEST_DATATYPE_TEST1)) { + properties = dataType.getAllProperties(); + property = properties.get(TEST_DATATYPE_PROPERTY_STR); + assertThat(property, notNullValue()); + assertThat(property.getName(), is(TEST_DATATYPE_PROPERTY_STR)); + assertThat(property.getSchema().get(TEST_DATATYPE_PROPERTY_TYPE), is(Schema.STRING)); + } + if (dataType.getType().equals(TEST_DATATYPE_TEST2)) { + properties = dataType.getAllProperties(); + property = properties.get(TEST_DATATYPE_PROPERTY_INT); + assertThat(property, notNullValue()); + assertThat(property.getName(), is(TEST_DATATYPE_PROPERTY_INT)); + assertThat(property.getSchema().get(TEST_DATATYPE_PROPERTY_TYPE), is(Schema.INTEGER)); + + property = properties.get(TEST_DATATYPE_PROPERTY_LIST); + assertThat(property, notNullValue()); + assertThat(property.getName(), is(TEST_DATATYPE_PROPERTY_LIST)); + assertThat(property.getSchema().get(TEST_DATATYPE_PROPERTY_TYPE), is(Schema.LIST)); + assertThat(property.getSchema().get(TEST_DATATYPE_PROPERTY_ENTRY_SCHEMA), is(TEST_DATATYPE_TEST1)); + + assertThat((LinkedHashMap) toscaTemplate.getTopologyTemplate().getCustomDefs().get(TEST_DATATYPE_TEST1), notNullValue()); + assertThat((LinkedHashMap) toscaTemplate.getTopologyTemplate().getCustomDefs().get(TEST_DATATYPE_TEST2), notNullValue()); + assertThat(toscaTemplate.toString(), containsString(TEST_DATATYPE_TOSTRING)); + } + } + + } + + @Test + public void testGetInputValidate() throws JToscaException { + String fileStr = JToscaImportTest.class.getClassLoader().getResource(TEST_DATATYPE_FILENAME).getFile(); + File file = new File(fileStr); + ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); + HashSet dataTypes = toscaTemplate.getDataTypes(); + assertThat(dataTypes, notNullValue()); + assertThat(dataTypes.size(), is(2)); + + for (DataType dataType : dataTypes) { + LinkedHashMap properties; + PropertyDef property; + if (dataType.getType().equals(TEST_DATATYPE_TEST1)) { + properties 
= dataType.getAllProperties(); + property = properties.get(TEST_DATATYPE_PROPERTY_STR); + assertThat(property, notNullValue()); + assertThat(property.getName(), is(TEST_DATATYPE_PROPERTY_STR)); + assertThat(property.getSchema().get(TEST_DATATYPE_PROPERTY_TYPE), is(Schema.STRING)); + } + if (dataType.getType().equals(TEST_DATATYPE_TEST2)) { + properties = dataType.getAllProperties(); + property = properties.get(TEST_DATATYPE_PROPERTY_INT); + assertThat(property, notNullValue()); + assertThat(property.getName(), is(TEST_DATATYPE_PROPERTY_INT)); + assertThat(property.getSchema().get(TEST_DATATYPE_PROPERTY_TYPE), is(Schema.INTEGER)); + + property = properties.get(TEST_DATATYPE_PROPERTY_LIST); + assertThat(property, notNullValue()); + assertThat(property.getName(), is(TEST_DATATYPE_PROPERTY_LIST)); + assertThat(property.getSchema().get(TEST_DATATYPE_PROPERTY_TYPE), is(Schema.LIST)); + assertThat(property.getSchema().get(TEST_DATATYPE_PROPERTY_ENTRY_SCHEMA), is(TEST_DATATYPE_TEST1)); + + assertThat((LinkedHashMap) toscaTemplate.getTopologyTemplate().getCustomDefs().get(TEST_DATATYPE_TEST1), notNullValue()); + assertThat((LinkedHashMap) toscaTemplate.getTopologyTemplate().getCustomDefs().get(TEST_DATATYPE_TEST2), notNullValue()); + assertThat(toscaTemplate.toString(), containsString(TEST_DATATYPE_TOSTRING)); + } + } + } +} diff --git a/jtosca/src/test/java/org/onap/sdc/toscaparser/api/JToscaMetadataParse.java b/jtosca/src/test/java/org/onap/sdc/toscaparser/api/JToscaMetadataParse.java new file mode 100644 index 0000000..2ec41b2 --- /dev/null +++ b/jtosca/src/test/java/org/onap/sdc/toscaparser/api/JToscaMetadataParse.java @@ -0,0 +1,127 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. 
+ * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + +package org.onap.sdc.toscaparser.api; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.hasSize; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; + +import java.io.File; +import java.net.URL; +import java.util.ArrayList; +import java.util.Collection; +import java.util.LinkedHashMap; + +import java.util.Map; +import org.junit.Test; +import org.onap.sdc.toscaparser.api.common.JToscaException; +import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; +import org.onap.sdc.toscaparser.api.utils.JToscaErrorCodes; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +public class JToscaMetadataParse { + + @Test + public void testMetadataParsedCorrectly() throws JToscaException { + final File file = loadCsar("csars/csar_hello_world.csar"); + ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); + LinkedHashMap metadataProperties = toscaTemplate.getMetaProperties("TOSCA.meta"); + assertNotNull(metadataProperties); + Object entryDefinition = metadataProperties.get("Entry-Definitions"); + 
assertNotNull(entryDefinition); + assertEquals("tosca_helloworld.yaml", entryDefinition); + } + + @Test + public void noWarningsAfterParse() throws JToscaException { + final File file = loadCsar("csars/tmpCSAR_Huawei_vSPGW_fixed.csar"); + ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); + int validationIssuesCaught = ThreadLocalsHolder.getCollector().validationIssuesCaught(); + assertTrue(validationIssuesCaught == 0); + } + + @Test + public void requiredInputErrorsAfterParse() throws JToscaException { + final File file = loadCsar("csars/tmpCSAR_Huawei_vSPGW_without_required_inputs.csar"); + new ToscaTemplate(file.getAbsolutePath(), null, true, null); + + final Map validationIssues = ThreadLocalsHolder.getCollector() + .getValidationIssues(); + final Collection actualValidationIssueList = validationIssues.values(); + + final Collection expectedValidationIssueList = new ArrayList<>(); + final String errorCode = "JE003"; + final String errorFormat = "MissingRequiredFieldError: The required input \"%s\" was not provided"; + expectedValidationIssueList.add(new JToscaValidationIssue(errorCode + , String.format(errorFormat, "nf_naming_code"))); + expectedValidationIssueList.add(new JToscaValidationIssue(errorCode + , String.format(errorFormat, "nf_type"))); + expectedValidationIssueList.add(new JToscaValidationIssue(errorCode + , String.format(errorFormat, "nf_role"))); + expectedValidationIssueList.add(new JToscaValidationIssue(errorCode + , String.format(errorFormat, "min_instances"))); + expectedValidationIssueList.add(new JToscaValidationIssue(errorCode + , String.format(errorFormat, "max_instances"))); + expectedValidationIssueList.add(new JToscaValidationIssue(errorCode + , String.format(errorFormat, "nf_function"))); + + assertThat("The actual and the expected validation issue lists should have the same size" + , actualValidationIssueList, hasSize(expectedValidationIssueList.size()) + ); + + assertThat("The actual and the 
expected validation issue lists should be the same" + , actualValidationIssueList, containsInAnyOrder(expectedValidationIssueList.toArray(new JToscaValidationIssue[0])) + ); + } + + @Test + public void testEmptyCsar() throws JToscaException { + final File file = loadCsar("csars/emptyCsar.csar"); + try { + ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); + } catch (JToscaException e) { + assertTrue(e.getCode().equals(JToscaErrorCodes.INVALID_CSAR_FORMAT.getValue())); + } + int validationIssuesCaught = ThreadLocalsHolder.getCollector().validationIssuesCaught(); + assertTrue(validationIssuesCaught == 0); + } + + @Test + public void testEmptyPath() throws JToscaException { + String fileStr = JToscaMetadataParse.class.getClassLoader().getResource("").getFile(); + File file = new File(fileStr); + try { + ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); + } catch (JToscaException e) { + assertTrue(e.getCode().equals(JToscaErrorCodes.PATH_NOT_VALID.getValue())); + } + } + + private File loadCsar(final String csarFilePath) { + final URL resourceUrl = JToscaMetadataParse.class.getClassLoader().getResource(csarFilePath); + assertNotNull(String.format("Could not load CSAR file '%s'", csarFilePath), resourceUrl); + + return new File(resourceUrl.getFile()); + } +} diff --git a/jtosca/src/test/java/org/onap/sdc/toscaparser/api/elements/CalculatePropertyByPathTest.java b/jtosca/src/test/java/org/onap/sdc/toscaparser/api/elements/CalculatePropertyByPathTest.java new file mode 100644 index 0000000..fd84d6e --- /dev/null +++ b/jtosca/src/test/java/org/onap/sdc/toscaparser/api/elements/CalculatePropertyByPathTest.java @@ -0,0 +1,167 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. 
+ * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + +package org.onap.sdc.toscaparser.api.elements; + +import org.junit.BeforeClass; +import org.junit.Test; +import org.onap.sdc.toscaparser.api.JToscaImportTest; +import org.onap.sdc.toscaparser.api.NodeTemplate; +import org.onap.sdc.toscaparser.api.Property; +import org.onap.sdc.toscaparser.api.ToscaTemplate; +import org.onap.sdc.toscaparser.api.common.JToscaException; + +import java.io.File; +import java.net.URL; +import java.util.List; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; + +public class CalculatePropertyByPathTest { + private static ToscaTemplate toscaTemplate; + + @BeforeClass + public static void setUpClass() throws JToscaException { + URL scarUrl = JToscaImportTest.class.getClassLoader().getResource("csars/service-NetworkCloudVnfServiceMock-csar.csar"); + if (scarUrl != null) { + File file = new File(scarUrl.getFile()); + toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); + } + + } + + @Test + public void testGetPropertyWhenPropertyHasListOfDataTypesAndPathIsNotEmpty() throws JToscaException { + NodeTemplate cp = toscaTemplate.getNodeTemplates().get(0) //Network Cloud VNF MOCK 0 + .getSubMappingToscaTemplate().getNodeTemplates().get(0) //abstract_testVM + 
.getSubMappingToscaTemplate().getNodeTemplates().get(0); //testVM_testVM_SRIOVtrunk1_port + + Property property = cp.getProperties().get("related_networks"); + List propertyValueList = property.getLeafPropertyValue("related_network_role"); + assertEquals(3, propertyValueList.size()); + assertTrue(propertyValueList.contains("cor_direct_2")); + assertTrue(propertyValueList.contains("sgi_direct_2")); + assertTrue(propertyValueList.contains("int_imbl_2")); + } + + @Test + public void testGetPropertyWhenPropertyHasDataTypeAndPathIsEmpty() { + NodeTemplate cp = toscaTemplate.getNodeTemplates().get(0) //Network Cloud VNF MOCK 0 + .getSubMappingToscaTemplate().getNodeTemplates().get(0) //abstract_testVM + .getSubMappingToscaTemplate().getNodeTemplates().get(1); //testVM_testVM_SRIOVNonTrunk0_port + + Property property = cp.getProperties().get("exCP_naming"); + List propertyValueList = property.getLeafPropertyValue(""); + assertTrue(propertyValueList.isEmpty()); + } + + @Test + public void testGetPropertyWhenPropertyHasSimpleTypeAndValueAsGetInputIsNotResolvedCorrectlyAndPathIsEmpty() { + NodeTemplate cp = toscaTemplate.getNodeTemplates().get(0) //Network Cloud VNF MOCK 0 + .getSubMappingToscaTemplate().getNodeTemplates().get(0) //abstract_testVM + .getSubMappingToscaTemplate().getNodeTemplates().get(1); //testVM_testVM_SRIOVNonTrunk0_port + + Property property = cp.getProperties().get("network"); + List propertyValueList = property.getLeafPropertyValue(""); + assertTrue(propertyValueList.isEmpty()); + } + + @Test + public void testGetPropertyWhenPropertyHasSimpleTypeAndPathIsEmpty() { + NodeTemplate cp = toscaTemplate.getNodeTemplates().get(0) //Network Cloud VNF MOCK 0 + .getSubMappingToscaTemplate().getNodeTemplates().get(0) //abstract_testVM + .getSubMappingToscaTemplate().getNodeTemplates().get(1); //testVM_testVM_SRIOVNonTrunk0_port + + Property property = cp.getProperties().get("subinterface_indicator"); + List propertyValueList = property.getLeafPropertyValue(""); + 
assertEquals(1, propertyValueList.size()); + assertEquals("false", propertyValueList.get(0)); + } + + + @Test + public void testGetPropertyWhenPropertyHasDataTypeAndPathIsNotEmpty() { + NodeTemplate cp = toscaTemplate.getNodeTemplates().get(0) //Network Cloud VNF MOCK 0 + .getSubMappingToscaTemplate().getNodeTemplates().get(0) //abstract_testVM + .getSubMappingToscaTemplate().getNodeTemplates().get(2); //testVM_testVM_OVS_port + + Property property = cp.getProperties().get("ip_requirements"); + List propertyValueList = property.getLeafPropertyValue("ip_version"); + assertEquals(1, propertyValueList.size()); + assertEquals("4", propertyValueList.get(0)); + } + + @Test + public void testGetPropertyWhenPropertyHasListOfDataTypesAndPathIsNull() { + NodeTemplate cp = toscaTemplate.getNodeTemplates().get(0) //Network Cloud VNF MOCK 0 + .getSubMappingToscaTemplate().getNodeTemplates().get(0) //abstract_testVM + .getSubMappingToscaTemplate().getNodeTemplates().get(2); //testVM_testVM_OVS_port + + Property property = cp.getProperties().get("ip_requirements"); + assertTrue(property.getLeafPropertyValue(null).isEmpty()); + } + + @Test + public void testGetPropertyWhenPropertyHasListOfDataTypesAndPathIsComplex() { + NodeTemplate cp = toscaTemplate.getNodeTemplates().get(0) //Network Cloud VNF MOCK 0 + .getSubMappingToscaTemplate().getNodeTemplates().get(0) //abstract_testVM + .getSubMappingToscaTemplate().getNodeTemplates().get(0); //testVM_testVM_SRIOVtrunk1_port + + Property property = cp.getProperties().get("ip_requirements"); + List propertyValueList = property.getLeafPropertyValue("ip_count_required#is_required"); + assertEquals(1, propertyValueList.size()); + assertEquals("false", propertyValueList.get(0)); + } + + @Test + public void testGetPropertyWhenPropertyHasListOfDataTypesAndPathIsWrong() { + NodeTemplate cp = toscaTemplate.getNodeTemplates().get(0) //Network Cloud VNF MOCK 0 + .getSubMappingToscaTemplate().getNodeTemplates().get(0) //abstract_testVM + 
.getSubMappingToscaTemplate().getNodeTemplates().get(0); //testVM_testVM_SRIOVtrunk1_port + + Property property = cp.getProperties().get("ip_requirements"); + List propertyValueList = property.getLeafPropertyValue("ip_count_required#is_required_1"); + assertEquals(0, propertyValueList.size()); + } + + @Test + public void testGetPropertyWhenPropertyHasDataTypeWithoutSchemaAndComplexPath() { + NodeTemplate cp = toscaTemplate.getNodeTemplates().get(0) //Network Cloud VNF MOCK 0 + .getSubMappingToscaTemplate().getNodeTemplates().get(0) //abstract_testVM + .getSubMappingToscaTemplate().getNodeTemplates().get(0); //testVM_testVM_SRIOVtrunk1_port + + Property property = cp.getProperties().get("mac_requirements"); + List propertyValueList = property.getLeafPropertyValue("mac_count_required#is_required"); + assertEquals(1, propertyValueList.size()); + assertEquals("false", propertyValueList.get(0)); + } + + @Test + public void testGetPropertyWhenPropertyHasDataTypeWithoutSchemaAndSimplePath() { + NodeTemplate cp = toscaTemplate.getNodeTemplates().get(0) //Network Cloud VNF MOCK 0 + .getSubMappingToscaTemplate().getNodeTemplates().get(0) //abstract_testVM + .getSubMappingToscaTemplate().getNodeTemplates().get(0); //testVM_testVM_SRIOVtrunk1_port + + Property property = cp.getProperties().get("mac_requirements"); + List propertyValueList = property.getLeafPropertyValue("mac_count_required"); + assertEquals(0, propertyValueList.size()); + } +} diff --git a/jtosca/src/test/java/org/onap/sdc/toscaparser/api/elements/EntityTypeTest.java b/jtosca/src/test/java/org/onap/sdc/toscaparser/api/elements/EntityTypeTest.java new file mode 100644 index 0000000..d65de28 --- /dev/null +++ b/jtosca/src/test/java/org/onap/sdc/toscaparser/api/elements/EntityTypeTest.java @@ -0,0 +1,75 @@ +/*- + * ============LICENSE_START======================================================= + * SDC + * ================================================================================ + * Copyright (C) 2019 AT&T 
Intellectual Property. All rights reserved. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * ============LICENSE_END========================================================= + */ + +package org.onap.sdc.toscaparser.api.elements; + +import org.junit.After; +import org.junit.Test; + +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.Map; + +import static org.junit.Assert.assertEquals; + +public class EntityTypeTest { + + private static final Map origMap = EntityType.TOSCA_DEF; + + @Test + public void testUpdateDefinitions() throws Exception { + + Map testData = new HashMap<>(); + testData.put("tosca.nodes.nfv.VNF", "{derived_from=tosca.nodes.Root, properties={id={type=string, description=ID of this VNF}, vendor={type=string, description=name of the vendor who generate this VNF}, version={type=version, description=version of the software for this VNF}}, requirements=[{virtualLink={capability=tosca.capabilities.nfv.VirtualLinkable, relationship=tosca.relationships.nfv.VirtualLinksTo, node=tosca.nodes.nfv.VL}}]}"); + testData.put("tosca.nodes.nfv.VDU", "{derived_from=tosca.nodes.Compute, capabilities={high_availability={type=tosca.capabilities.nfv.HA}, virtualbinding={type=tosca.capabilities.nfv.VirtualBindable}, monitoring_parameter={type=tosca.capabilities.nfv.Metric}}, requirements=[{high_availability={capability=tosca.capabilities.nfv.HA, 
relationship=tosca.relationships.nfv.HA, node=tosca.nodes.nfv.VDU, occurrences=[0, 1]}}]}"); + testData.put("tosca.nodes.nfv.CP", "{derived_from=tosca.nodes.network.Port, properties={type={type=string, required=false}}, requirements=[{virtualLink={capability=tosca.capabilities.nfv.VirtualLinkable, relationship=tosca.relationships.nfv.VirtualLinksTo, node=tosca.nodes.nfv.VL}}, {virtualBinding={capability=tosca.capabilities.nfv.VirtualBindable, relationship=tosca.relationships.nfv.VirtualBindsTo, node=tosca.nodes.nfv.VDU}}], attributes={address={type=string}}}"); + testData.put("tosca.nodes.nfv.VL", "{derived_from=tosca.nodes.network.Network, properties={vendor={type=string, required=true, description=name of the vendor who generate this VL}}, capabilities={virtual_linkable={type=tosca.capabilities.nfv.VirtualLinkable}}}"); + testData.put("tosca.nodes.nfv.VL.ELine", "{derived_from=tosca.nodes.nfv.VL, capabilities={virtual_linkable={occurrences=2}}}"); + testData.put("tosca.nodes.nfv.VL.ELAN", "{derived_from=tosca.nodes.nfv.VL}"); + testData.put("tosca.nodes.nfv.VL.ETree", "{derived_from=tosca.nodes.nfv.VL}"); + testData.put("tosca.nodes.nfv.FP", "{derived_from=tosca.nodes.Root, properties={policy={type=string, required=false, description=name of the vendor who generate this VL}}, requirements=[{forwarder={capability=tosca.capabilities.nfv.Forwarder, relationship=tosca.relationships.nfv.ForwardsTo}}]}"); + testData.put("tosca.groups.nfv.VNFFG", "{derived_from=tosca.groups.Root, properties={vendor={type=string, required=true, description=name of the vendor who generate this VNFFG}, version={type=string, required=true, description=version of this VNFFG}, number_of_endpoints={type=integer, required=true, description=count of the external endpoints included in this VNFFG}, dependent_virtual_link={type=list, entry_schema={type=string}, required=true, description=Reference to a VLD used in this Forwarding Graph}, connection_point={type=list, entry_schema={type=string}, 
required=true, description=Reference to Connection Points forming the VNFFG}, constituent_vnfs={type=list, entry_schema={type=string}, required=true, description=Reference to a list of VNFD used in this VNF Forwarding Graph}}}"); + testData.put("tosca.relationships.nfv.VirtualLinksTo", "{derived_from=tosca.relationships.network.LinksTo, valid_target_types=[tosca.capabilities.nfv.VirtualLinkable]}"); + testData.put("tosca.relationships.nfv.VirtualBindsTo", "{derived_from=tosca.relationships.network.BindsTo, valid_target_types=[tosca.capabilities.nfv.VirtualBindable]}"); + testData.put("tosca.relationships.nfv.HA", "{derived_from=tosca.relationships.Root, valid_target_types=[tosca.capabilities.nfv.HA]}"); + testData.put("tosca.relationships.nfv.Monitor", "{derived_from=tosca.relationships.ConnectsTo, valid_target_types=[tosca.capabilities.nfv.Metric]}"); + testData.put("tosca.relationships.nfv.ForwardsTo", "{derived_from=tosca.relationships.root, valid_target_types=[tosca.capabilities.nfv.Forwarder]}"); + testData.put("tosca.capabilities.nfv.VirtualLinkable", "{derived_from=tosca.capabilities.network.Linkable}"); + testData.put("tosca.capabilities.nfv.VirtualBindable", "{derived_from=tosca.capabilities.network.Bindable}"); + testData.put("tosca.capabilities.nfv.HA", "{derived_from=tosca.capabilities.Root, valid_source_types=[tosca.nodes.nfv.VDU]}"); + testData.put("tosca.capabilities.nfv.HA.ActiveActive", "{derived_from=tosca.capabilities.nfv.HA}"); + testData.put("tosca.capabilities.nfv.HA.ActivePassive", "{derived_from=tosca.capabilities.nfv.HA}"); + testData.put("tosca.capabilities.nfv.Metric", "{derived_from=tosca.capabilities.Root}"); + testData.put("tosca.capabilities.nfv.Forwarder", "{derived_from=tosca.capabilities.Root}"); + + Map expectedDefMap = origMap; + expectedDefMap.putAll(testData); + EntityType.updateDefinitions("tosca_simple_profile_for_nfv_1_0_0"); + + assertEquals(expectedDefMap, EntityType.TOSCA_DEF); + + } + + @After + public void tearDown() 
throws Exception { + EntityType.TOSCA_DEF = (LinkedHashMap) origMap; + } + +} diff --git a/jtosca/src/test/java/org/onap/sdc/toscaparser/api/functions/GetInputTest.java b/jtosca/src/test/java/org/onap/sdc/toscaparser/api/functions/GetInputTest.java new file mode 100644 index 0000000..98e5102 --- /dev/null +++ b/jtosca/src/test/java/org/onap/sdc/toscaparser/api/functions/GetInputTest.java @@ -0,0 +1,96 @@ +/*- + * ============LICENSE_START======================================================= + * Copyright (c) 2019 Fujitsu Limited. + * ================================================================================ + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * ============LICENSE_END========================================================= + */ +package org.onap.sdc.toscaparser.api.functions; + +import org.junit.Test; +import org.onap.sdc.toscaparser.api.*; +import org.onap.sdc.toscaparser.api.common.JToscaException; +import org.onap.sdc.toscaparser.api.elements.constraints.Schema; +import org.onap.sdc.toscaparser.api.parameters.Input; +import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; + +import java.io.File; +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.List; + +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.CoreMatchers.notNullValue; +import static org.junit.Assert.*; + +public class GetInputTest { + + private static final String TEST_FILENAME = "csars/listed_input.csar"; + private static final String TEST_FILENAME_NG = "csars/listed_input_ng.csar"; + private static final String TEST_PROPERTY_ROLE = "role"; + private static final String TEST_PROPERTY_LONGITUDE = "longitude"; + private static final String TEST_DEFAULT_VALUE = "dsvpn-hub"; + private static final String TEST_DESCRIPTION_VALUE = "This is used for SDWAN only"; + private static final String TEST_INPUT_TYPE = "type"; + private static final String TEST_INPUT_SCHEMA_TYPE = "tosca.datatypes.siteresource.site"; + private static final String TEST_TOSTRING = "get_input:[sites, 1, longitude]"; + private static final String TEST_INPUT_SITES = "sites"; + + @Test + public void validate() throws JToscaException { + String fileStr = JToscaImportTest.class.getClassLoader().getResource(TEST_FILENAME).getFile(); + File file = new File(fileStr); + ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null, false); + NodeTemplate nodeTemplate = toscaTemplate.getNodeTemplates().get(1).getSubMappingToscaTemplate().getNodeTemplates().get(0); + ArrayList inputs = toscaTemplate.getNodeTemplates().get(1).getSubMappingToscaTemplate().getInputs(); + LinkedHashMap properties = 
nodeTemplate.getProperties(); + assertThat(properties, notNullValue()); + assertThat(properties.size(), is(14)); + + Property property = properties.get(TEST_PROPERTY_ROLE); + assertThat(properties, notNullValue()); + assertThat(property.getName(), is(TEST_PROPERTY_ROLE)); + assertThat(property.getType(), is(Schema.STRING)); + assertThat(property.getDefault(), is(TEST_DEFAULT_VALUE)); + assertThat(property.getDescription(), is(TEST_DESCRIPTION_VALUE)); + GetInput getInput = (GetInput) property.getValue(); + assertThat(getInput.getEntrySchema().get(TEST_INPUT_TYPE).toString(), is(TEST_INPUT_SCHEMA_TYPE)); + + property = properties.get(TEST_PROPERTY_LONGITUDE); + assertThat(properties, notNullValue()); + assertThat(property.getName(), is(TEST_PROPERTY_LONGITUDE)); + assertThat(property.getValue().toString(), is(TEST_TOSTRING)); + getInput = (GetInput) property.getValue(); + ArrayList getInputArguments = getInput.getArguments(); + assertThat(getInputArguments.size(), is(3)); + assertThat(getInputArguments.get(0).toString(), is(TEST_INPUT_SITES)); + assertThat(getInputArguments.get(1).toString(), is("1")); + assertThat(getInputArguments.get(2).toString(), is(TEST_PROPERTY_LONGITUDE)); + + Input in = inputs.get(10); + assertThat(in.getEntrySchema().get(TEST_INPUT_TYPE), is(TEST_INPUT_SCHEMA_TYPE)); + assertThat(in.getName(), is(TEST_INPUT_SITES)); + assertThat(in.getType(), is(Input.LIST)); + } + + @Test + public void validate_ng() throws JToscaException { + //invalid file + String fileStr = JToscaImportTest.class.getClassLoader().getResource(TEST_FILENAME_NG).getFile(); + File file = new File(fileStr); + ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null, false); + + List issues = ThreadLocalsHolder.getCollector().getValidationIssueReport(); + assertTrue(issues.stream().anyMatch(x -> x.contains("JE282"))); + } +} diff --git a/jtosca/src/test/resources/csars/csar_hello_world.csar 
b/jtosca/src/test/resources/csars/csar_hello_world.csar new file mode 100644 index 0000000..43ffbbc Binary files /dev/null and b/jtosca/src/test/resources/csars/csar_hello_world.csar differ diff --git a/jtosca/src/test/resources/csars/dataTypes-test-service.csar b/jtosca/src/test/resources/csars/dataTypes-test-service.csar new file mode 100644 index 0000000..b4de177 Binary files /dev/null and b/jtosca/src/test/resources/csars/dataTypes-test-service.csar differ diff --git a/jtosca/src/test/resources/csars/emptyCsar.csar b/jtosca/src/test/resources/csars/emptyCsar.csar new file mode 100644 index 0000000..15cb0ec Binary files /dev/null and b/jtosca/src/test/resources/csars/emptyCsar.csar differ diff --git a/jtosca/src/test/resources/csars/listed_input.csar b/jtosca/src/test/resources/csars/listed_input.csar new file mode 100644 index 0000000..445b91a Binary files /dev/null and b/jtosca/src/test/resources/csars/listed_input.csar differ diff --git a/jtosca/src/test/resources/csars/listed_input_ng.csar b/jtosca/src/test/resources/csars/listed_input_ng.csar new file mode 100644 index 0000000..6b3402e Binary files /dev/null and b/jtosca/src/test/resources/csars/listed_input_ng.csar differ diff --git a/jtosca/src/test/resources/csars/resource-Spgw-csar-ZTE.csar b/jtosca/src/test/resources/csars/resource-Spgw-csar-ZTE.csar new file mode 100644 index 0000000..58c3ddd Binary files /dev/null and b/jtosca/src/test/resources/csars/resource-Spgw-csar-ZTE.csar differ diff --git a/jtosca/src/test/resources/csars/sdc-onboarding_csar.csar b/jtosca/src/test/resources/csars/sdc-onboarding_csar.csar new file mode 100644 index 0000000..f12605d Binary files /dev/null and b/jtosca/src/test/resources/csars/sdc-onboarding_csar.csar differ diff --git a/jtosca/src/test/resources/csars/service-AdiodVmxVpeBvService-csar.csar b/jtosca/src/test/resources/csars/service-AdiodVmxVpeBvService-csar.csar new file mode 100644 index 0000000..28aa6f4 Binary files /dev/null and 
b/jtosca/src/test/resources/csars/service-AdiodVmxVpeBvService-csar.csar differ diff --git a/jtosca/src/test/resources/csars/service-JennyVtsbcKarunaSvc-csar.csar b/jtosca/src/test/resources/csars/service-JennyVtsbcKarunaSvc-csar.csar new file mode 100644 index 0000000..ee01780 Binary files /dev/null and b/jtosca/src/test/resources/csars/service-JennyVtsbcKarunaSvc-csar.csar differ diff --git a/jtosca/src/test/resources/csars/service-NetworkCloudVnfServiceMock-csar.csar b/jtosca/src/test/resources/csars/service-NetworkCloudVnfServiceMock-csar.csar new file mode 100644 index 0000000..aabf83c Binary files /dev/null and b/jtosca/src/test/resources/csars/service-NetworkCloudVnfServiceMock-csar.csar differ diff --git a/jtosca/src/test/resources/csars/tmpCSAR_Huawei_vSPGW_fixed.csar b/jtosca/src/test/resources/csars/tmpCSAR_Huawei_vSPGW_fixed.csar new file mode 100644 index 0000000..9dc29c7 Binary files /dev/null and b/jtosca/src/test/resources/csars/tmpCSAR_Huawei_vSPGW_fixed.csar differ diff --git a/jtosca/src/test/resources/csars/tmpCSAR_Huawei_vSPGW_without_required_inputs.csar b/jtosca/src/test/resources/csars/tmpCSAR_Huawei_vSPGW_without_required_inputs.csar new file mode 100644 index 0000000..194fabb Binary files /dev/null and b/jtosca/src/test/resources/csars/tmpCSAR_Huawei_vSPGW_without_required_inputs.csar differ diff --git a/jtosca/version.properties b/jtosca/version.properties new file mode 100644 index 0000000..0f0fb2b --- /dev/null +++ b/jtosca/version.properties @@ -0,0 +1,13 @@ +########################################################### +# Versioning variables +# Note that these variables cannot be structured (e.g. : version.release or version.snapshot etc... 
) +# because they are used in Jenkins, whose plug-in doesn't support + +major=1 +minor=6 +patch=0 + +base_version=${major}.${minor}.${patch} + +release_version=${base_version} +snapshot_version=${base_version}-SNAPSHOT diff --git a/pom.xml b/pom.xml deleted file mode 100644 index 8e02299..0000000 --- a/pom.xml +++ /dev/null @@ -1,284 +0,0 @@ - - 4.0.0 - - org.onap.sdc.jtosca - jtosca - 1.6.0-SNAPSHOT - sdc-jtosca - - - org.onap.oparent - oparent - 2.0.0 - - - - - - - - - UTF-8 - - - - 2.1 - - - - - - - ${project.basedir}/target/jacoco.exec - https://nexus.onap.org - /content/sites/site/org/onap/sdc/jtosca/${project.version} - snapshots - releases - - ${project.build.sourceEncoding} - true - ${project.basedir} - . - **/scripts/**/* - **/test/**/*,**/tests/**/* - app/**/*.js,server-mock/**/*.js,src/**/*.js,src/main/**/*.java - ${project.version} - - - - - - - org.yaml - snakeyaml - 1.14 - compile - - - - org.slf4j - slf4j-api - 1.7.25 - - - - - - org.hamcrest - hamcrest - ${hamcrest.version} - test - - - org.hamcrest - hamcrest-library - ${hamcrest.version} - test - - - junit - junit - 4.12 - test - - - - com.opencsv - opencsv - 3.10 - test - - - - - org.apache.commons - commons-io - 1.3.2 - - - - org.reflections - reflections - 0.9.11 - - - com.google.guava - guava - - - - - com.google.guava - guava - compile - 25.1-jre - - - - - - - - - org.apache.maven.plugins - maven-javadoc-plugin - 2.10.4 - - false - org.umlgraph.doclet.UmlGraphDoc - - org.umlgraph - umlgraph - 5.6 - - -views - true - - - - - - - - - maven-checkstyle-plugin - 2.17 - - checkstyle-suppressions.xml - checkstyle.suppressions.file - - - - org.apache.maven.plugins - maven-site-plugin - 3.4 - - - org.apache.maven.wagon - wagon-webdav-jackrabbit - 2.10 - - - - - - org.jacoco - jacoco-maven-plugin - 0.7.8 - - - - prepare-agent - - prepare-agent - - - ${sonar.jacoco.reportPath} - - - - - - - org.apache.maven.plugins - maven-compiler-plugin - 2.5.1 - true - - 1.8 - 1.8 - - - - org.apache.maven.plugins - 
maven-javadoc-plugin - 2.10.3 - - - - - org.apache.maven.plugins - maven-surefire-plugin - 2.19.1 - - - */* - - - - - org.sonarsource.scanner.maven - sonar-maven-plugin - 3.0.2 - - - com.github.sylvainlaurent.maven - yaml-json-validator-maven-plugin - 1.0.1 - - - validate - validate - - validate - - - - - - src/main/resources/**/*.y*ml - src/test/resources/**/*.y*ml - - - - - src/main/resources/**/*.json - src/test/resources/**/*.json - - - - - - - - - - - - - central - Official Maven repository - http://repo2.maven.org/maven2/ - - - onap-releases - Release Repository - ${nexus.proxy}/content/repositories/releases/ - - - onap-snapshots - Snapshots Repository - ${nexus.proxy}/content/repositories/snapshots/ - - - - - - onap-releases - Release Repository - ${nexus.proxy}/content/repositories/${releases.path}/ - - - onap-snapshots - Snapshot Repository - ${nexus.proxy}/content/repositories/${snapshots.path}/ - - - onap-site - dav:${nexus.proxy}${sitePath} - - - - diff --git a/src/main/java/org/onap/sdc/toscaparser/api/CapabilityAssignment.java b/src/main/java/org/onap/sdc/toscaparser/api/CapabilityAssignment.java deleted file mode 100644 index bb7b47d..0000000 --- a/src/main/java/org/onap/sdc/toscaparser/api/CapabilityAssignment.java +++ /dev/null @@ -1,174 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * SDC - * ================================================================================ - * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package org.onap.sdc.toscaparser.api; - -import java.util.ArrayList; -import java.util.LinkedHashMap; -import java.util.Map; - -import org.onap.sdc.toscaparser.api.elements.CapabilityTypeDef; -import org.onap.sdc.toscaparser.api.elements.PropertyDef; - -public class CapabilityAssignment { - - private String name; - private LinkedHashMap _properties; - private CapabilityTypeDef _definition; - private LinkedHashMap _customDef; - - public CapabilityAssignment(String cname, - LinkedHashMap cproperties, - CapabilityTypeDef cdefinition, LinkedHashMap customDef) { - name = cname; - _properties = cproperties; - _definition = cdefinition; - _customDef = customDef; - } - - /** - * Get the properties list for capability - * - * @return list of property objects for capability - */ - public ArrayList getPropertiesObjects() { - // Return a list of property objects - ArrayList properties = new ArrayList(); - LinkedHashMap props = _properties; - if (props != null) { - for (Map.Entry me : props.entrySet()) { - String pname = me.getKey(); - Object pvalue = me.getValue(); - - LinkedHashMap propsDef = _definition.getPropertiesDef(); - if (propsDef != null) { - PropertyDef pd = (PropertyDef) propsDef.get(pname); - if (pd != null) { - properties.add(new Property(pname, pvalue, pd.getSchema(), _customDef)); - } - } - } - } - return properties; - } - - /** - * Get the map of properties - * - * @return map of all properties contains dictionary of property name and property object - 
*/ - public LinkedHashMap getProperties() { - // Return a dictionary of property name-object pairs - LinkedHashMap npps = new LinkedHashMap<>(); - for (Property p : getPropertiesObjects()) { - npps.put(p.getName(), p); - } - return npps; - } - - /** - * Get the property value by name - * - * @param pname - the property name for capability - * @return the property value for this name - */ - public Object getPropertyValue(String pname) { - // Return the value of a given property name - LinkedHashMap props = getProperties(); - if (props != null && props.get(pname) != null) { - return props.get(name).getValue(); - } - return null; - } - - /** - * Get the name for capability - * - * @return the name for capability - */ - public String getName() { - return name; - } - - /** - * Get the definition for capability - * - * @return CapabilityTypeDef - contain definition for capability - */ - public CapabilityTypeDef getDefinition() { - return _definition; - } - - /** - * Set the property for capability - * - * @param pname - the property name for capability to set - * @param pvalue - the property valiue for capability to set - */ - public void setProperty(String pname, Object pvalue) { - _properties.put(pname, pvalue); - } - - @Override - public String toString() { - return "CapabilityAssignment{" + - "name='" + name + '\'' + - ", _properties=" + _properties + - ", _definition=" + _definition + - '}'; - } -} - -/*python - -from toscaparser.properties import Property - - -class CapabilityAssignment(object): - '''TOSCA built-in capabilities type.''' - - def __init__(self, name, properties, definition): - self.name = name - self._properties = properties - self.definition = definition - - def get_properties_objects(self): - '''Return a list of property objects.''' - properties = [] - props = self._properties - if props: - for name, value in props.items(): - props_def = self.definition.get_properties_def() - if props_def and name in props_def: - properties.append(Property(name, 
value, - props_def[name].schema)) - return properties - - def get_properties(self): - '''Return a dictionary of property name-object pairs.''' - return {prop.name: prop - for prop in self.get_properties_objects()} - - def get_property_value(self, name): - '''Return the value of a given property name.''' - props = self.get_properties() - if props and name in props: - return props[name].value -*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/CapabilityAssignments.java b/src/main/java/org/onap/sdc/toscaparser/api/CapabilityAssignments.java deleted file mode 100644 index 28ada96..0000000 --- a/src/main/java/org/onap/sdc/toscaparser/api/CapabilityAssignments.java +++ /dev/null @@ -1,72 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * SDC - * ================================================================================ - * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package org.onap.sdc.toscaparser.api; - -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.stream.Collectors; - -public class CapabilityAssignments { - - private Map capabilityAssignments; - - public CapabilityAssignments(Map capabilityAssignments) { - this.capabilityAssignments = capabilityAssignments != null ? new HashMap<>(capabilityAssignments) : new HashMap<>(); - } - - /** - * Get all capability assignments for node template.
- * This object can be either the original one, holding all capability assignments for this node template,or a filtered one, holding a filtered subset.
- * - * @return list of capability assignments for the node template.
- * If there are no capability assignments, empty list is returned. - */ - public List getAll() { - return new ArrayList<>(capabilityAssignments.values()); - } - - /** - * Filter capability assignments by capability tosca type. - * - * @param type - The tosca type of capability assignments. - * @return CapabilityAssignments object, containing capability assignments of this type.
- * If no such found, filtering will result in an empty collection. - */ - public CapabilityAssignments getCapabilitiesByType(String type) { - Map capabilityAssignmentsMap = capabilityAssignments.entrySet().stream() - .filter(cap -> cap.getValue().getDefinition().getType().equals(type)).collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); - - return new CapabilityAssignments(capabilityAssignmentsMap); - } - - /** - * Get capability assignment by capability name. - * - * @param name - The name of capability assignment - * @return capability assignment with this name, or null if no such capability assignment was found. - */ - public CapabilityAssignment getCapabilityByName(String name) { - return capabilityAssignments.get(name); - } - -} diff --git a/src/main/java/org/onap/sdc/toscaparser/api/DataEntity.java b/src/main/java/org/onap/sdc/toscaparser/api/DataEntity.java deleted file mode 100644 index e95fe72..0000000 --- a/src/main/java/org/onap/sdc/toscaparser/api/DataEntity.java +++ /dev/null @@ -1,457 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * SDC - * ================================================================================ - * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package org.onap.sdc.toscaparser.api; - -import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; -import org.onap.sdc.toscaparser.api.elements.DataType; -import org.onap.sdc.toscaparser.api.elements.PortSpec; -import org.onap.sdc.toscaparser.api.elements.PropertyDef; -import org.onap.sdc.toscaparser.api.elements.ScalarUnitFrequency; -import org.onap.sdc.toscaparser.api.elements.ScalarUnitSize; -import org.onap.sdc.toscaparser.api.elements.ScalarUnitTime; -import org.onap.sdc.toscaparser.api.elements.constraints.Constraint; -import org.onap.sdc.toscaparser.api.elements.constraints.Schema; -import org.onap.sdc.toscaparser.api.functions.Function; -import org.onap.sdc.toscaparser.api.utils.TOSCAVersionProperty; -import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; -import org.onap.sdc.toscaparser.api.utils.ValidateUtils; - -import java.util.ArrayList; -import java.util.LinkedHashMap; -import java.util.List; - -public class DataEntity { - // A complex data value entity - - private LinkedHashMap customDef; - private DataType dataType; - private LinkedHashMap schema; - private Object value; - private String propertyName; - - public DataEntity(String _dataTypeName, Object _valueDict, - LinkedHashMap _customDef, String _propName) { - - customDef = _customDef; - dataType = new DataType(_dataTypeName, _customDef); - schema = dataType.getAllProperties(); - value = _valueDict; - propertyName = _propName; - } - - @SuppressWarnings("unchecked") - public Object validate() { - // Validate the value by the definition of the datatype - - // A datatype can not have both 'type' and 'properties' definitions. 
- // If the datatype has 'type' definition - if (dataType.getValueType() != null) { - value = DataEntity.validateDatatype(dataType.getValueType(), value, null, customDef, null); - Schema schemaCls = new Schema(propertyName, dataType.getDefs()); - for (Constraint constraint : schemaCls.getConstraints()) { - constraint.validate(value); - } - } - // If the datatype has 'properties' definition - else { - if (!(value instanceof LinkedHashMap)) { - //ERROR under investigation - String checkedVal = value != null ? value.toString() : null; - - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE001", String.format( - "TypeMismatchError: \"%s\" is not a map. The type is \"%s\"", - checkedVal, dataType.getType()))); - - if (value instanceof List && ((List) value).size() > 0) { - value = ((List) value).get(0); - } - - if (!(value instanceof LinkedHashMap)) { - return value; - } - } - - - LinkedHashMap valueDict = (LinkedHashMap) value; - ArrayList allowedProps = new ArrayList<>(); - ArrayList requiredProps = new ArrayList<>(); - LinkedHashMap defaultProps = new LinkedHashMap<>(); - if (schema != null) { - allowedProps.addAll(schema.keySet()); - for (String name : schema.keySet()) { - PropertyDef propDef = schema.get(name); - if (propDef.isRequired()) { - requiredProps.add(name); - } - if (propDef.getDefault() != null) { - defaultProps.put(name, propDef.getDefault()); - } - } - } - - // check allowed field - for (String valueKey : valueDict.keySet()) { - //1710 devlop JSON validation - if (!("json").equals(dataType.getType()) && !allowedProps.contains(valueKey)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE100", String.format( - "UnknownFieldError: Data value of type \"%s\" contains unknown field \"%s\"", - dataType.getType(), valueKey))); - } - } - - // check default field - for (String defKey : defaultProps.keySet()) { - Object defValue = defaultProps.get(defKey); - if (valueDict.get(defKey) == 
null) { - valueDict.put(defKey, defValue); - } - - } - - // check missing field - ArrayList missingProp = new ArrayList<>(); - for (String reqKey : requiredProps) { - if (!valueDict.keySet().contains(reqKey)) { - missingProp.add(reqKey); - } - } - if (missingProp.size() > 0) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE003", String.format( - "MissingRequiredFieldError: Data value of type \"%s\" is missing required field(s) \"%s\"", - dataType.getType(), missingProp.toString()))); - } - - // check every field - for (String vname : valueDict.keySet()) { - Object vvalue = valueDict.get(vname); - LinkedHashMap schemaName = _findSchema(vname); - if (schemaName == null) { - continue; - } - Schema propSchema = new Schema(vname, schemaName); - // check if field value meets type defined - DataEntity.validateDatatype(propSchema.getType(), - vvalue, - propSchema.getEntrySchema(), - customDef, - null); - - // check if field value meets constraints defined - if (propSchema.getConstraints() != null) { - for (Constraint constraint : propSchema.getConstraints()) { - if (vvalue instanceof ArrayList) { - for (Object val : (ArrayList) vvalue) { - constraint.validate(val); - } - } else { - constraint.validate(vvalue); - } - } - } - } - } - return value; - } - - private LinkedHashMap _findSchema(String name) { - if (schema != null && schema.get(name) != null) { - return schema.get(name).getSchema(); - } - return null; - } - - public static Object validateDatatype(String type, - Object value, - LinkedHashMap entrySchema, - LinkedHashMap customDef, - String propName) { - // Validate value with given type - - // If type is list or map, validate its entry by entry_schema(if defined) - // If type is a user-defined complex datatype, custom_def is required. 
- - if (Function.isFunction(value)) { - return value; - } else if (type == null) { - //NOT ANALYZED - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE002", String.format( - "MissingType: Type is missing for value \"%s\"", - value.toString()))); - return value; - } else if (type.equals(Schema.STRING)) { - return ValidateUtils.validateString(value); - } else if (type.equals(Schema.INTEGER)) { - return ValidateUtils.validateInteger(value); - } else if (type.equals(Schema.FLOAT)) { - return ValidateUtils.validateFloat(value); - } else if (type.equals(Schema.NUMBER)) { - return ValidateUtils.validateNumeric(value); - } else if (type.equals(Schema.BOOLEAN)) { - return ValidateUtils.validateBoolean(value); - } else if (type.equals(Schema.RANGE)) { - return ValidateUtils.validateRange(value); - } else if (type.equals(Schema.TIMESTAMP)) { - ValidateUtils.validateTimestamp(value); - return value; - } else if (type.equals(Schema.LIST)) { - ValidateUtils.validateList(value); - if (entrySchema != null) { - DataEntity.validateEntry(value, entrySchema, customDef); - } - return value; - } else if (type.equals(Schema.SCALAR_UNIT_SIZE)) { - return (new ScalarUnitSize(value)).validateScalarUnit(); - } else if (type.equals(Schema.SCALAR_UNIT_FREQUENCY)) { - return (new ScalarUnitFrequency(value)).validateScalarUnit(); - } else if (type.equals(Schema.SCALAR_UNIT_TIME)) { - return (new ScalarUnitTime(value)).validateScalarUnit(); - } else if (type.equals(Schema.VERSION)) { - return (new TOSCAVersionProperty(value.toString())).getVersion(); - } else if (type.equals(Schema.MAP)) { - ValidateUtils.validateMap(value); - if (entrySchema != null) { - DataEntity.validateEntry(value, entrySchema, customDef); - } - return value; - } else if (type.equals(Schema.PORTSPEC)) { - // tODO(TBD) bug 1567063, validate source & target as PortDef type - // as complex types not just as integers - PortSpec.validateAdditionalReq(value, propName, customDef); - } else { - 
DataEntity data = new DataEntity(type, value, customDef, null); - return data.validate(); - } - - return value; - } - - @SuppressWarnings("unchecked") - public static Object validateEntry(Object value, - LinkedHashMap entrySchema, - LinkedHashMap customDef) { - - // Validate entries for map and list - Schema schema = new Schema(null, entrySchema); - Object valueob = value; - ArrayList valueList = null; - if (valueob instanceof LinkedHashMap) { - valueList = new ArrayList(((LinkedHashMap) valueob).values()); - } else if (valueob instanceof ArrayList) { - valueList = (ArrayList) valueob; - } - if (valueList != null) { - for (Object v : valueList) { - DataEntity.validateDatatype(schema.getType(), v, schema.getEntrySchema(), customDef, null); - if (schema.getConstraints() != null) { - for (Constraint constraint : schema.getConstraints()) { - constraint.validate(v); - } - } - } - } - return value; - } - - @Override - public String toString() { - return "DataEntity{" + - "customDef=" + customDef + - ", dataType=" + dataType + - ", schema=" + schema + - ", value=" + value + - ", propertyName='" + propertyName + '\'' + - '}'; - } -} - -/*python - -from toscaparser.common.exception import ValidationIssueCollector -from toscaparser.common.exception import MissingRequiredFieldError -from toscaparser.common.exception import TypeMismatchError -from toscaparser.common.exception import UnknownFieldError -from toscaparser.elements.constraints import Schema -from toscaparser.elements.datatype import DataType -from toscaparser.elements.portspectype import PortSpec -from toscaparser.elements.scalarunit import ScalarUnit_Frequency -from toscaparser.elements.scalarunit import ScalarUnit_Size -from toscaparser.elements.scalarunit import ScalarUnit_Time -from toscaparser.utils.gettextutils import _ -from toscaparser.utils import validateutils - - -class DataEntity(object): - '''A complex data value entity.''' - - def __init__(self, datatypename, value_dict, custom_def=None, - 
prop_name=None): - self.custom_def = custom_def - self.datatype = DataType(datatypename, custom_def) - self.schema = self.datatype.get_all_properties() - self.value = value_dict - self.property_name = prop_name - - def validate(self): - '''Validate the value by the definition of the datatype.''' - - # A datatype can not have both 'type' and 'properties' definitions. - # If the datatype has 'type' definition - if self.datatype.value_type: - self.value = DataEntity.validate_datatype(self.datatype.value_type, - self.value, - None, - self.custom_def) - schema = Schema(self.property_name, self.datatype.defs) - for constraint in schema.constraints: - constraint.validate(self.value) - # If the datatype has 'properties' definition - else: - if not isinstance(self.value, dict): - ValidationIssueCollector.appendException( - TypeMismatchError(what=self.value, - type=self.datatype.type)) - allowed_props = [] - required_props = [] - default_props = {} - if self.schema: - allowed_props = self.schema.keys() - for name, prop_def in self.schema.items(): - if prop_def.required: - required_props.append(name) - if prop_def.default: - default_props[name] = prop_def.default - - # check allowed field - for value_key in list(self.value.keys()): - if value_key not in allowed_props: - ValidationIssueCollector.appendException( - UnknownFieldError(what=(_('Data value of type "%s"') - % self.datatype.type), - field=value_key)) - - # check default field - for def_key, def_value in list(default_props.items()): - if def_key not in list(self.value.keys()): - self.value[def_key] = def_value - - # check missing field - missingprop = [] - for req_key in required_props: - if req_key not in list(self.value.keys()): - missingprop.append(req_key) - if missingprop: - ValidationIssueCollector.appendException( - MissingRequiredFieldError( - what=(_('Data value of type "%s"') - % self.datatype.type), required=missingprop)) - - # check every field - for name, value in list(self.value.items()): - schema_name = 
self._find_schema(name) - if not schema_name: - continue - prop_schema = Schema(name, schema_name) - # check if field value meets type defined - DataEntity.validate_datatype(prop_schema.type, value, - prop_schema.entry_schema, - self.custom_def) - # check if field value meets constraints defined - if prop_schema.constraints: - for constraint in prop_schema.constraints: - if isinstance(value, list): - for val in value: - constraint.validate(val) - else: - constraint.validate(value) - - return self.value - - def _find_schema(self, name): - if self.schema and name in self.schema.keys(): - return self.schema[name].schema - - @staticmethod - def validate_datatype(type, value, entry_schema=None, custom_def=None, - prop_name=None): - '''Validate value with given type. - - If type is list or map, validate its entry by entry_schema(if defined) - If type is a user-defined complex datatype, custom_def is required. - ''' - from toscaparser.functions import is_function - if is_function(value): - return value - if type == Schema.STRING: - return validateutils.validate_string(value) - elif type == Schema.INTEGER: - return validateutils.validate_integer(value) - elif type == Schema.FLOAT: - return validateutils.validate_float(value) - elif type == Schema.NUMBER: - return validateutils.validate_numeric(value) - elif type == Schema.BOOLEAN: - return validateutils.validate_boolean(value) - elif type == Schema.RANGE: - return validateutils.validate_range(value) - elif type == Schema.TIMESTAMP: - validateutils.validate_timestamp(value) - return value - elif type == Schema.LIST: - validateutils.validate_list(value) - if entry_schema: - DataEntity.validate_entry(value, entry_schema, custom_def) - return value - elif type == Schema.SCALAR_UNIT_SIZE: - return ScalarUnit_Size(value).validate_scalar_unit() - elif type == Schema.SCALAR_UNIT_FREQUENCY: - return ScalarUnit_Frequency(value).validate_scalar_unit() - elif type == Schema.SCALAR_UNIT_TIME: - return 
ScalarUnit_Time(value).validate_scalar_unit() - elif type == Schema.VERSION: - return validateutils.TOSCAVersionProperty(value).get_version() - elif type == Schema.MAP: - validateutils.validate_map(value) - if entry_schema: - DataEntity.validate_entry(value, entry_schema, custom_def) - return value - elif type == Schema.PORTSPEC: - # tODO(TBD) bug 1567063, validate source & target as PortDef type - # as complex types not just as integers - PortSpec.validate_additional_req(value, prop_name, custom_def) - else: - data = DataEntity(type, value, custom_def) - return data.validate() - - @staticmethod - def validate_entry(value, entry_schema, custom_def=None): - '''Validate entries for map and list.''' - schema = Schema(None, entry_schema) - valuelist = value - if isinstance(value, dict): - valuelist = list(value.values()) - for v in valuelist: - DataEntity.validate_datatype(schema.type, v, schema.entry_schema, - custom_def) - if schema.constraints: - for constraint in schema.constraints: - constraint.validate(v) - return value -*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/EntityTemplate.java b/src/main/java/org/onap/sdc/toscaparser/api/EntityTemplate.java deleted file mode 100644 index 93bfe2b..0000000 --- a/src/main/java/org/onap/sdc/toscaparser/api/EntityTemplate.java +++ /dev/null @@ -1,885 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * SDC - * ================================================================================ - * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package org.onap.sdc.toscaparser.api; - -import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; -import org.onap.sdc.toscaparser.api.elements.*; -import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; - - -import javax.annotation.Nullable; -import java.util.ArrayList; -import java.util.LinkedHashMap; -import java.util.Map; - -public abstract class EntityTemplate { - // Base class for TOSCA templates - - protected static final String DERIVED_FROM = "derived_from"; - protected static final String PROPERTIES = "properties"; - protected static final String REQUIREMENTS = "requirements"; - protected static final String INTERFACES = "interfaces"; - protected static final String CAPABILITIES = "capabilities"; - protected static final String TYPE = "type"; - protected static final String DESCRIPTION = "description"; - protected static final String DIRECTIVES = "directives"; - protected static final String ATTRIBUTES = "attributes"; - protected static final String ARTIFACTS = "artifacts"; - protected static final String NODE_FILTER = "node_filter"; - protected static final String COPY = "copy"; - - protected static final String SECTIONS[] = { - DERIVED_FROM, PROPERTIES, REQUIREMENTS, INTERFACES, - CAPABILITIES, TYPE, DESCRIPTION, DIRECTIVES, - ATTRIBUTES, ARTIFACTS, NODE_FILTER, COPY}; - - private static final String NODE = "node"; - private static final String CAPABILITY = "capability"; - private static final String RELATIONSHIP = "relationship"; 
- private static final String OCCURRENCES = "occurrences"; - - protected static final String REQUIREMENTS_SECTION[] = { - NODE, CAPABILITY, RELATIONSHIP, OCCURRENCES, NODE_FILTER}; - - //# Special key names - private static final String METADATA = "metadata"; - protected static final String SPECIAL_SECTIONS[] = {METADATA}; - - protected String name; - protected LinkedHashMap entityTpl; - protected LinkedHashMap customDef; - protected StatefulEntityType typeDefinition; - private ArrayList _properties; - private ArrayList _interfaces; - private ArrayList _requirements; - private ArrayList _capabilities; - - @Nullable - private NodeTemplate _parentNodeTemplate; - - // dummy constructor for subclasses that don't want super - public EntityTemplate() { - return; - } - - public EntityTemplate(String _name, - LinkedHashMap _template, - String _entityName, - LinkedHashMap _customDef) { - this(_name, _template, _entityName, _customDef, null); - } - - @SuppressWarnings("unchecked") - public EntityTemplate(String _name, - LinkedHashMap _template, - String _entityName, - LinkedHashMap _customDef, - NodeTemplate parentNodeTemplate) { - name = _name; - entityTpl = _template; - customDef = _customDef; - _validateField(entityTpl); - String type = (String) entityTpl.get("type"); - UnsupportedType.validateType(type); - if (_entityName.equals("node_type")) { - if (type != null) { - typeDefinition = new NodeType(type, customDef); - } else { - typeDefinition = null; - } - } - if (_entityName.equals("relationship_type")) { - Object relationship = _template.get("relationship"); - type = null; - if (relationship != null && relationship instanceof LinkedHashMap) { - type = (String) ((LinkedHashMap) relationship).get("type"); - } else if (relationship instanceof String) { - type = (String) entityTpl.get("relationship"); - } else { - type = (String) entityTpl.get("type"); - } - UnsupportedType.validateType(type); - typeDefinition = new RelationshipType(type, null, customDef); - } - if 
(_entityName.equals("policy_type")) { - if (type == null) { - //msg = (_('Policy definition of "%(pname)s" must have' - // ' a "type" ''attribute.') % dict(pname=name)) - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE140", String.format( - "ValidationError: Policy definition of \"%s\" must have a \"type\" attribute", name))); - } - typeDefinition = new PolicyType(type, customDef); - } - if (_entityName.equals("group_type")) { - if (type != null) { - typeDefinition = new GroupType(type, customDef); - } else { - typeDefinition = null; - } - } - _properties = null; - _interfaces = null; - _requirements = null; - _capabilities = null; - _parentNodeTemplate = parentNodeTemplate; - } - - public NodeTemplate getParentNodeTemplate() { - return _parentNodeTemplate; - } - - public String getType() { - if (typeDefinition != null) { - String clType = typeDefinition.getClass().getSimpleName(); - if (clType.equals("NodeType")) { - return (String) ((NodeType) typeDefinition).getType(); - } else if (clType.equals("PolicyType")) { - return (String) ((PolicyType) typeDefinition).getType(); - } else if (clType.equals("GroupType")) { - return (String) ((GroupType) typeDefinition).getType(); - } else if (clType.equals("RelationshipType")) { - return (String) ((RelationshipType) typeDefinition).getType(); - } - } - return null; - } - - public Object getParentType() { - if (typeDefinition != null) { - String clType = typeDefinition.getClass().getSimpleName(); - if (clType.equals("NodeType")) { - return ((NodeType) typeDefinition).getParentType(); - } else if (clType.equals("PolicyType")) { - return ((PolicyType) typeDefinition).getParentType(); - } else if (clType.equals("GroupType")) { - return ((GroupType) typeDefinition).getParentType(); - } else if (clType.equals("RelationshipType")) { - return ((RelationshipType) typeDefinition).getParentType(); - } - } - return null; - } - - @SuppressWarnings("unchecked") - public RequirementAssignments 
getRequirements() { - if (_requirements == null) { - _requirements = _createRequirements(); - } - return new RequirementAssignments(_requirements); - } - - private ArrayList _createRequirements() { - ArrayList reqs = new ArrayList<>(); - ArrayList> requirements = (ArrayList>) - typeDefinition.getValue(REQUIREMENTS, entityTpl, false); - if (requirements == null) { - requirements = new ArrayList<>(); - } - for (Map req : requirements) { - for (String reqName : req.keySet()) { - Object reqItem = req.get(reqName); - if (reqItem instanceof LinkedHashMap) { - Object rel = ((LinkedHashMap) reqItem).get("relationship"); -// LinkedHashMap relationship = rel instanceof LinkedHashMap ? (LinkedHashMap) rel : null; - String nodeName = ((LinkedHashMap) reqItem).get("node").toString(); - Object capability = ((LinkedHashMap) reqItem).get("capability"); - String capabilityString = capability != null ? capability.toString() : null; - - reqs.add(new RequirementAssignment(reqName, nodeName, capabilityString, rel)); - } else if (reqItem instanceof String) { //short notation - String nodeName = String.valueOf(reqItem); - reqs.add(new RequirementAssignment(reqName, nodeName)); - } - } - } - return reqs; - } - - public ArrayList getPropertiesObjects() { - // Return properties objects for this template - if (_properties == null) { - _properties = _createProperties(); - } - return _properties; - } - - public LinkedHashMap getProperties() { - LinkedHashMap props = new LinkedHashMap<>(); - for (Property po : getPropertiesObjects()) { - props.put(po.getName(), po); - } - return props; - } - - public Object getPropertyValue(String name) { - LinkedHashMap props = getProperties(); - Property p = props.get(name); - return p != null ? 
p.getValue() : null; - } - - public String getPropertyType(String name) { - Property property = getProperties().get(name); - if (property != null) { - return property.getType(); - } - return null; - } - - public ArrayList getInterfaces() { - if (_interfaces == null) { - _interfaces = _createInterfaces(); - } - return _interfaces; - } - - public ArrayList getCapabilitiesObjects() { - // Return capabilities objects for this template - if (_capabilities == null) { - _capabilities = _createCapabilities(); - } - return _capabilities; - - } - - public CapabilityAssignments getCapabilities() { - LinkedHashMap caps = new LinkedHashMap(); - for (CapabilityAssignment cap : getCapabilitiesObjects()) { - caps.put(cap.getName(), cap); - } - return new CapabilityAssignments(caps); - } - - public boolean isDerivedFrom(String typeStr) { - // Returns true if this object is derived from 'type_str'. - // False otherwise - - if (getType() == null) { - return false; - } else if (getType().equals(typeStr)) { - return true; - } else if (getParentType() != null) { - return ((EntityType) getParentType()).isDerivedFrom(typeStr); - } - return false; - } - - @SuppressWarnings("unchecked") - private ArrayList _createCapabilities() { - ArrayList capability = new ArrayList(); - LinkedHashMap caps = (LinkedHashMap) - ((EntityType) typeDefinition).getValue(CAPABILITIES, entityTpl, true); - if (caps != null) { - //?!? getCapabilities defined only for NodeType... 
- LinkedHashMap capabilities = null; - if (typeDefinition instanceof NodeType) { - capabilities = ((NodeType) typeDefinition).getCapabilities(); - } else if (typeDefinition instanceof GroupType) { - capabilities = ((GroupType) typeDefinition).getCapabilities(); - } - for (Map.Entry me : caps.entrySet()) { - String name = me.getKey(); - LinkedHashMap props = (LinkedHashMap) me.getValue(); - if (capabilities.get(name) != null) { - CapabilityTypeDef c = capabilities.get(name); // a CapabilityTypeDef - LinkedHashMap properties = new LinkedHashMap(); - // first use the definition default value - LinkedHashMap cprops = c.getProperties(); - if (cprops != null) { - for (Map.Entry cpe : cprops.entrySet()) { - String propertyName = cpe.getKey(); - LinkedHashMap propertyDef = (LinkedHashMap) cpe.getValue(); - Object dob = propertyDef.get("default"); - if (dob != null) { - properties.put(propertyName, dob); - - } - } - } - // then update (if available) with the node properties - LinkedHashMap pp = (LinkedHashMap) props.get("properties"); - if (pp != null) { - properties.putAll(pp); - } - CapabilityAssignment cap = new CapabilityAssignment(name, properties, c, customDef); - capability.add(cap); - } - } - } - return capability; - } - - protected void _validateProperties(LinkedHashMap template, StatefulEntityType entityType) { - @SuppressWarnings("unchecked") - LinkedHashMap properties = (LinkedHashMap) entityType.getValue(PROPERTIES, template, false); - _commonValidateProperties(entityType, properties); - } - - protected void _validateCapabilities() { - //BUG??? getCapabilities only defined in NodeType... 
- LinkedHashMap typeCapabilities = ((NodeType) typeDefinition).getCapabilities(); - ArrayList allowedCaps = new ArrayList(); - if (typeCapabilities != null) { - allowedCaps.addAll(typeCapabilities.keySet()); - } - @SuppressWarnings("unchecked") - LinkedHashMap capabilities = (LinkedHashMap) - ((EntityType) typeDefinition).getValue(CAPABILITIES, entityTpl, false); - if (capabilities != null) { - _commonValidateField(capabilities, allowedCaps, "capabilities"); - _validateCapabilitiesProperties(capabilities); - } - } - - @SuppressWarnings("unchecked") - private void _validateCapabilitiesProperties(LinkedHashMap capabilities) { - for (Map.Entry me : capabilities.entrySet()) { - String cap = me.getKey(); - LinkedHashMap props = (LinkedHashMap) me.getValue(); - CapabilityAssignment capability = getCapability(cap); - if (capability == null) { - continue; - } - CapabilityTypeDef capabilitydef = capability.getDefinition(); - _commonValidateProperties(capabilitydef, (LinkedHashMap) props.get(PROPERTIES)); - - // validating capability properties values - for (Property prop : getCapability(cap).getPropertiesObjects()) { - prop.validate(); - - if (cap.equals("scalable") && prop.getName().equals("default_instances")) { - LinkedHashMap propDict = (LinkedHashMap) props.get(PROPERTIES); - int minInstances = (int) propDict.get("min_instances"); - int maxInstances = (int) propDict.get("max_instances"); - int defaultInstances = (int) propDict.get("default_instances"); - if (defaultInstances < minInstances || defaultInstances > maxInstances) { - //err_msg = ('"properties" of template "%s": ' - // '"default_instances" value is not between ' - // '"min_instances" and "max_instances".' 
% - // self.name) - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE141", String.format( - "ValidationError: \"properties\" of template \"%s\": \"default_instances\" value is not between \"min_instances\" and \"max_instances\"", - name))); - } - } - } - } - } - - private void _commonValidateProperties(StatefulEntityType entityType, LinkedHashMap properties) { - ArrayList allowedProps = new ArrayList(); - ArrayList requiredProps = new ArrayList(); - for (PropertyDef p : entityType.getPropertiesDefObjects()) { - allowedProps.add(p.getName()); - // If property is 'required' and has no 'default' value then record - if (p.isRequired() && p.getDefault() == null) { - requiredProps.add(p.getName()); - } - } - // validate all required properties have values - if (properties != null) { - ArrayList reqPropsNoValueOrDefault = new ArrayList(); - _commonValidateField(properties, allowedProps, "properties"); - // make sure it's not missing any property required by a tosca type - for (String r : requiredProps) { - if (properties.get(r) == null) { - reqPropsNoValueOrDefault.add(r); - } - } - // Required properties found without value or a default value - if (!reqPropsNoValueOrDefault.isEmpty()) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE003", String.format( - "MissingRequiredFieldError: properties of template \"%s\" are missing field(s): %s", - name, reqPropsNoValueOrDefault.toString()))); - } - } else { - // Required properties in schema, but not in template - if (!requiredProps.isEmpty()) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE004", String.format( - "MissingRequiredFieldError2: properties of template \"%s\" are missing field(s): %s", - name, requiredProps.toString()))); - } - } - } - - @SuppressWarnings("unchecked") - private void _validateField(LinkedHashMap template) { - if (!(template instanceof LinkedHashMap)) { - 
ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE142", String.format( - "MissingRequiredFieldError: Template \"%s\" is missing required field \"%s\"", name, TYPE))); - return;//??? - } - boolean bBad = false; - Object relationship = ((LinkedHashMap) template).get("relationship"); - if (relationship != null) { - if (!(relationship instanceof String)) { - bBad = (((LinkedHashMap) relationship).get(TYPE) == null); - } else if (relationship instanceof String) { - bBad = (template.get("relationship") == null); - } - } else { - bBad = (template.get(TYPE) == null); - } - if (bBad) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE143", String.format( - "MissingRequiredFieldError: Template \"%s\" is missing required field \"%s\"", name, TYPE))); - } - } - - protected void _commonValidateField(LinkedHashMap schema, ArrayList allowedList, String section) { - for (String sname : schema.keySet()) { - boolean bFound = false; - for (String allowed : allowedList) { - if (sname.equals(allowed)) { - bFound = true; - break; - } - } - if (!bFound) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE144", String.format( - "UnknownFieldError: Section \"%s\" of template \"%s\" contains unknown field \"%s\"", section, name, sname))); - } - } - - } - - @SuppressWarnings("unchecked") - private ArrayList _createProperties() { - ArrayList props = new ArrayList(); - LinkedHashMap properties = (LinkedHashMap) - ((EntityType) typeDefinition).getValue(PROPERTIES, entityTpl, false); - if (properties == null) { - properties = new LinkedHashMap(); - } - for (Map.Entry me : properties.entrySet()) { - String pname = me.getKey(); - Object pvalue = me.getValue(); - LinkedHashMap propsDef = ((StatefulEntityType) typeDefinition).getPropertiesDef(); - if (propsDef != null && propsDef.get(pname) != null) { - PropertyDef pd = (PropertyDef) propsDef.get(pname); - Property prop = new Property(pname, 
pvalue, pd.getSchema(), customDef); - props.add(prop); - } - } - ArrayList pds = ((StatefulEntityType) typeDefinition).getPropertiesDefObjects(); - for (Object pdo : pds) { - PropertyDef pd = (PropertyDef) pdo; - if (pd.getDefault() != null && properties.get(pd.getName()) == null) { - Property prop = new Property(pd.getName(), pd.getDefault(), pd.getSchema(), customDef); - props.add(prop); - } - } - return props; - } - - @SuppressWarnings("unchecked") - private ArrayList _createInterfaces() { - ArrayList interfaces = new ArrayList<>(); - LinkedHashMap typeInterfaces = new LinkedHashMap(); - if (typeDefinition instanceof RelationshipType) { - if (entityTpl instanceof LinkedHashMap) { - typeInterfaces = (LinkedHashMap) entityTpl.get(INTERFACES); - if (typeInterfaces == null) { - for (String relName : entityTpl.keySet()) { - Object relValue = entityTpl.get(relName); - if (!relName.equals("type")) { - Object relDef = relValue; - LinkedHashMap rel = null; - if (relDef instanceof LinkedHashMap) { - Object relob = ((LinkedHashMap) relDef).get("relationship"); - if (relob instanceof LinkedHashMap) { - rel = (LinkedHashMap) relob; - } - } - if (rel != null) { - if (rel.get(INTERFACES) != null) { - typeInterfaces = (LinkedHashMap) rel.get(INTERFACES); - break; - } - } - } - } - } - } - } else { - typeInterfaces = (LinkedHashMap) - ((EntityType) typeDefinition).getValue(INTERFACES, entityTpl, false); - } - if (typeInterfaces != null) { - for (Map.Entry me : typeInterfaces.entrySet()) { - String interfaceType = me.getKey(); - LinkedHashMap value = (LinkedHashMap) me.getValue(); - for (Map.Entry ve : value.entrySet()) { - String op = ve.getKey(); - Object opDef = ve.getValue(); - InterfacesDef iface = new InterfacesDef((EntityType) typeDefinition, - interfaceType, - this, - op, - opDef); - interfaces.add(iface); - } - - } - } - return interfaces; - } - - public CapabilityAssignment getCapability(String name) { - // Provide named capability - // :param name: name of capability - 
// :return: capability object if found, None otherwise - return getCapabilities().getCapabilityByName(name); - } - - // getter - public String getName() { - return name; - } - - public StatefulEntityType getTypeDefinition() { - return typeDefinition; - } - - public LinkedHashMap getCustomDef() { - return customDef; - } - - @Override - public String toString() { - return "EntityTemplate{" + - "name='" + name + '\'' + - ", entityTpl=" + entityTpl + - ", customDef=" + customDef + - ", typeDefinition=" + typeDefinition + - ", _properties=" + _properties + - ", _interfaces=" + _interfaces + - ", _requirements=" + _requirements + - ", _capabilities=" + _capabilities + - '}'; - } -} - -/*python - -class EntityTemplate(object): - '''Base class for TOSCA templates.''' - - SECTIONS = (DERIVED_FROM, PROPERTIES, REQUIREMENTS, - INTERFACES, CAPABILITIES, TYPE, DESCRIPTION, DIRECTIVES, - ATTRIBUTES, ARTIFACTS, NODE_FILTER, COPY) = \ - ('derived_from', 'properties', 'requirements', 'interfaces', - 'capabilities', 'type', 'description', 'directives', - 'attributes', 'artifacts', 'node_filter', 'copy') - REQUIREMENTS_SECTION = (NODE, CAPABILITY, RELATIONSHIP, OCCURRENCES, NODE_FILTER) = \ - ('node', 'capability', 'relationship', - 'occurrences', 'node_filter') - # Special key names - SPECIAL_SECTIONS = (METADATA) = ('metadata') - - def __init__(self, name, template, entity_name, custom_def=None): - self.name = name - self.entity_tpl = template - self.custom_def = custom_def - self._validate_field(self.entity_tpl) - type = self.entity_tpl.get('type') - UnsupportedType.validate_type(type) - if entity_name == 'node_type': - self.type_definition = NodeType(type, custom_def) \ - if type is not None else None - if entity_name == 'relationship_type': - relationship = template.get('relationship') - type = None - if relationship and isinstance(relationship, dict): - type = relationship.get('type') - elif isinstance(relationship, str): - type = self.entity_tpl['relationship'] - else: - type 
= self.entity_tpl['type'] - UnsupportedType.validate_type(type) - self.type_definition = RelationshipType(type, - None, custom_def) - if entity_name == 'policy_type': - if not type: - msg = (_('Policy definition of "%(pname)s" must have' - ' a "type" ''attribute.') % dict(pname=name)) - ValidationIssueCollector.appendException( - ValidationError(msg)) - - self.type_definition = PolicyType(type, custom_def) - if entity_name == 'group_type': - self.type_definition = GroupType(type, custom_def) \ - if type is not None else None - self._properties = None - self._interfaces = None - self._requirements = None - self._capabilities = None - - @property - def type(self): - if self.type_definition: - return self.type_definition.type - - @property - def parent_type(self): - if self.type_definition: - return self.type_definition.parent_type - - @property - def requirements(self): - if self._requirements is None: - self._requirements = self.type_definition.get_value( - self.REQUIREMENTS, - self.entity_tpl) or [] - return self._requirements - - def get_properties_objects(self): - '''Return properties objects for this template.''' - if self._properties is None: - self._properties = self._create_properties() - return self._properties - - def get_properties(self): - '''Return a dictionary of property name-object pairs.''' - return {prop.name: prop - for prop in self.get_properties_objects()} - - def get_property_value(self, name): - '''Return the value of a given property name.''' - props = self.get_properties() - if props and name in props.keys(): - return props[name].value - - @property - def interfaces(self): - if self._interfaces is None: - self._interfaces = self._create_interfaces() - return self._interfaces - - def get_capabilities_objects(self): - '''Return capabilities objects for this template.''' - if not self._capabilities: - self._capabilities = self._create_capabilities() - return self._capabilities - - def get_capabilities(self): - '''Return a dictionary of 
capability name-object pairs.''' - return {cap.name: cap - for cap in self.get_capabilities_objects()} - - def is_derived_from(self, type_str): - '''Check if object inherits from the given type. - - Returns true if this object is derived from 'type_str'. - False otherwise. - ''' - if not self.type: - return False - elif self.type == type_str: - return True - elif self.parent_type: - return self.parent_type.is_derived_from(type_str) - else: - return False - - def _create_capabilities(self): - capability = [] - caps = self.type_definition.get_value(self.CAPABILITIES, - self.entity_tpl, True) - if caps: - for name, props in caps.items(): - capabilities = self.type_definition.get_capabilities() - if name in capabilities.keys(): - c = capabilities[name] - properties = {} - # first use the definition default value - if c.properties: - for property_name in c.properties.keys(): - prop_def = c.properties[property_name] - if 'default' in prop_def: - properties[property_name] = prop_def['default'] - # then update (if available) with the node properties - if 'properties' in props and props['properties']: - properties.update(props['properties']) - - cap = CapabilityAssignment(name, properties, c) - capability.append(cap) - return capability - - def _validate_properties(self, template, entitytype): - properties = entitytype.get_value(self.PROPERTIES, template) - self._common_validate_properties(entitytype, properties) - - def _validate_capabilities(self): - type_capabilities = self.type_definition.get_capabilities() - allowed_caps = \ - type_capabilities.keys() if type_capabilities else [] - capabilities = self.type_definition.get_value(self.CAPABILITIES, - self.entity_tpl) - if capabilities: - self._common_validate_field(capabilities, allowed_caps, - 'capabilities') - self._validate_capabilities_properties(capabilities) - - def _validate_capabilities_properties(self, capabilities): - for cap, props in capabilities.items(): - capability = self.get_capability(cap) - if not 
capability: - continue - capabilitydef = capability.definition - self._common_validate_properties(capabilitydef, - props[self.PROPERTIES]) - - # validating capability properties values - for prop in self.get_capability(cap).get_properties_objects(): - prop.validate() - - # tODO(srinivas_tadepalli): temporary work around to validate - # default_instances until standardized in specification - if cap == "scalable" and prop.name == "default_instances": - prop_dict = props[self.PROPERTIES] - min_instances = prop_dict.get("min_instances") - max_instances = prop_dict.get("max_instances") - default_instances = prop_dict.get("default_instances") - if not (min_instances <= default_instances - <= max_instances): - err_msg = ('"properties" of template "%s": ' - '"default_instances" value is not between ' - '"min_instances" and "max_instances".' % - self.name) - ValidationIssueCollector.appendException( - ValidationError(message=err_msg)) - - def _common_validate_properties(self, entitytype, properties): - allowed_props = [] - required_props = [] - for p in entitytype.get_properties_def_objects(): - allowed_props.append(p.name) - # If property is 'required' and has no 'default' value then record - if p.required and p.default is None: - required_props.append(p.name) - # validate all required properties have values - if properties: - req_props_no_value_or_default = [] - self._common_validate_field(properties, allowed_props, - 'properties') - # make sure it's not missing any property required by a tosca type - for r in required_props: - if r not in properties.keys(): - req_props_no_value_or_default.append(r) - # Required properties found without value or a default value - if req_props_no_value_or_default: - ValidationIssueCollector.appendException( - MissingRequiredFieldError( - what='"properties" of template "%s"' % self.name, - required=req_props_no_value_or_default)) - else: - # Required properties in schema, but not in template - if required_props: - 
ValidationIssueCollector.appendException( - MissingRequiredFieldError( - what='"properties" of template "%s"' % self.name, - required=required_props)) - - def _validate_field(self, template): - if not isinstance(template, dict): - ValidationIssueCollector.appendException( - MissingRequiredFieldError( - what='Template "%s"' % self.name, required=self.TYPE)) - try: - relationship = template.get('relationship') - if relationship and not isinstance(relationship, str): - relationship[self.TYPE] - elif isinstance(relationship, str): - template['relationship'] - else: - template[self.TYPE] - except KeyError: - ValidationIssueCollector.appendException( - MissingRequiredFieldError( - what='Template "%s"' % self.name, required=self.TYPE)) - - def _common_validate_field(self, schema, allowedlist, section): - for name in schema: - if name not in allowedlist: - ValidationIssueCollector.appendException( - UnknownFieldError( - what=('"%(section)s" of template "%(nodename)s"' - % {'section': section, 'nodename': self.name}), - field=name)) - - def _create_properties(self): - props = [] - properties = self.type_definition.get_value(self.PROPERTIES, - self.entity_tpl) or {} - for name, value in properties.items(): - props_def = self.type_definition.get_properties_def() - if props_def and name in props_def: - prop = Property(name, value, - props_def[name].schema, self.custom_def) - props.append(prop) - for p in self.type_definition.get_properties_def_objects(): - if p.default is not None and p.name not in properties.keys(): - prop = Property(p.name, p.default, p.schema, self.custom_def) - props.append(prop) - return props - - def _create_interfaces(self): - interfaces = [] - type_interfaces = None - if isinstance(self.type_definition, RelationshipType): - if isinstance(self.entity_tpl, dict): - if self.INTERFACES in self.entity_tpl: - type_interfaces = self.entity_tpl[self.INTERFACES] - else: - for rel_def, value in self.entity_tpl.items(): - if rel_def != 'type': - rel_def = 
self.entity_tpl.get(rel_def) - rel = None - if isinstance(rel_def, dict): - rel = rel_def.get('relationship') - if rel: - if self.INTERFACES in rel: - type_interfaces = rel[self.INTERFACES] - break - else: - type_interfaces = self.type_definition.get_value(self.INTERFACES, - self.entity_tpl) - if type_interfaces: - for interface_type, value in type_interfaces.items(): - for op, op_def in value.items(): - iface = InterfacesDef(self.type_definition, - interfacetype=interface_type, - node_template=self, - name=op, - value=op_def) - interfaces.append(iface) - return interfaces - - def get_capability(self, name): - """Provide named capability - - :param name: name of capability - :return: capability object if found, None otherwise - """ - caps = self.get_capabilities() - if caps and name in caps.keys(): - return caps[name] -*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/Group.java b/src/main/java/org/onap/sdc/toscaparser/api/Group.java deleted file mode 100644 index 0591d9a..0000000 --- a/src/main/java/org/onap/sdc/toscaparser/api/Group.java +++ /dev/null @@ -1,171 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * SDC - * ================================================================================ - * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package org.onap.sdc.toscaparser.api; - -import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; -import org.onap.sdc.toscaparser.api.elements.Metadata; -import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; -import org.onap.sdc.toscaparser.api.utils.ValidateUtils; - -import java.util.ArrayList; -import java.util.LinkedHashMap; -import java.util.Map; - -public class Group extends EntityTemplate { - - private static final String TYPE = "type"; - private static final String METADATA = "metadata"; - private static final String DESCRIPTION = "description"; - private static final String PROPERTIES = "properties"; - private static final String MEMBERS = "members"; - private static final String INTERFACES = "interfaces"; - private static final String[] SECTIONS = { - TYPE, METADATA, DESCRIPTION, PROPERTIES, MEMBERS, INTERFACES}; - - private String name; - private LinkedHashMap tpl; - private ArrayList memberNodes; - private LinkedHashMap customDef; - private Metadata metaData; - - - public Group(String name, LinkedHashMap templates, - ArrayList memberNodes, - LinkedHashMap customDef) { - this(name, templates, memberNodes, customDef, null); - } - - public Group(String name, LinkedHashMap templates, - ArrayList memberNodes, - LinkedHashMap customDef, NodeTemplate parentNodeTemplate) { - super(name, templates, "group_type", customDef, parentNodeTemplate); - - this.name = name; - tpl = templates; - if (tpl.get(METADATA) != null) { - Object metadataObject = tpl.get(METADATA); - ValidateUtils.validateMap(metadataObject); - metaData = new Metadata((Map) metadataObject); - } - this.memberNodes = memberNodes; - validateKeys(); - getCapabilities(); - } - - public Metadata getMetadata() { - return metaData; - } - - public ArrayList getMembers() { - return (ArrayList) entityTpl.get("members"); - } - - public String getDescription() { - return (String) 
entityTpl.get("description"); - - } - - public ArrayList getMemberNodes() { - return memberNodes; - } - - private void validateKeys() { - for (String key : entityTpl.keySet()) { - boolean bFound = false; - for (String sect : SECTIONS) { - if (key.equals(sect)) { - bFound = true; - break; - } - } - if (!bFound) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE183", String.format( - "UnknownFieldError: Groups \"%s\" contains unknown field \"%s\"", - name, key))); - } - } - } - - @Override - public String toString() { - return "Group{" - + "name='" + name + '\'' - + ", tpl=" + tpl - + ", memberNodes=" + memberNodes - + ", customDef=" + customDef - + ", metaData=" + metaData - + '}'; - } - - public int compareTo(Group other) { - if (this.equals(other)) { - return 0; - } - return this.getName().compareTo(other.getName()) == 0 ? this.getType().compareTo(other.getType()) : this.getName().compareTo(other.getName()); - } -} - -/*python - -from toscaparser.common.exception import ValidationIssueCollector -from toscaparser.common.exception import UnknownFieldError -from toscaparser.entity_template import EntityTemplate -from toscaparser.utils import validateutils - -SECTIONS = (TYPE, METADATA, DESCRIPTION, PROPERTIES, MEMBERS, INTERFACES) = \ - ('type', 'metadata', 'description', - 'properties', 'members', 'interfaces') - - -class Group(EntityTemplate): - - def __init__(self, name, group_templates, member_nodes, custom_defs=None): - super(Group, self).__init__(name, - group_templates, - 'group_type', - custom_defs) - self.name = name - self.tpl = group_templates - self.meta_data = None - if self.METADATA in self.tpl: - self.meta_data = self.tpl.get(self.METADATA) - validateutils.validate_map(self.meta_data) - self.member_nodes = member_nodes - self._validate_keys() - - @property - def members(self): - return self.entity_tpl.get('members') - - @property - def description(self): - return self.entity_tpl.get('description') - - def 
get_member_nodes(self): - return self.member_nodes - - def _validate_keys(self): - for key in self.entity_tpl.keys(): - if key not in SECTIONS: - ValidationIssueCollector.appendException( - UnknownFieldError(what='Groups "%s"' % self.name, - field=key)) -*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/ImportsLoader.java b/src/main/java/org/onap/sdc/toscaparser/api/ImportsLoader.java deleted file mode 100644 index 019adb3..0000000 --- a/src/main/java/org/onap/sdc/toscaparser/api/ImportsLoader.java +++ /dev/null @@ -1,748 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * SDC - * ================================================================================ - * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package org.onap.sdc.toscaparser.api; - -import com.google.common.base.Charsets; -import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; -import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; -import org.onap.sdc.toscaparser.api.utils.UrlUtils; - -import org.onap.sdc.toscaparser.api.elements.TypeValidation; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.yaml.snakeyaml.Yaml; - -import java.io.*; -import java.net.URL; -import java.nio.file.Paths; -import java.util.*; - -public class ImportsLoader { - - private static Logger log = LoggerFactory.getLogger(ImportsLoader.class.getName()); - private static final String FILE = "file"; - private static final String REPOSITORY = "repository"; - private static final String NAMESPACE_URI = "namespace_uri"; - private static final String NAMESPACE_PREFIX = "namespace_prefix"; - private String IMPORTS_SECTION[] = {FILE, REPOSITORY, NAMESPACE_URI, NAMESPACE_PREFIX}; - - private ArrayList importslist; - private String path; - private ArrayList typeDefinitionList; - - private LinkedHashMap customDefs; - private LinkedHashMap allCustomDefs; - private ArrayList> nestedToscaTpls; - private LinkedHashMap repositories; - - @SuppressWarnings("unchecked") - public ImportsLoader(ArrayList _importslist, - String _path, - Object _typeDefinitionList, - LinkedHashMap tpl) { - - this.importslist = _importslist; - customDefs = new LinkedHashMap(); - allCustomDefs = new LinkedHashMap(); - nestedToscaTpls = new ArrayList>(); - if ((_path == null || _path.isEmpty()) && tpl == null) { - //msg = _('Input tosca template is not provided.') - //log.warning(msg) - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE184", "ValidationError: Input tosca template is not provided")); - } - - this.path = _path; - this.repositories = new LinkedHashMap(); - - if (tpl != null && 
tpl.get("repositories") != null) { - this.repositories = (LinkedHashMap) tpl.get("repositories"); - } - this.typeDefinitionList = new ArrayList(); - if (_typeDefinitionList != null) { - if (_typeDefinitionList instanceof ArrayList) { - this.typeDefinitionList = (ArrayList) _typeDefinitionList; - } else { - this.typeDefinitionList.add((String) _typeDefinitionList); - } - } - _validateAndLoadImports(); - } - - public LinkedHashMap getCustomDefs() { - return allCustomDefs; - } - - public ArrayList> getNestedToscaTpls() { - return nestedToscaTpls; - } - - @SuppressWarnings({"unchecked", "unused"}) - public void _validateAndLoadImports() { - Set importNames = new HashSet(); - - if (importslist == null) { - //msg = _('"imports" keyname is defined without including templates.') - //log.error(msg) - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE185", - "ValidationError: \"imports\" keyname is defined without including templates")); - return; - } - - for (Object importDef : importslist) { - String fullFileName = null; - LinkedHashMap customType = null; - if (importDef instanceof LinkedHashMap) { - for (Map.Entry me : ((LinkedHashMap) importDef).entrySet()) { - String importName = me.getKey(); - Object importUri = me.getValue(); - if (importNames.contains(importName)) { - //msg = (_('Duplicate import name "%s" was found.') % import_name) - //log.error(msg) - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE186", String.format( - "ValidationError: Duplicate import name \"%s\" was found", importName))); - } - importNames.add(importName); //??? 
- - // _loadImportTemplate returns 2 objects - Object ffnct[] = _loadImportTemplate(importName, importUri); - fullFileName = (String) ffnct[0]; - customType = (LinkedHashMap) ffnct[1]; - String namespacePrefix = ""; - if (importUri instanceof LinkedHashMap) { - namespacePrefix = (String) - ((LinkedHashMap) importUri).get(NAMESPACE_PREFIX); - } - - if (customType != null) { - TypeValidation tv = new TypeValidation(customType, importDef); - _updateCustomDefs(customType, namespacePrefix); - } - } - } else { // old style of imports - // _loadImportTemplate returns 2 objects - Object ffnct[] = _loadImportTemplate(null, importDef); - fullFileName = (String) ffnct[0]; - customType = (LinkedHashMap) ffnct[1]; - if (customType != null) { - TypeValidation tv = new TypeValidation(customType, importDef); - _updateCustomDefs(customType, null); - } - } - _updateNestedToscaTpls(fullFileName, customType); - - - } - } - - /** - * This method is used to get consolidated custom definitions by passing custom Types from - * each import. 
The resultant collection is then passed back which contains all import - * definitions - * - * @param customType the custom type - * @param namespacePrefix the namespace prefix - */ - @SuppressWarnings("unchecked") - private void _updateCustomDefs(LinkedHashMap customType, String namespacePrefix) { - LinkedHashMap outerCustomTypes; - for (String typeDef : typeDefinitionList) { - if (typeDef.equals("imports")) { - customDefs.put("imports", customType.get(typeDef)); - if (allCustomDefs.isEmpty() || allCustomDefs.get("imports") == null) { - allCustomDefs.put("imports", customType.get(typeDef)); - } else if (customType.get(typeDef) != null) { - Set allCustomImports = new HashSet<>((ArrayList) allCustomDefs.get("imports")); - allCustomImports.addAll((ArrayList) customType.get(typeDef)); - allCustomDefs.put("imports", new ArrayList<>(allCustomImports)); - } - } else { - outerCustomTypes = (LinkedHashMap) customType.get(typeDef); - if (outerCustomTypes != null) { - if (namespacePrefix != null && !namespacePrefix.isEmpty()) { - LinkedHashMap prefixCustomTypes = new LinkedHashMap(); - for (Map.Entry me : outerCustomTypes.entrySet()) { - String typeDefKey = me.getKey(); - String nameSpacePrefixToKey = namespacePrefix + "." 
+ typeDefKey; - prefixCustomTypes.put(nameSpacePrefixToKey, outerCustomTypes.get(typeDefKey)); - } - customDefs.putAll(prefixCustomTypes); - allCustomDefs.putAll(prefixCustomTypes); - } else { - customDefs.putAll(outerCustomTypes); - allCustomDefs.putAll(outerCustomTypes); - } - } - } - } - } - - private void _updateNestedToscaTpls(String fullFileName, LinkedHashMap customTpl) { - if (fullFileName != null && customTpl != null) { - LinkedHashMap tt = new LinkedHashMap(); - tt.put(fullFileName, customTpl); - nestedToscaTpls.add(tt); - } - } - - private void _validateImportKeys(String importName, LinkedHashMap importUri) { - if (importUri.get(FILE) == null) { - //log.warning(_('Missing keyname "file" in import "%(name)s".') % {'name': import_name}) - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE187", String.format( - "MissingRequiredFieldError: Import of template \"%s\" is missing field %s", importName, FILE))); - } - for (String key : importUri.keySet()) { - boolean bFound = false; - for (String is : IMPORTS_SECTION) { - if (is.equals(key)) { - bFound = true; - break; - } - } - if (!bFound) { - //log.warning(_('Unknown keyname "%(key)s" error in ' - // 'imported definition "%(def)s".') - // % {'key': key, 'def': import_name}) - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE188", String.format( - "UnknownFieldError: Import of template \"%s\" has unknown fiels %s", importName, key))); - } - } - } - - @SuppressWarnings("unchecked") - private Object[] _loadImportTemplate(String importName, Object importUriDef) { - /* - This method loads the custom type definitions referenced in "imports" - section of the TOSCA YAML template by determining whether each import - is specified via a file reference (by relative or absolute path) or a - URL reference. 
- - Possibilities: - +----------+--------+------------------------------+ - | template | import | comment | - +----------+--------+------------------------------+ - | file | file | OK | - | file | URL | OK | - | preparsed| file | file must be a full path | - | preparsed| URL | OK | - | URL | file | file must be a relative path | - | URL | URL | OK | - +----------+--------+------------------------------+ - */ - Object al[] = new Object[2]; - - boolean shortImportNotation = false; - String fileName; - String repository; - if (importUriDef instanceof LinkedHashMap) { - _validateImportKeys(importName, (LinkedHashMap) importUriDef); - fileName = (String) ((LinkedHashMap) importUriDef).get(FILE); - repository = (String) ((LinkedHashMap) importUriDef).get(REPOSITORY); - if (repository != null) { - if (!repositories.keySet().contains(repository)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE189", String.format( - "InvalidPropertyValueError: Repository \"%s\" not found in \"%s\"", - repository, repositories.keySet().toString()))); - } - } - } else { - fileName = (String) importUriDef; - repository = null; - shortImportNotation = true; - } - - if (fileName == null || fileName.isEmpty()) { - //msg = (_('A template file name is not provided with import ' - // 'definition "%(import_name)s".') - // % {'import_name': import_name}) - //log.error(msg) - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE190", String.format( - "ValidationError: A template file name is not provided with import definition \"%s\"", importName))); - al[0] = al[1] = null; - return al; - } - - if (UrlUtils.validateUrl(fileName)) { - try (InputStream input = new URL(fileName).openStream();) { - al[0] = fileName; - Yaml yaml = new Yaml(); - al[1] = yaml.load(input); - return al; - } catch (IOException e) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE191", String.format( - "ImportError: 
\"%s\" loading YAML import from \"%s\"", e.getClass().getSimpleName(), fileName))); - al[0] = al[1] = null; - return al; - } - } else if (repository == null || repository.isEmpty()) { - boolean aFile = false; - String importTemplate = null; - if (path != null && !path.isEmpty()) { - if (UrlUtils.validateUrl(path)) { - File fp = new File(path); - if (fp.isAbsolute()) { - String msg = String.format( - "ImportError: Absolute file name \"%s\" cannot be used in the URL-based input template \"%s\"", - fileName, path); - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE192", msg)); - al[0] = al[1] = null; - return al; - } - importTemplate = UrlUtils.joinUrl(path, fileName); - aFile = false; - } else { - - aFile = true; - File fp = new File(path); - if (fp.isFile()) { - File fn = new File(fileName); - if (fn.isFile()) { - importTemplate = fileName; - } else { - String fullPath = Paths.get(path).toAbsolutePath().getParent().toString() + File.separator + fileName; - File ffp = new File(fullPath); - if (ffp.isFile()) { - importTemplate = fullPath; - } else { - String dirPath = Paths.get(path).toAbsolutePath().getParent().toString(); - String filePath; - if (Paths.get(fileName).getParent() != null) { - filePath = Paths.get(fileName).getParent().toString(); - } else { - filePath = ""; - } - if (!filePath.isEmpty() && dirPath.endsWith(filePath)) { - String sFileName = Paths.get(fileName).getFileName().toString(); - importTemplate = dirPath + File.separator + sFileName; - File fit = new File(importTemplate); - if (!fit.isFile()) { - //msg = (_('"%(import_template)s" is' - // 'not a valid file') - // % {'import_template': - // import_template}) - //log.error(msg) - String msg = String.format( - "ValueError: \"%s\" is not a valid file", importTemplate); - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE193", msg)); - log.debug("ImportsLoader - _loadImportTemplate - {}", msg); - } - } - } - } - } - } - } else { 
// template is pre-parsed - File fn = new File(fileName); - if (fn.isAbsolute() && fn.isFile()) { - aFile = true; - importTemplate = fileName; - } else { - String msg = String.format( - "Relative file name \"%s\" cannot be used in a pre-parsed input template", fileName); - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE194", "ImportError: " + msg)); - al[0] = al[1] = null; - return al; - } - } - - if (importTemplate == null || importTemplate.isEmpty()) { - //log.error(_('Import "%(name)s" is not valid.') % - // {'name': import_uri_def}) - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE195", String.format( - "ImportError: Import \"%s\" is not valid", importUriDef))); - al[0] = al[1] = null; - return al; - } - - // for now, this must be a file - if (!aFile) { - log.error("ImportsLoader - _loadImportTemplate - Error!! Expected a file. importUriDef = {}, importTemplate = {}", importUriDef, importTemplate); - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE196", String.format( - "ImportError: Import \"%s\" is not a file", importName))); - al[0] = al[1] = null; - return al; - } - try (BufferedReader br = new BufferedReader(new FileReader(importTemplate));) { - al[0] = importTemplate; - - Yaml yaml = new Yaml(); - al[1] = yaml.load(br); - return al; - } catch (FileNotFoundException e) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE197", String.format( - "ImportError: Failed to load YAML from \"%s\"" + e, importName))); - al[0] = al[1] = null; - return al; - } catch (Exception e) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE198", String.format( - "ImportError: Exception from SnakeYAML file = \"%s\"" + e, importName))); - al[0] = al[1] = null; - return al; - } - } - - if (shortImportNotation) { - //log.error(_('Import "%(name)s" is not valid.') % import_uri_def) - 
ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE199", String.format( - "ImportError: Import \"%s\" is not valid", importName))); - al[0] = al[1] = null; - return al; - } - - String fullUrl = ""; - String repoUrl = ""; - if (repository != null && !repository.isEmpty()) { - if (repositories != null) { - for (String repoName : repositories.keySet()) { - if (repoName.equals(repository)) { - Object repoDef = repositories.get(repoName); - if (repoDef instanceof String) { - repoUrl = (String) repoDef; - } else if (repoDef instanceof LinkedHashMap) { - repoUrl = (String) ((LinkedHashMap) repoDef).get("url"); - } - // Remove leading, ending spaces and strip - // the last character if "/" - repoUrl = repoUrl.trim(); - if (repoUrl.endsWith("/")) { - repoUrl = repoUrl.substring(0, repoUrl.length() - 1); - } - fullUrl = repoUrl + "/" + fileName; - break; - } - } - } - if (fullUrl.isEmpty()) { - String msg = String.format( - "referenced repository \"%s\" in import definition \"%s\" not found", - repository, importName); - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE200", "ImportError: " + msg)); - al[0] = al[1] = null; - return al; - } - } - if (UrlUtils.validateUrl(fullUrl)) { - try (InputStream input = new URL(fullUrl).openStream();) { - al[0] = fullUrl; - Yaml yaml = new Yaml(); - al[1] = yaml.load(input); - return al; - } catch (IOException e) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE201", String.format( - "ImportError: Exception loading YAML import from \"%s\"", fullUrl))); - al[0] = al[1] = null; - return al; - } - } else { - String msg = String.format( - "repository URL \"%s\" in import definition \"%s\" is not valid", - repoUrl, importName); - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE202", "ImportError: " + msg)); - } - - // if we got here something is wrong with the flow... 
- log.error("ImportsLoader - _loadImportTemplate - got to dead end (importName {})", importName); - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE203", String.format( - "ImportError: _loadImportTemplate got to dead end (importName %s)\n", importName))); - al[0] = al[1] = null; - return al; - } - - @Override - public String toString() { - return "ImportsLoader{" + - "IMPORTS_SECTION=" + Arrays.toString(IMPORTS_SECTION) + - ", importslist=" + importslist + - ", path='" + path + '\'' + - ", typeDefinitionList=" + typeDefinitionList + - ", customDefs=" + customDefs + - ", nestedToscaTpls=" + nestedToscaTpls + - ", repositories=" + repositories + - '}'; - } -} - -/*python - -import logging -import os - -from toscaparser.common.exception import ValidationIssueCollector -from toscaparser.common.exception import InvalidPropertyValueError -from toscaparser.common.exception import MissingRequiredFieldError -from toscaparser.common.exception import UnknownFieldError -from toscaparser.common.exception import ValidationError -from toscaparser.elements.tosca_type_validation import TypeValidation -from toscaparser.utils.gettextutils import _ -import org.openecomp.sdc.toscaparser.api.utils.urlutils -import org.openecomp.sdc.toscaparser.api.utils.yamlparser - -YAML_LOADER = toscaparser.utils.yamlparser.load_yaml -log = logging.getLogger("tosca") - - -class ImportsLoader(object): - - IMPORTS_SECTION = (FILE, REPOSITORY, NAMESPACE_URI, NAMESPACE_PREFIX) = \ - ('file', 'repository', 'namespace_uri', - 'namespace_prefix') - - def __init__(self, importslist, path, type_definition_list=None, - tpl=None): - self.importslist = importslist - self.custom_defs = {} - if not path and not tpl: - msg = _('Input tosca template is not provided.') - log.warning(msg) - ValidationIssueCollector.appendException(ValidationError(message=msg)) - self.path = path - self.repositories = {} - if tpl and tpl.get('repositories'): - self.repositories = 
tpl.get('repositories') - self.type_definition_list = [] - if type_definition_list: - if isinstance(type_definition_list, list): - self.type_definition_list = type_definition_list - else: - self.type_definition_list.append(type_definition_list) - self._validate_and_load_imports() - - def get_custom_defs(self): - return self.custom_defs - - def _validate_and_load_imports(self): - imports_names = set() - - if not self.importslist: - msg = _('"imports" keyname is defined without including ' - 'templates.') - log.error(msg) - ValidationIssueCollector.appendException(ValidationError(message=msg)) - return - - for import_def in self.importslist: - if isinstance(import_def, dict): - for import_name, import_uri in import_def.items(): - if import_name in imports_names: - msg = (_('Duplicate import name "%s" was found.') % - import_name) - log.error(msg) - ValidationIssueCollector.appendException( - ValidationError(message=msg)) - imports_names.add(import_name) - - custom_type = self._load_import_template(import_name, - import_uri) - namespace_prefix = None - if isinstance(import_uri, dict): - namespace_prefix = import_uri.get( - self.NAMESPACE_PREFIX) - if custom_type: - TypeValidation(custom_type, import_def) - self._update_custom_def(custom_type, namespace_prefix) - else: # old style of imports - custom_type = self._load_import_template(None, - import_def) - if custom_type: - TypeValidation( - custom_type, import_def) - self._update_custom_def(custom_type, None) - - def _update_custom_def(self, custom_type, namespace_prefix): - outer_custom_types = {} - for type_def in self.type_definition_list: - outer_custom_types = custom_type.get(type_def) - if outer_custom_types: - if type_def == "imports": - self.custom_defs.update({'imports': outer_custom_types}) - else: - if namespace_prefix: - prefix_custom_types = {} - for type_def_key in outer_custom_types.keys(): - namespace_prefix_to_key = (namespace_prefix + - "." 
+ type_def_key) - prefix_custom_types[namespace_prefix_to_key] = \ - outer_custom_types[type_def_key] - self.custom_defs.update(prefix_custom_types) - else: - self.custom_defs.update(outer_custom_types) - - def _validate_import_keys(self, import_name, import_uri_def): - if self.FILE not in import_uri_def.keys(): - log.warning(_('Missing keyname "file" in import "%(name)s".') - % {'name': import_name}) - ValidationIssueCollector.appendException( - MissingRequiredFieldError( - what='Import of template "%s"' % import_name, - required=self.FILE)) - for key in import_uri_def.keys(): - if key not in self.IMPORTS_SECTION: - log.warning(_('Unknown keyname "%(key)s" error in ' - 'imported definition "%(def)s".') - % {'key': key, 'def': import_name}) - ValidationIssueCollector.appendException( - UnknownFieldError( - what='Import of template "%s"' % import_name, - field=key)) - - def _load_import_template(self, import_name, import_uri_def): - """Handle custom types defined in imported template files - - This method loads the custom type definitions referenced in "imports" - section of the TOSCA YAML template by determining whether each import - is specified via a file reference (by relative or absolute path) or a - URL reference. 
- - Possibilities: - +----------+--------+------------------------------+ - | template | import | comment | - +----------+--------+------------------------------+ - | file | file | OK | - | file | URL | OK | - | preparsed| file | file must be a full path | - | preparsed| URL | OK | - | URL | file | file must be a relative path | - | URL | URL | OK | - +----------+--------+------------------------------+ - """ - short_import_notation = False - if isinstance(import_uri_def, dict): - self._validate_import_keys(import_name, import_uri_def) - file_name = import_uri_def.get(self.FILE) - repository = import_uri_def.get(self.REPOSITORY) - repos = self.repositories.keys() - if repository is not None: - if repository not in repos: - ValidationIssueCollector.appendException( - InvalidPropertyValueError( - what=_('Repository is not found in "%s"') % repos)) - else: - file_name = import_uri_def - repository = None - short_import_notation = True - - if not file_name: - msg = (_('A template file name is not provided with import ' - 'definition "%(import_name)s".') - % {'import_name': import_name}) - log.error(msg) - ValidationIssueCollector.appendException(ValidationError(message=msg)) - return - - if toscaparser.utils.urlutils.UrlUtils.validate_url(file_name): - return YAML_LOADER(file_name, False) - elif not repository: - import_template = None - if self.path: - if toscaparser.utils.urlutils.UrlUtils.validate_url(self.path): - if os.path.isabs(file_name): - msg = (_('Absolute file name "%(name)s" cannot be ' - 'used in a URL-based input template ' - '"%(template)s".') - % {'name': file_name, 'template': self.path}) - log.error(msg) - ValidationIssueCollector.appendException(ImportError(msg)) - return - import_template = toscaparser.utils.urlutils.UrlUtils.\ - join_url(self.path, file_name) - a_file = False - else: - a_file = True - main_a_file = os.path.isfile(self.path) - - if main_a_file: - if os.path.isfile(file_name): - import_template = file_name - else: - full_path = 
os.path.join( - os.path.dirname(os.path.abspath(self.path)), - file_name) - if os.path.isfile(full_path): - import_template = full_path - else: - file_path = file_name.rpartition("/") - dir_path = os.path.dirname(os.path.abspath( - self.path)) - if file_path[0] != '' and dir_path.endswith( - file_path[0]): - import_template = dir_path + "/" +\ - file_path[2] - if not os.path.isfile(import_template): - msg = (_('"%(import_template)s" is' - 'not a valid file') - % {'import_template': - import_template}) - log.error(msg) - ValidationIssueCollector.appendException - (ValueError(msg)) - else: # template is pre-parsed - if os.path.isabs(file_name) and os.path.isfile(file_name): - a_file = True - import_template = file_name - else: - msg = (_('Relative file name "%(name)s" cannot be used ' - 'in a pre-parsed input template.') - % {'name': file_name}) - log.error(msg) - ValidationIssueCollector.appendException(ImportError(msg)) - return - - if not import_template: - log.error(_('Import "%(name)s" is not valid.') % - {'name': import_uri_def}) - ValidationIssueCollector.appendException( - ImportError(_('Import "%s" is not valid.') % - import_uri_def)) - return - return YAML_LOADER(import_template, a_file) - - if short_import_notation: - log.error(_('Import "%(name)s" is not valid.') % import_uri_def) - ValidationIssueCollector.appendException( - ImportError(_('Import "%s" is not valid.') % import_uri_def)) - return - - full_url = "" - if repository: - if self.repositories: - for repo_name, repo_def in self.repositories.items(): - if repo_name == repository: - # Remove leading, ending spaces and strip - # the last character if "/" - repo_url = ((repo_def['url']).strip()).rstrip("//") - full_url = repo_url + "/" + file_name - - if not full_url: - msg = (_('referenced repository "%(n_uri)s" in import ' - 'definition "%(tpl)s" not found.') - % {'n_uri': repository, 'tpl': import_name}) - log.error(msg) - ValidationIssueCollector.appendException(ImportError(msg)) - return - - if 
toscaparser.utils.urlutils.UrlUtils.validate_url(full_url): - return YAML_LOADER(full_url, False) - else: - msg = (_('repository url "%(n_uri)s" is not valid in import ' - 'definition "%(tpl)s".') - % {'n_uri': repo_url, 'tpl': import_name}) - log.error(msg) - ValidationIssueCollector.appendException(ImportError(msg)) -*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/NodeTemplate.java b/src/main/java/org/onap/sdc/toscaparser/api/NodeTemplate.java deleted file mode 100644 index 4fabe38..0000000 --- a/src/main/java/org/onap/sdc/toscaparser/api/NodeTemplate.java +++ /dev/null @@ -1,824 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * SDC - * ================================================================================ - * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package org.onap.sdc.toscaparser.api; - -import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; -import org.onap.sdc.toscaparser.api.elements.EntityType; -import org.onap.sdc.toscaparser.api.elements.InterfacesDef; -import org.onap.sdc.toscaparser.api.elements.Metadata; -import org.onap.sdc.toscaparser.api.elements.NodeType; -import org.onap.sdc.toscaparser.api.elements.RelationshipType; -import org.onap.sdc.toscaparser.api.utils.CopyUtils; -import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; - -import java.util.ArrayList; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; - -import static org.onap.sdc.toscaparser.api.elements.EntityType.TOSCA_DEF; - -public class NodeTemplate extends EntityTemplate { - - private LinkedHashMap templates; - private LinkedHashMap customDef; - private ArrayList availableRelTpls; - private LinkedHashMap availableRelTypes; - private LinkedHashMap related; - private ArrayList relationshipTpl; - private LinkedHashMap _relationships; - private SubstitutionMappings subMappingToscaTemplate; - private TopologyTemplate originComponentTemplate; - private Metadata metadata; - - private static final String METADATA = "metadata"; - - public NodeTemplate(String name, - LinkedHashMap ntnodeTemplates, - LinkedHashMap ntcustomDef, - ArrayList ntavailableRelTpls, - LinkedHashMap ntavailableRelTypes) { - this(name, ntnodeTemplates, ntcustomDef, ntavailableRelTpls, - ntavailableRelTypes, null); - } - - @SuppressWarnings("unchecked") - public NodeTemplate(String name, - LinkedHashMap ntnodeTemplates, - LinkedHashMap ntcustomDef, - ArrayList ntavailableRelTpls, - LinkedHashMap ntavailableRelTypes, - NodeTemplate parentNodeTemplate) { - - super(name, (LinkedHashMap) ntnodeTemplates.get(name), - "node_type", ntcustomDef, parentNodeTemplate); - - templates = ntnodeTemplates; - _validateFields((LinkedHashMap) 
templates.get(name)); - customDef = ntcustomDef; - related = new LinkedHashMap(); - relationshipTpl = new ArrayList(); - availableRelTpls = ntavailableRelTpls; - availableRelTypes = ntavailableRelTypes; - _relationships = new LinkedHashMap(); - subMappingToscaTemplate = null; - metadata = _metaData(); - } - - @SuppressWarnings("unchecked") - public LinkedHashMap getRelationships() { - if (_relationships.isEmpty()) { - List requires = getRequirements().getAll(); - if (requires != null && requires instanceof List) { - for (RequirementAssignment r : requires) { - LinkedHashMap explicit = _getExplicitRelationship(r); - if (explicit != null) { - // _relationships.putAll(explicit)... - for (Map.Entry ee : explicit.entrySet()) { - _relationships.put(ee.getKey(), ee.getValue()); - } - } - } - } - } - return _relationships; - } - - @SuppressWarnings("unchecked") - private LinkedHashMap _getExplicitRelationship(RequirementAssignment req) { - // Handle explicit relationship - - // For example, - // - req: - // node: DBMS - // relationship: tosca.relationships.HostedOn - - LinkedHashMap explicitRelation = new LinkedHashMap(); - String node = req.getNodeTemplateName(); - - if (node != null && !node.isEmpty()) { - //msg = _('Lookup by TOSCA types is not supported. ' - // 'Requirement for "%s" can not be full-filled.') % self.name - boolean bFound = false; - for (String k : EntityType.TOSCA_DEF.keySet()) { - if (k.equals(node)) { - bFound = true; - break; - } - } - if (bFound || customDef.get(node) != null) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE205", String.format( - "NotImplementedError: Lookup by TOSCA types is not supported. 
Requirement for \"%s\" can not be full-filled", - getName()))); - return null; - } - if (templates.get(node) == null) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE206", String.format( - "KeyError: Node template \"%s\" was not found", node))); - return null; - } - NodeTemplate relatedTpl = new NodeTemplate(node, templates, customDef, null, null); - Object relationship = req.getRelationship(); - String relationshipString = null; -// // here relationship can be a string or a LHM with 'type': - - // check if its type has relationship defined - if (relationship == null) { - ArrayList parentReqs = ((NodeType) typeDefinition).getAllRequirements(); - if (parentReqs == null) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE207", "ValidationError: parent_req is null")); - } else { -// for(String key: req.keySet()) { -// boolean bFoundRel = false; - for (Object rdo : parentReqs) { - LinkedHashMap reqDict = (LinkedHashMap) rdo; - LinkedHashMap relDict = (LinkedHashMap) reqDict.get(req.getName()); - if (relDict != null) { - relationship = relDict.get("relationship"); - //BUG-python??? need to break twice? 
-// bFoundRel = true; - break; - } - } -// if(bFoundRel) { -// break; -// } -// } - } - } - - if (relationship != null) { - // here relationship can be a string or a LHM with 'type': - if (relationship instanceof String) { - relationshipString = (String) relationship; - } else if (relationship instanceof LinkedHashMap) { - relationshipString = (String) ((LinkedHashMap) relationship).get("type"); - } - - boolean foundRelationshipTpl = false; - // apply available relationship templates if found - if (availableRelTpls != null) { - for (RelationshipTemplate tpl : availableRelTpls) { - if (tpl.getName().equals(relationshipString)) { - RelationshipType rtype = new RelationshipType(tpl.getType(), null, customDef); - explicitRelation.put(rtype, relatedTpl); - tpl.setTarget(relatedTpl); - tpl.setSource(this); - relationshipTpl.add(tpl); - foundRelationshipTpl = true; - } - } - } - // create relationship template object. - String relPrfx = EntityType.RELATIONSHIP_PREFIX; - if (!foundRelationshipTpl) { - if (relationship instanceof LinkedHashMap) { - relationshipString = (String) ((LinkedHashMap) relationship).get("type"); - if (relationshipString != null) { - if (availableRelTypes != null && !availableRelTypes.isEmpty() && - availableRelTypes.get(relationshipString) != null) { - ; - } else if (!(relationshipString).startsWith(relPrfx)) { - relationshipString = relPrfx + relationshipString; - } - } else { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE208", String.format( - "MissingRequiredFieldError: \"relationship\" used in template \"%s\" is missing required field \"type\"", - relatedTpl.getName()))); - } - } - for (RelationshipType rtype : ((NodeType) typeDefinition).getRelationship().keySet()) { - if (rtype.getType().equals(relationshipString)) { - explicitRelation.put(rtype, relatedTpl); - relatedTpl._addRelationshipTemplate(req, rtype.getType(), this); - } else if (availableRelTypes != null && !availableRelTypes.isEmpty()) { - 
LinkedHashMap relTypeDef = (LinkedHashMap) availableRelTypes.get(relationshipString); - if (relTypeDef != null) { - String superType = (String) relTypeDef.get("derived_from"); - if (superType != null) { - if (!superType.startsWith(relPrfx)) { - superType = relPrfx + superType; - } - if (rtype.getType().equals(superType)) { - explicitRelation.put(rtype, relatedTpl); - relatedTpl._addRelationshipTemplate(req, rtype.getType(), this); - } - } - } - } - } - } - } - } - return explicitRelation; - } - - @SuppressWarnings("unchecked") - private void _addRelationshipTemplate(RequirementAssignment requirement, String rtype, NodeTemplate source) { - LinkedHashMap req = new LinkedHashMap<>(); - req.put("relationship", CopyUtils.copyLhmOrAl(requirement.getRelationship())); - req.put("type", rtype); - RelationshipTemplate tpl = new RelationshipTemplate(req, rtype, customDef, this, source, getParentNodeTemplate()); - relationshipTpl.add(tpl); - } - - public ArrayList getRelationshipTemplate() { - return relationshipTpl; - } - - void _addNext(NodeTemplate nodetpl, RelationshipType relationship) { - related.put(nodetpl, relationship); - } - - public ArrayList getRelatedNodes() { - if (related.isEmpty()) { - for (Map.Entry me : ((NodeType) typeDefinition).getRelationship().entrySet()) { - RelationshipType relation = me.getKey(); - NodeType node = me.getValue(); - for (String tpl : templates.keySet()) { - if (tpl.equals(node.getType())) { - //BUG.. python has - // self.related[NodeTemplate(tpl)] = relation - // but NodeTemplate doesn't have a constructor with just name... - //???? 
- related.put(new NodeTemplate(tpl, null, null, null, null), relation); - } - } - } - } - return new ArrayList(related.keySet()); - } - - public void validate(/*tosca_tpl=none is not used...*/) { - _validateCapabilities(); - _validateRequirements(); - _validateProperties(entityTpl, (NodeType) typeDefinition); - _validateInterfaces(); - for (Property prop : getPropertiesObjects()) { - prop.validate(); - } - } - - public Object getPropertyValueFromTemplatesByName(String propertyName) { - LinkedHashMap nodeObject = (LinkedHashMap) templates.get(name); - if (nodeObject != null) { - LinkedHashMap properties = (LinkedHashMap) nodeObject.get(PROPERTIES); - if (properties != null) { - return properties.get(propertyName); - } - } - return null; - } - - private Metadata _metaData() { - if (entityTpl.get(METADATA) != null) { - return new Metadata((Map) entityTpl.get(METADATA)); - } else { - return null; - } - } - - @SuppressWarnings("unchecked") - private void _validateRequirements() { - ArrayList typeRequires = ((NodeType) typeDefinition).getAllRequirements(); - ArrayList allowedReqs = new ArrayList<>(); - allowedReqs.add("template"); - if (typeRequires != null) { - for (Object to : typeRequires) { - LinkedHashMap treq = (LinkedHashMap) to; - for (Map.Entry me : treq.entrySet()) { - String key = me.getKey(); - Object value = me.getValue(); - allowedReqs.add(key); - if (value instanceof LinkedHashMap) { - allowedReqs.addAll(((LinkedHashMap) value).keySet()); - } - } - - } - } - - ArrayList requires = (ArrayList) ((NodeType) typeDefinition).getValue(REQUIREMENTS, entityTpl, false); - if (requires != null) { - if (!(requires instanceof ArrayList)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE209", String.format( - "TypeMismatchError: \"requirements\" of template \"%s\" are not of type \"list\"", name))); - } else { - for (Object ro : requires) { - LinkedHashMap req = (LinkedHashMap) ro; - for (Map.Entry me : req.entrySet()) { - String 
rl = me.getKey(); - Object vo = me.getValue(); - if (vo instanceof LinkedHashMap) { - LinkedHashMap value = (LinkedHashMap) vo; - _validateRequirementsKeys(value); - _validateRequirementsProperties(value); - allowedReqs.add(rl); - } - } - _commonValidateField(req, allowedReqs, "requirements"); - } - } - } - } - - @SuppressWarnings("unchecked") - private void _validateRequirementsProperties(LinkedHashMap reqs) { - // TO-DO(anyone): Only occurrences property of the requirements is - // validated here. Validation of other requirement properties are being - // validated in different files. Better to keep all the requirements - // properties validation here. - for (Map.Entry me : reqs.entrySet()) { - if (me.getKey().equals("occurrences")) { - ArrayList val = (ArrayList) me.getValue(); - _validateOccurrences(val); - } - - } - } - - private void _validateOccurrences(ArrayList occurrences) { - DataEntity.validateDatatype("list", occurrences, null, null, null); - for (Object val : occurrences) { - DataEntity.validateDatatype("Integer", val, null, null, null); - } - if (occurrences.size() != 2 || - !(0 <= (int) occurrences.get(0) && (int) occurrences.get(0) <= (int) occurrences.get(1)) || - (int) occurrences.get(1) == 0) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE210", String.format( - "InvalidPropertyValueError: property has invalid value %s", occurrences.toString()))); - } - } - - private void _validateRequirementsKeys(LinkedHashMap reqs) { - for (String key : reqs.keySet()) { - boolean bFound = false; - for (int i = 0; i < REQUIREMENTS_SECTION.length; i++) { - if (key.equals(REQUIREMENTS_SECTION[i])) { - bFound = true; - break; - } - } - if (!bFound) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE211", String.format( - "UnknownFieldError: \"requirements\" of template \"%s\" contains unknown field \"%s\"", name, key))); - } - } - } - - @SuppressWarnings("unchecked") - private void 
_validateInterfaces() { - LinkedHashMap ifaces = (LinkedHashMap) - ((NodeType) typeDefinition).getValue(INTERFACES, entityTpl, false); - if (ifaces != null) { - for (Map.Entry me : ifaces.entrySet()) { - String iname = me.getKey(); - LinkedHashMap value = (LinkedHashMap) me.getValue(); - if (iname.equals(InterfacesDef.LIFECYCLE) || iname.equals(InterfacesDef.LIFECYCLE_SHORTNAME)) { - // maybe we should convert [] to arraylist??? - ArrayList inlo = new ArrayList<>(); - for (int i = 0; i < InterfacesDef.INTERFACE_NODE_LIFECYCLE_OPERATIONS.length; i++) { - inlo.add(InterfacesDef.INTERFACE_NODE_LIFECYCLE_OPERATIONS[i]); - } - _commonValidateField(value, inlo, "interfaces"); - } else if (iname.equals(InterfacesDef.CONFIGURE) || iname.equals(InterfacesDef.CONFIGURE_SHORTNAME)) { - // maybe we should convert [] to arraylist??? - ArrayList irco = new ArrayList<>(); - for (int i = 0; i < InterfacesDef.INTERFACE_RELATIONSHIP_CONFIGURE_OPERATIONS.length; i++) { - irco.add(InterfacesDef.INTERFACE_RELATIONSHIP_CONFIGURE_OPERATIONS[i]); - } - _commonValidateField(value, irco, "interfaces"); - } else if (((NodeType) typeDefinition).getInterfaces().keySet().contains(iname)) { - _commonValidateField(value, _collectCustomIfaceOperations(iname), "interfaces"); - } else { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE212", String.format( - "UnknownFieldError: \"interfaces\" of template \"%s\" contains unknown field %s", name, iname))); - } - } - } - } - - @SuppressWarnings("unchecked") - private ArrayList _collectCustomIfaceOperations(String iname) { - ArrayList allowedOperations = new ArrayList<>(); - LinkedHashMap nodetypeIfaceDef = (LinkedHashMap) ((NodeType) - typeDefinition).getInterfaces().get(iname); - allowedOperations.addAll(nodetypeIfaceDef.keySet()); - String ifaceType = (String) nodetypeIfaceDef.get("type"); - if (ifaceType != null) { - LinkedHashMap ifaceTypeDef = null; - if (((NodeType) typeDefinition).customDef != null) { - 
ifaceTypeDef = (LinkedHashMap) ((NodeType) typeDefinition).customDef.get(ifaceType); - } - if (ifaceTypeDef == null) { - ifaceTypeDef = (LinkedHashMap) EntityType.TOSCA_DEF.get(ifaceType); - } - allowedOperations.addAll(ifaceTypeDef.keySet()); - } - // maybe we should convert [] to arraylist??? - ArrayList idrw = new ArrayList<>(); - for (int i = 0; i < InterfacesDef.INTERFACE_DEF_RESERVED_WORDS.length; i++) { - idrw.add(InterfacesDef.INTERFACE_DEF_RESERVED_WORDS[i]); - } - allowedOperations.removeAll(idrw); - return allowedOperations; - } - - /** - * Get all interface details for given node template.
- * - * @return Map that contains the list of all interfaces and their definitions. - * If none found, an empty map will be returned. - */ - public Map> getAllInterfaceDetailsForNodeType() { - Map> interfaceMap = new LinkedHashMap<>(); - - // Get custom interface details - Map customInterfacesDetails = ((NodeType) typeDefinition).getInterfaces(); - // Get native interface details from tosca definitions - Object nativeInterfaceDetails = TOSCA_DEF.get(InterfacesDef.LIFECYCLE); - Map allInterfaceDetails = new LinkedHashMap<>(); - allInterfaceDetails.putAll(customInterfacesDetails); - if (nativeInterfaceDetails != null) { - allInterfaceDetails.put(InterfacesDef.LIFECYCLE, nativeInterfaceDetails); - } - - // Process all interface details from combined collection and return an interface Map with - // interface names and their definitions - for (Map.Entry me : allInterfaceDetails.entrySet()) { - ArrayList interfaces = new ArrayList<>(); - String interfaceType = me.getKey(); - Map interfaceValue = (Map) me.getValue(); - if (interfaceValue.containsKey("type")) { - interfaceType = (String) interfaceValue.get("type"); - } - - for (Map.Entry ve : interfaceValue.entrySet()) { - // Filter type as this is a reserved key and not an operation - if (!ve.getKey().equals("type")) { - InterfacesDef iface = new InterfacesDef(typeDefinition, interfaceType, this, ve.getKey(), ve.getValue()); - interfaces.add(iface); - } - } - interfaceMap.put(interfaceType, interfaces); - } - return interfaceMap; - } - - private void _validateFields(LinkedHashMap nodetemplate) { - for (String ntname : nodetemplate.keySet()) { - boolean bFound = false; - for (int i = 0; i < SECTIONS.length; i++) { - if (ntname.equals(SECTIONS[i])) { - bFound = true; - break; - } - } - if (!bFound) { - for (int i = 0; i < SPECIAL_SECTIONS.length; i++) { - if (ntname.equals(SPECIAL_SECTIONS[i])) { - bFound = true; - break; - } - } - - } - if (!bFound) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new 
JToscaValidationIssue("JE213", String.format( - "UnknownFieldError: Node template \"%s\" has unknown field \"%s\"", name, ntname))); - } - } - } - - // getter/setter - - // multilevel nesting - public SubstitutionMappings getSubMappingToscaTemplate() { - return subMappingToscaTemplate; - } - - public void setSubMappingToscaTemplate(SubstitutionMappings sm) { - subMappingToscaTemplate = sm; - } - - public Metadata getMetaData() { - return metadata; - } - - public void setMetaData(Metadata metadata) { - this.metadata = metadata; - } - - @Override - public String toString() { - return getName(); - } - - public TopologyTemplate getOriginComponentTemplate() { - return originComponentTemplate; - } - - public void setOriginComponentTemplate(TopologyTemplate originComponentTemplate) { - this.originComponentTemplate = originComponentTemplate; - } - -} - -/*python - -from toscaparser.common.exception import ValidationIssueCollector -from toscaparser.common.exception import InvalidPropertyValueError -from toscaparser.common.exception import MissingRequiredFieldError -from toscaparser.common.exception import TypeMismatchError -from toscaparser.common.exception import UnknownFieldError -from toscaparser.common.exception import ValidationError -from toscaparser.dataentity import DataEntity -from toscaparser.elements.interfaces import CONFIGURE -from toscaparser.elements.interfaces import CONFIGURE_SHORTNAME -from toscaparser.elements.interfaces import INTERFACE_DEF_RESERVED_WORDS -from toscaparser.elements.interfaces import InterfacesDef -from toscaparser.elements.interfaces import LIFECYCLE -from toscaparser.elements.interfaces import LIFECYCLE_SHORTNAME -from toscaparser.elements.relationshiptype import RelationshipType -from toscaparser.entity_template import EntityTemplate -from toscaparser.relationship_template import RelationshipTemplate -from toscaparser.utils.gettextutils import _ - -log = logging.getLogger('tosca') - - -class NodeTemplate(EntityTemplate): - '''Node 
template from a Tosca profile.''' - def __init__(self, name, node_templates, custom_def=None, - available_rel_tpls=None, available_rel_types=None): - super(NodeTemplate, self).__init__(name, node_templates[name], - 'node_type', - custom_def) - self.templates = node_templates - self._validate_fields(node_templates[name]) - self.custom_def = custom_def - self.related = {} - self.relationship_tpl = [] - self.available_rel_tpls = available_rel_tpls - self.available_rel_types = available_rel_types - self._relationships = {} - self.sub_mapping_tosca_template = None - - @property - def relationships(self): - if not self._relationships: - requires = self.requirements - if requires and isinstance(requires, list): - for r in requires: - for r1, value in r.items(): - explicit = self._get_explicit_relationship(r, value) - if explicit: - for key, value in explicit.items(): - self._relationships[key] = value - return self._relationships - - def _get_explicit_relationship(self, req, value): - """Handle explicit relationship - - For example, - - req: - node: DBMS - relationship: tosca.relationships.HostedOn - """ - explicit_relation = {} - node = value.get('node') if isinstance(value, dict) else value - - if node: - # TO-DO(spzala) implement look up once Glance meta data is available - # to find a matching TOSCA node using the TOSCA types - msg = _('Lookup by TOSCA types is not supported. 
' - 'Requirement for "%s" can not be full-filled.') % self.name - if (node in list(self.type_definition.TOSCA_DEF.keys()) - or node in self.custom_def): - ValidationIssueCollector.appendException(NotImplementedError(msg)) - return - - if node not in self.templates: - ValidationIssueCollector.appendException( - KeyError(_('Node template "%s" was not found.') % node)) - return - - related_tpl = NodeTemplate(node, self.templates, self.custom_def) - relationship = value.get('relationship') \ - if isinstance(value, dict) else None - # check if it's type has relationship defined - if not relationship: - parent_reqs = self.type_definition.get_all_requirements() - if parent_reqs is None: - ValidationIssueCollector.appendException( - ValidationError(message='parent_req is ' + - str(parent_reqs))) - else: - for key in req.keys(): - for req_dict in parent_reqs: - if key in req_dict.keys(): - relationship = (req_dict.get(key). - get('relationship')) - break - if relationship: - found_relationship_tpl = False - # apply available relationship templates if found - if self.available_rel_tpls: - for tpl in self.available_rel_tpls: - if tpl.name == relationship: - rtype = RelationshipType(tpl.type, None, - self.custom_def) - explicit_relation[rtype] = related_tpl - tpl.target = related_tpl - tpl.source = self - self.relationship_tpl.append(tpl) - found_relationship_tpl = True - # create relationship template object. 
- rel_prfx = self.type_definition.RELATIONSHIP_PREFIX - if not found_relationship_tpl: - if isinstance(relationship, dict): - relationship = relationship.get('type') - if relationship: - if self.available_rel_types and \ - relationship in self.available_rel_types.keys(): - pass - elif not relationship.startswith(rel_prfx): - relationship = rel_prfx + relationship - else: - ValidationIssueCollector.appendException( - MissingRequiredFieldError( - what=_('"relationship" used in template ' - '"%s"') % related_tpl.name, - required=self.TYPE)) - for rtype in self.type_definition.relationship.keys(): - if rtype.type == relationship: - explicit_relation[rtype] = related_tpl - related_tpl._add_relationship_template(req, - rtype.type, - self) - elif self.available_rel_types: - if relationship in self.available_rel_types.keys(): - rel_type_def = self.available_rel_types.\ - get(relationship) - if 'derived_from' in rel_type_def: - super_type = \ - rel_type_def.get('derived_from') - if not super_type.startswith(rel_prfx): - super_type = rel_prfx + super_type - if rtype.type == super_type: - explicit_relation[rtype] = related_tpl - related_tpl.\ - _add_relationship_template( - req, rtype.type, self) - return explicit_relation - - def _add_relationship_template(self, requirement, rtype, source): - req = requirement.copy() - req['type'] = rtype - tpl = RelationshipTemplate(req, rtype, self.custom_def, self, source) - self.relationship_tpl.append(tpl) - - def get_relationship_template(self): - return self.relationship_tpl - - def _add_next(self, nodetpl, relationship): - self.related[nodetpl] = relationship - - @property - def related_nodes(self): - if not self.related: - for relation, node in self.type_definition.relationship.items(): - for tpl in self.templates: - if tpl == node.type: - self.related[NodeTemplate(tpl)] = relation - return self.related.keys() - - def validate(self, tosca_tpl=None): - self._validate_capabilities() - self._validate_requirements() - 
self._validate_properties(self.entity_tpl, self.type_definition) - self._validate_interfaces() - for prop in self.get_properties_objects(): - prop.validate() - - def _validate_requirements(self): - type_requires = self.type_definition.get_all_requirements() - allowed_reqs = ["template"] - if type_requires: - for treq in type_requires: - for key, value in treq.items(): - allowed_reqs.append(key) - if isinstance(value, dict): - for key in value: - allowed_reqs.append(key) - - requires = self.type_definition.get_value(self.REQUIREMENTS, - self.entity_tpl) - if requires: - if not isinstance(requires, list): - ValidationIssueCollector.appendException( - TypeMismatchError( - what='"requirements" of template "%s"' % self.name, - type='list')) - else: - for req in requires: - for r1, value in req.items(): - if isinstance(value, dict): - self._validate_requirements_keys(value) - self._validate_requirements_properties(value) - allowed_reqs.append(r1) - self._common_validate_field(req, allowed_reqs, - 'requirements') - - def _validate_requirements_properties(self, requirements): - # TO-DO(anyone): Only occurrences property of the requirements is - # validated here. Validation of other requirement properties are being - # validated in different files. Better to keep all the requirements - # properties validation here. 
- for key, value in requirements.items(): - if key == 'occurrences': - self._validate_occurrences(value) - break - - def _validate_occurrences(self, occurrences): - DataEntity.validate_datatype('list', occurrences) - for value in occurrences: - DataEntity.validate_datatype('integer', value) - if len(occurrences) != 2 or not (0 <= occurrences[0] <= occurrences[1]) \ - or occurrences[1] == 0: - ValidationIssueCollector.appendException( - InvalidPropertyValueError(what=(occurrences))) - - def _validate_requirements_keys(self, requirement): - for key in requirement.keys(): - if key not in self.REQUIREMENTS_SECTION: - ValidationIssueCollector.appendException( - UnknownFieldError( - what='"requirements" of template "%s"' % self.name, - field=key)) - - def _validate_interfaces(self): - ifaces = self.type_definition.get_value(self.INTERFACES, - self.entity_tpl) - if ifaces: - for name, value in ifaces.items(): - if name in (LIFECYCLE, LIFECYCLE_SHORTNAME): - self._common_validate_field( - value, InterfacesDef. - interfaces_node_lifecycle_operations, - 'interfaces') - elif name in (CONFIGURE, CONFIGURE_SHORTNAME): - self._common_validate_field( - value, InterfacesDef. 
- interfaces_relationship_configure_operations, - 'interfaces') - elif name in self.type_definition.interfaces.keys(): - self._common_validate_field( - value, - self._collect_custom_iface_operations(name), - 'interfaces') - else: - ValidationIssueCollector.appendException( - UnknownFieldError( - what='"interfaces" of template "%s"' % - self.name, field=name)) - - def _collect_custom_iface_operations(self, name): - allowed_operations = [] - nodetype_iface_def = self.type_definition.interfaces[name] - allowed_operations.extend(nodetype_iface_def.keys()) - if 'type' in nodetype_iface_def: - iface_type = nodetype_iface_def['type'] - if iface_type in self.type_definition.custom_def: - iface_type_def = self.type_definition.custom_def[iface_type] - else: - iface_type_def = self.type_definition.TOSCA_DEF[iface_type] - allowed_operations.extend(iface_type_def.keys()) - allowed_operations = [op for op in allowed_operations if - op not in INTERFACE_DEF_RESERVED_WORDS] - return allowed_operations - - def _validate_fields(self, nodetemplate): - for name in nodetemplate.keys(): - if name not in self.SECTIONS and name not in self.SPECIAL_SECTIONS: - ValidationIssueCollector.appendException( - UnknownFieldError(what='Node template "%s"' % self.name, - field=name))*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/Policy.java b/src/main/java/org/onap/sdc/toscaparser/api/Policy.java deleted file mode 100644 index ca8ac55..0000000 --- a/src/main/java/org/onap/sdc/toscaparser/api/Policy.java +++ /dev/null @@ -1,232 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * SDC - * ================================================================================ - * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. 
- * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package org.onap.sdc.toscaparser.api; - -import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; - -import java.util.ArrayList; -import java.util.LinkedHashMap; -import java.util.Map; - -import org.onap.sdc.toscaparser.api.elements.Metadata; -import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; -import org.onap.sdc.toscaparser.api.utils.ValidateUtils; - -public class Policy extends EntityTemplate { - - - static final String TYPE = "type"; - static final String METADATA = "metadata"; - static final String DESCRIPTION = "description"; - static final String PROPERTIES = "properties"; - static final String TARGETS = "targets"; - private static final String TRIGGERS = "triggers"; - private static final String SECTIONS[] = { - TYPE, METADATA, DESCRIPTION, PROPERTIES, TARGETS, TRIGGERS}; - - Metadata metaDataObject; - LinkedHashMap metaData = null; - ArrayList targetsList; // *** a list of NodeTemplate OR a list of Group *** - String targetsType; - ArrayList triggers; - LinkedHashMap properties; - - public Policy(String _name, - LinkedHashMap _policy, - ArrayList targetObjects, - String _targetsType, - LinkedHashMap _customDef) { - this(_name, _policy, targetObjects, _targetsType, _customDef, null); - } - - public Policy(String 
_name, - LinkedHashMap _policy, -// ArrayList targetObjects, - ArrayList targetObjects, - String _targetsType, - LinkedHashMap _customDef, NodeTemplate parentNodeTemplate) { - super(_name, _policy, "policy_type", _customDef, parentNodeTemplate); - - if (_policy.get(METADATA) != null) { - metaData = (LinkedHashMap) _policy.get(METADATA); - ValidateUtils.validateMap(metaData); - metaDataObject = new Metadata(metaData); - } - - targetsList = targetObjects; - targetsType = _targetsType; - triggers = _triggers((LinkedHashMap) _policy.get(TRIGGERS)); - properties = null; - if (_policy.get("properties") != null) { - properties = (LinkedHashMap) _policy.get("properties"); - } - _validateKeys(); - } - - public ArrayList getTargets() { - return (ArrayList) entityTpl.get("targets"); - } - - public ArrayList getDescription() { - return (ArrayList) entityTpl.get("description"); - } - - public ArrayList getmetadata() { - return (ArrayList) entityTpl.get("metadata"); - } - - public String getTargetsType() { - return targetsType; - } - - public Metadata getMetaDataObj() { - return metaDataObject; - } - - public LinkedHashMap getMetaData() { - return metaData; - } - - // public ArrayList getTargetsList() { - public ArrayList getTargetsList() { - return targetsList; - } - - // entityTemplate already has a different getProperties... 
- // this is to access the local properties variable - public LinkedHashMap getPolicyProperties() { - return properties; - } - - private ArrayList _triggers(LinkedHashMap triggers) { - ArrayList triggerObjs = new ArrayList<>(); - if (triggers != null) { - for (Map.Entry me : triggers.entrySet()) { - String tname = me.getKey(); - LinkedHashMap ttriggerTpl = - (LinkedHashMap) me.getValue(); - Triggers triggersObj = new Triggers(tname, ttriggerTpl); - triggerObjs.add(triggersObj); - } - } - return triggerObjs; - } - - private void _validateKeys() { - for (String key : entityTpl.keySet()) { - boolean bFound = false; - for (int i = 0; i < SECTIONS.length; i++) { - if (key.equals(SECTIONS[i])) { - bFound = true; - break; - } - } - if (!bFound) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE219", String.format( - "UnknownFieldError: Policy \"%s\" contains unknown field \"%s\"", - name, key))); - } - } - } - - @Override - public String toString() { - return "Policy{" + - "metaData=" + metaData + - ", targetsList=" + targetsList + - ", targetsType='" + targetsType + '\'' + - ", triggers=" + triggers + - ", properties=" + properties + - '}'; - } - - public int compareTo(Policy other) { - if (this.equals(other)) - return 0; - return this.getName().compareTo(other.getName()) == 0 ? 
this.getType().compareTo(other.getType()) : this.getName().compareTo(other.getName()); - } -} - -/*python - -from toscaparser.common.exception import ValidationIssueCollector -from toscaparser.common.exception import UnknownFieldError -from toscaparser.entity_template import EntityTemplate -from toscaparser.triggers import Triggers -from toscaparser.utils import validateutils - - -SECTIONS = (TYPE, METADATA, DESCRIPTION, PROPERTIES, TARGETS, TRIGGERS) = \ - ('type', 'metadata', 'description', - 'properties', 'targets', 'triggers') - -log = logging.getLogger('tosca') - - -class Policy(EntityTemplate): - '''Policies defined in Topology template.''' - def __init__(self, name, policy, targets, targets_type, custom_def=None): - super(Policy, self).__init__(name, - policy, - 'policy_type', - custom_def) - self.meta_data = None - if self.METADATA in policy: - self.meta_data = policy.get(self.METADATA) - validateutils.validate_map(self.meta_data) - self.targets_list = targets - self.targets_type = targets_type - self.triggers = self._triggers(policy.get(TRIGGERS)) - self._validate_keys() - - @property - def targets(self): - return self.entity_tpl.get('targets') - - @property - def description(self): - return self.entity_tpl.get('description') - - @property - def metadata(self): - return self.entity_tpl.get('metadata') - - def get_targets_type(self): - return self.targets_type - - def get_targets_list(self): - return self.targets_list - - def _triggers(self, triggers): - triggerObjs = [] - if triggers: - for name, trigger_tpl in triggers.items(): - triggersObj = Triggers(name, trigger_tpl) - triggerObjs.append(triggersObj) - return triggerObjs - - def _validate_keys(self): - for key in self.entity_tpl.keys(): - if key not in SECTIONS: - ValidationIssueCollector.appendException( - UnknownFieldError(what='Policy "%s"' % self.name, - field=key)) -*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/Property.java b/src/main/java/org/onap/sdc/toscaparser/api/Property.java 
deleted file mode 100644 index e20bd2f..0000000 --- a/src/main/java/org/onap/sdc/toscaparser/api/Property.java +++ /dev/null @@ -1,401 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * SDC - * ================================================================================ - * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package org.onap.sdc.toscaparser.api; - -import com.google.common.collect.Lists; -import org.onap.sdc.toscaparser.api.elements.constraints.Constraint; -import org.onap.sdc.toscaparser.api.elements.constraints.Schema; -import org.onap.sdc.toscaparser.api.functions.Function; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.ArrayList; -import java.util.Collections; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.stream.Collectors; - -public class Property { - // TOSCA built-in Property type - private static final Logger LOGGER = LoggerFactory.getLogger(Property.class.getName()); - - private static final String TYPE = "type"; - private static final String REQUIRED = "required"; - private static final String DESCRIPTION = "description"; - private static final String DEFAULT = "default"; - private static final String CONSTRAINTS = "constraints"; - private static String entrySchema = "entry_schema"; - private static String dataType = "datatypes"; - - private static final String[] PROPERTY_KEYS = { - TYPE, REQUIRED, DESCRIPTION, DEFAULT, CONSTRAINTS}; - - private static final String ENTRYTYPE = "type"; - private static final String ENTRYPROPERTIES = "properties"; - private static final String PATH_DELIMITER = "#"; - private static final String[] ENTRY_SCHEMA_KEYS = { - ENTRYTYPE, ENTRYPROPERTIES}; - - private String name; - private Object value; - private Schema schema; - private LinkedHashMap customDef; - - public Property(Map.Entry propertyEntry) { - name = propertyEntry.getKey(); - value = propertyEntry.getValue(); - } - - public Property(String propname, - Object propvalue, - LinkedHashMap propschemaDict, - LinkedHashMap propcustomDef) { - - name = propname; - value = propvalue; - customDef = propcustomDef; - schema = new Schema(propname, propschemaDict); - } - - public 
String getType() { - return schema.getType(); - } - - public boolean isRequired() { - return schema.isRequired(); - } - - public String getDescription() { - return schema.getDescription(); - } - - public Object getDefault() { - return schema.getDefault(); - } - - public ArrayList getConstraints() { - return schema.getConstraints(); - } - - public LinkedHashMap getEntrySchema() { - return schema.getEntrySchema(); - } - - - public String getName() { - return name; - } - - public Object getValue() { - return value; - } - - // setter - public Object setValue(Object vob) { - value = vob; - return value; - } - - public void validate() { - // Validate if not a reference property - if (!Function.isFunction(value)) { - if (getType().equals(Schema.STRING)) { - value = value.toString(); - } - value = DataEntity.validateDatatype(getType(), value, - getEntrySchema(), - customDef, - name); - validateConstraints(); - } - } - - private void validateConstraints() { - if (getConstraints() != null) { - for (Constraint constraint : getConstraints()) { - constraint.validate(value); - } - } - } - - @Override - public String toString() { - return "Property{" - + "name='" + name + '\'' - + ", value=" + value - + ", schema=" + schema - + ", customDef=" + customDef - + '}'; - } - - /** - * Retrieves property value as list of strings if
- * - the value is simple
- * - the value is list of simple values
- * - the provided path refers to a simple property inside a data type
- * - * @param propertyPath valid name of property for search.
- * If a name refers to a simple field inside a datatype, the property name should be defined with # delimiter.
- * @return List of property values. If not found, empty list will be returned.
- * If property value is a list either of simple fields or of simple fields inside a datatype, all values from the list should be returned - */ - public List getLeafPropertyValue(String propertyPath) { - List propertyValueList = Collections.emptyList(); - - if (LOGGER.isDebugEnabled()) { - LOGGER.debug("getLeafPropertyValue=> A new request: propertyPath: {}, value: {}", propertyPath, getValue()); - } - if (propertyPath == null || getValue() == null - //if entry_schema disappears, it is datatype, - // otherwise it is map of simple types - should be ignored - || isValueMapOfSimpleTypes()) { - LOGGER.error("It is a wrong request - ignoring! propertyPath: {}, value: {}", propertyPath, getValue()); - return propertyValueList; - } - String[] path = propertyPath.split(PATH_DELIMITER); - - if (Schema.isRequestedTypeSimple(getPropertyTypeByPath(path))) { - //the internal property type in the path is either simple or list of simple types - if (isValueInsideDataType()) { - if (LOGGER.isDebugEnabled()) { - LOGGER.debug("The requested is an internal simple property inside of a data type"); - } - //requested value is an internal simple property inside of a data type - propertyValueList = getSimplePropertyValueForComplexType(path); - } else { - if (LOGGER.isDebugEnabled()) { - LOGGER.debug("The requested property has simple type or list of simple types"); - } - //the requested property is simple type or list of simple types - propertyValueList = getSimplePropertyValueForSimpleType(); - } - } - return propertyValueList; - } - - private boolean isValueMapOfSimpleTypes() { - if (getValue() instanceof Map && getEntrySchema() != null) { - LOGGER.warn("This property value is a map of simple types"); - return true; - } - return false; - } - - private boolean isValueInsideDataType() { - //value is either a list of values for data type - //or data type - return (Schema.LIST.equals(getType()) && isDataTypeInEntrySchema()) - || (getEntrySchema() == null && getType().contains(dataType)); - } 
- - private Object getSimpleValueFromComplexObject(Object current, String[] path) { - if (current == null) { - return null; - } - int index = 0; - - if (path.length > index) { - for (int i = index; i < path.length; i++) { - if (current instanceof Map) { - current = ((Map) current).get(path[i]); - } else if (current instanceof List) { - current = ((List) current).get(0); - i--; - } else { - return null; - } - } - } - if (current != null) { - return current; - } - return null; - } - - private List getSimplePropertyValueForSimpleType() { - if (getValue() instanceof List || getValue() instanceof Map) { - return getSimplePropertyValueForComplexType(null); - } - return Lists.newArrayList(String.valueOf(value)); - } - - private List getSimplePropertyValueForComplexType(String[] path) { - if (getValue() instanceof List) { - return ((List) getValue()).stream() - .map(v -> { - if (path != null) { - return getSimpleValueFromComplexObject(v, path); - } else { - return v; - } - }) - //it might be null when get_input can't be resolved - // e.g.: - // - get_input has two parameters: 1. list and 2. 
index in this list - //and list has no value - // - neither value no default is defined for get_input - .filter(Objects::nonNull) - .map(String::valueOf) - .collect(Collectors.toList()); - } - //it is data type - List valueList = Lists.newArrayList(); - String valueString = String.valueOf(getSimpleValueFromComplexObject(getValue(), path)); - if (Objects.nonNull(valueString)) { - valueList.add(valueString); - } - return valueList; - } - - private String getPropertyTypeByPath(String[] path) { - String propertyType = calculatePropertyType(); - - if (path.length > 0 && !path[0].isEmpty()) { - return getInternalPropertyType(propertyType, path, 0); - } - return propertyType; - } - - private String calculatePropertyType() { - String propertyType = getType(); - if (Schema.LIST.equals(propertyType)) { - //if it is list, return entry schema type - return (String) getEntrySchema().get(ENTRYTYPE); - } - return propertyType; - } - - private String calculatePropertyType(LinkedHashMap property) { - String type = (String) property.get(TYPE); - if (Schema.LIST.equals(type)) { - //it might be a data type - return getEntrySchemaType(property); - } - return type; - } - - private String getInternalPropertyType(String dataTypeName, String[] path, int index) { - if (path.length > index) { - LinkedHashMap complexProperty = (LinkedHashMap) customDef.get(dataTypeName); - if (complexProperty != null) { - LinkedHashMap dataTypeProperties = (LinkedHashMap) complexProperty.get(ENTRYPROPERTIES); - return getPropertyTypeFromCustomDefDeeply(path, index, dataTypeProperties); - } - } - //stop searching - seems as wrong flow: the path is finished but the value is not found yet - return null; - } - - private String getEntrySchemaType(LinkedHashMap property) { - LinkedHashMap entrySchema = (LinkedHashMap) property.get(Property.entrySchema); - if (entrySchema != null) { - return (String) entrySchema.get(TYPE); - } - return null; - } - - private String getPropertyTypeFromCustomDefDeeply(String[] path, 
int index, LinkedHashMap properties) { - if (properties != null) { - LinkedHashMap foundProperty = (LinkedHashMap) (properties).get(path[index]); - if (foundProperty != null) { - String propertyType = calculatePropertyType(foundProperty); - if (propertyType == null || index == path.length - 1) { - return propertyType; - } - return getInternalPropertyType(propertyType, path, index + 1); - } - } - return null; - } - - private boolean isDataTypeInEntrySchema() { - String entrySchemaType = (String) getEntrySchema().get(ENTRYTYPE); - return entrySchemaType != null && entrySchemaType.contains(dataType); - } - - -} - -/*python - -class Property(object): - '''TOSCA built-in Property type.''' - - PROPERTY_KEYS = ( - TYPE, REQUIRED, DESCRIPTION, DEFAULT, CONSTRAINTS - ) = ( - 'type', 'required', 'description', 'default', 'constraints' - ) - - ENTRY_SCHEMA_KEYS = ( - ENTRYTYPE, ENTRYPROPERTIES - ) = ( - 'type', 'properties' - ) - - def __init__(self, property_name, value, schema_dict, custom_def=None): - self.name = property_name - self.value = value - self.custom_def = custom_def - self.schema = Schema(property_name, schema_dict) - - @property - def type(self): - return self.schema.type - - @property - def required(self): - return self.schema.required - - @property - def description(self): - return self.schema.description - - @property - def default(self): - return self.schema.default - - @property - def constraints(self): - return self.schema.constraints - - @property - def entry_schema(self): - return self.schema.entry_schema - - def validate(self): - '''Validate if not a reference property.''' - if not is_function(self.value): - if self.type == Schema.STRING: - self.value = str(self.value) - self.value = DataEntity.validate_datatype(self.type, self.value, - self.entry_schema, - self.custom_def, - self.name) - self._validate_constraints() - - def _validate_constraints(self): - if self.constraints: - for constraint in self.constraints: - constraint.validate(self.value) -*/ 
diff --git a/src/main/java/org/onap/sdc/toscaparser/api/RelationshipTemplate.java b/src/main/java/org/onap/sdc/toscaparser/api/RelationshipTemplate.java deleted file mode 100644 index d1a1383..0000000 --- a/src/main/java/org/onap/sdc/toscaparser/api/RelationshipTemplate.java +++ /dev/null @@ -1,227 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * SDC - * ================================================================================ - * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package org.onap.sdc.toscaparser.api; - -import java.util.ArrayList; -import java.util.LinkedHashMap; -import java.util.Map; - -import org.onap.sdc.toscaparser.api.elements.PropertyDef; -import org.onap.sdc.toscaparser.api.elements.StatefulEntityType; -import org.onap.sdc.toscaparser.api.elements.EntityType; - -public class RelationshipTemplate extends EntityTemplate { - - private static final String DERIVED_FROM = "derived_from"; - private static final String PROPERTIES = "properties"; - private static final String REQUIREMENTS = "requirements"; - private static final String INTERFACES = "interfaces"; - private static final String CAPABILITIES = "capabilities"; - private static final String TYPE = "type"; - @SuppressWarnings("unused") - private static final String SECTIONS[] = { - DERIVED_FROM, PROPERTIES, REQUIREMENTS, INTERFACES, CAPABILITIES, TYPE}; - - private String name; - private NodeTemplate target; - private NodeTemplate source; - private ArrayList _properties; - - public RelationshipTemplate(LinkedHashMap rtrelationshipTemplate, - String rtname, - LinkedHashMap rtcustomDef, - NodeTemplate rttarget, - NodeTemplate rtsource) { - this(rtrelationshipTemplate, rtname, rtcustomDef, rttarget, rtsource, null); - } - - public RelationshipTemplate(LinkedHashMap rtrelationshipTemplate, - String rtname, - LinkedHashMap rtcustomDef, - NodeTemplate rttarget, - NodeTemplate rtsource, NodeTemplate parentNodeTemplate) { - super(rtname, rtrelationshipTemplate, "relationship_type", rtcustomDef, parentNodeTemplate); - - name = rtname; - target = rttarget; - source = rtsource; - _properties = null; - } - - public ArrayList getPropertiesObjects() { - // Return properties objects for this template - if (_properties == null) { - _properties = _createRelationshipProperties(); - } - return _properties; - } - - @SuppressWarnings({"unchecked", "unused"}) - public ArrayList 
_createRelationshipProperties() { - ArrayList props = new ArrayList(); - LinkedHashMap properties = new LinkedHashMap(); - LinkedHashMap relationship = (LinkedHashMap) entityTpl.get("relationship"); - - if (relationship == null) { - for (Object val : entityTpl.values()) { - if (val instanceof LinkedHashMap) { - relationship = (LinkedHashMap) ((LinkedHashMap) val).get("relationship"); - break; - } - } - } - - if (relationship != null) { - properties = (LinkedHashMap) ((EntityType) typeDefinition).getValue(PROPERTIES, relationship, false); - } - if (properties == null) { - properties = new LinkedHashMap(); - } - if (properties == null) { - properties = (LinkedHashMap) entityTpl.get(PROPERTIES); - } - if (properties == null) { - properties = new LinkedHashMap(); - } - - if (properties != null) { - for (Map.Entry me : properties.entrySet()) { - String pname = me.getKey(); - Object pvalue = me.getValue(); - LinkedHashMap propsDef = ((StatefulEntityType) typeDefinition).getPropertiesDef(); - if (propsDef != null && propsDef.get(pname) != null) { - if (properties.get(pname) != null) { - pvalue = properties.get(name); - } - PropertyDef pd = (PropertyDef) propsDef.get(pname); - Property prop = new Property(pname, pvalue, pd.getSchema(), customDef); - props.add(prop); - } - } - } - ArrayList pds = ((StatefulEntityType) typeDefinition).getPropertiesDefObjects(); - for (PropertyDef p : pds) { - if (p.getDefault() != null && properties.get(p.getName()) == null) { - Property prop = new Property(p.getName(), (LinkedHashMap) p.getDefault(), p.getSchema(), customDef); - props.add(prop); - } - } - return props; - } - - public void validate() { - _validateProperties(entityTpl, (StatefulEntityType) typeDefinition); - } - - // getters/setters - public NodeTemplate getTarget() { - return target; - } - - public NodeTemplate getSource() { - return source; - } - - public void setSource(NodeTemplate nt) { - source = nt; - } - - public void setTarget(NodeTemplate nt) { - target = nt; - } - - 
@Override - public String toString() { - return "RelationshipTemplate{" + - "name='" + name + '\'' + - ", target=" + target.getName() + - ", source=" + source.getName() + - ", _properties=" + _properties + - '}'; - } - -} - -/*python - -from toscaparser.entity_template import EntityTemplate -from toscaparser.properties import Property - -SECTIONS = (DERIVED_FROM, PROPERTIES, REQUIREMENTS, - INTERFACES, CAPABILITIES, TYPE) = \ - ('derived_from', 'properties', 'requirements', 'interfaces', - 'capabilities', 'type') - -log = logging.getLogger('tosca') - - -class RelationshipTemplate(EntityTemplate): - '''Relationship template.''' - def __init__(self, relationship_template, name, custom_def=None, - target=None, source=None): - super(RelationshipTemplate, self).__init__(name, - relationship_template, - 'relationship_type', - custom_def) - self.name = name.lower() - self.target = target - self.source = source - - def get_properties_objects(self): - '''Return properties objects for this template.''' - if self._properties is None: - self._properties = self._create_relationship_properties() - return self._properties - - def _create_relationship_properties(self): - props = [] - properties = {} - relationship = self.entity_tpl.get('relationship') - - if not relationship: - for value in self.entity_tpl.values(): - if isinstance(value, dict): - relationship = value.get('relationship') - break - - if relationship: - properties = self.type_definition.get_value(self.PROPERTIES, - relationship) or {} - if not properties: - properties = self.entity_tpl.get(self.PROPERTIES) or {} - - if properties: - for name, value in properties.items(): - props_def = self.type_definition.get_properties_def() - if props_def and name in props_def: - if name in properties.keys(): - value = properties.get(name) - prop = Property(name, value, - props_def[name].schema, self.custom_def) - props.append(prop) - for p in self.type_definition.get_properties_def_objects(): - if p.default is not None and p.name 
not in properties.keys(): - prop = Property(p.name, p.default, p.schema, self.custom_def) - props.append(prop) - return props - - def validate(self): - self._validate_properties(self.entity_tpl, self.type_definition)*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/Repository.java b/src/main/java/org/onap/sdc/toscaparser/api/Repository.java deleted file mode 100644 index ee5e5bc..0000000 --- a/src/main/java/org/onap/sdc/toscaparser/api/Repository.java +++ /dev/null @@ -1,137 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * SDC - * ================================================================================ - * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package org.onap.sdc.toscaparser.api; - -import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; -import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; -import org.onap.sdc.toscaparser.api.utils.UrlUtils; - -import java.util.LinkedHashMap; - -public class Repository { - - private static final String DESCRIPTION = "description"; - private static final String URL = "url"; - private static final String CREDENTIAL = "credential"; - private static final String SECTIONS[] = {DESCRIPTION, URL, CREDENTIAL}; - - private String name; - private Object reposit; - private String url; - - @SuppressWarnings("unchecked") - public Repository(String repName, Object repValue) { - name = repName; - reposit = repValue; - if (reposit instanceof LinkedHashMap) { - url = (String) ((LinkedHashMap) reposit).get("url"); - if (url == null) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE229", String.format( - "MissingRequiredFieldError: Repository \"%s\" is missing required field \"url\"", - name))); - } - } - loadAndValidate(name, reposit); - } - - @SuppressWarnings("unchecked") - private void loadAndValidate(String val, Object repositDef) { - String keyname = val; - if (repositDef instanceof LinkedHashMap) { - for (String key : ((LinkedHashMap) reposit).keySet()) { - boolean bFound = false; - for (String sect : SECTIONS) { - if (key.equals(sect)) { - bFound = true; - break; - } - } - if (!bFound) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE230", String.format( - "UnknownFieldError: repositories \"%s\" contains unknown field \"%s\"", - keyname, key))); - } - } - - String repositUrl = (String) ((LinkedHashMap) repositDef).get("url"); - if (repositUrl != null) { - boolean urlVal = UrlUtils.validateUrl(repositUrl); - if (!urlVal) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new 
JToscaValidationIssue("JE231", String.format( - "URLException: repsositories \"%s\" Invalid Url", keyname))); - } - } - } - } - - @Override - public String toString() { - return "Repository{" + - "name='" + name + '\'' + - ", reposit=" + reposit + - ", url='" + url + '\'' + - '}'; - } -} - -/*python - -from toscaparser.common.exception import ValidationIssueCollector -from toscaparser.common.exception import MissingRequiredFieldError -from toscaparser.common.exception import UnknownFieldError -from toscaparser.common.exception import URLException -from toscaparser.utils.gettextutils import _ -import org.openecomp.sdc.toscaparser.api.utils.urlutils - -SECTIONS = (DESCRIPTION, URL, CREDENTIAL) = \ - ('description', 'url', 'credential') - - -class Repository(object): - def __init__(self, repositories, values): - self.name = repositories - self.reposit = values - if isinstance(self.reposit, dict): - if 'url' not in self.reposit.keys(): - ValidationIssueCollector.appendException( - MissingRequiredFieldError(what=_('Repository "%s"') - % self.name, required='url')) - self.url = self.reposit['url'] - self.load_and_validate(self.name, self.reposit) - - def load_and_validate(self, val, reposit_def): - self.keyname = val - if isinstance(reposit_def, dict): - for key in reposit_def.keys(): - if key not in SECTIONS: - ValidationIssueCollector.appendException( - UnknownFieldError(what=_('repositories "%s"') - % self.keyname, field=key)) - - if URL in reposit_def.keys(): - reposit_url = reposit_def.get(URL) - url_val = toscaparser.utils.urlutils.UrlUtils.\ - validate_url(reposit_url) - if url_val is not True: - ValidationIssueCollector.appendException( - URLException(what=_('repsositories "%s" Invalid Url') - % self.keyname)) -*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/RequirementAssignment.java b/src/main/java/org/onap/sdc/toscaparser/api/RequirementAssignment.java deleted file mode 100644 index 227b2a9..0000000 --- 
a/src/main/java/org/onap/sdc/toscaparser/api/RequirementAssignment.java +++ /dev/null @@ -1,111 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * SDC - * ================================================================================ - * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package org.onap.sdc.toscaparser.api; - - -public class RequirementAssignment { - - private String name; - private String nodeName; - private String capabilityName; - private Object relationship; - - public RequirementAssignment(String reqName, String nodeName) { - this.name = reqName; - this.nodeName = nodeName; - } - - public RequirementAssignment(String reqName, String nodeName, String capabilityName) { - this.name = reqName; - this.nodeName = nodeName; - this.capabilityName = capabilityName; - } - - public RequirementAssignment(String reqName, String nodeName, String capabilityName, Object relationship) { - this.name = reqName; - this.nodeName = nodeName; - this.capabilityName = capabilityName; - this.relationship = relationship; - } - - /** - * Get the name for requirement assignment. - * - * @return the name for requirement assignment. 
- */ - public String getName() { - return name; - } - - /** - * Set the name for requirement - * - * @param name - the name for requirement to set - */ - public void setName(String name) { - this.name = name; - } - - /** - * Get the node name for requirement assignment. - * - * @return the node name for requirement - */ - public String getNodeTemplateName() { - return nodeName; - } - - /** - * Set the node name for requirement - * - * @param nodeName - the node name for requirement to set - */ - public void setNodeTemplateName(String nodeName) { - this.nodeName = nodeName; - } - - /** - * Get the capability name for requirement assignment. - * - * @return the capability name for requirement - */ - public String getCapabilityName() { - return capabilityName; - } - - /** - * Set the capability name for requirement assignment. - * - * @param capabilityName - the capability name for requirement to set - */ - public void setCapabilityName(String capabilityName) { - this.capabilityName = capabilityName; - } - - /** - * Get the relationship object for requirement - * - * @return the relationship object for requirement - */ - public Object getRelationship() { - return relationship; - } -} diff --git a/src/main/java/org/onap/sdc/toscaparser/api/RequirementAssignments.java b/src/main/java/org/onap/sdc/toscaparser/api/RequirementAssignments.java deleted file mode 100644 index 2ba6230..0000000 --- a/src/main/java/org/onap/sdc/toscaparser/api/RequirementAssignments.java +++ /dev/null @@ -1,59 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * SDC - * ================================================================================ - * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package org.onap.sdc.toscaparser.api; - -import java.util.ArrayList; -import java.util.List; -import java.util.stream.Collectors; - -public class RequirementAssignments { - - private List requirementAssignmentList; - - public RequirementAssignments(List requirementAssignments) { - this.requirementAssignmentList = requirementAssignments != null ? new ArrayList<>(requirementAssignments) : new ArrayList<>(); - } - - /** - * Get all requirement assignments for Node Template.
- * This object can be either the original one, holding all requirement assignments for this node template,or a filtered one, holding a filtered subset.
- * - * @return list of requirement assignments for the node template.
- * If there are no requirement assignments, empty list is returned. - */ - public List getAll() { - return new ArrayList<>(requirementAssignmentList); - } - - /** - * Filter requirement assignments by requirement name. - * - * @param reqName - The name of requirement - * @return RequirementAssignments object, containing requirement assignments of this type.
- * If no such found, filtering will result in an empty collection. - */ - public RequirementAssignments getRequirementsByName(String reqName) { - List requirementAssignments = requirementAssignmentList.stream() - .filter(req -> req.getName().equals(reqName)).collect(Collectors.toList()); - - return new RequirementAssignments(requirementAssignments); - } -} diff --git a/src/main/java/org/onap/sdc/toscaparser/api/SubstitutionMappings.java b/src/main/java/org/onap/sdc/toscaparser/api/SubstitutionMappings.java deleted file mode 100644 index a622a9a..0000000 --- a/src/main/java/org/onap/sdc/toscaparser/api/SubstitutionMappings.java +++ /dev/null @@ -1,539 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * SDC - * ================================================================================ - * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package org.onap.sdc.toscaparser.api; - -import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; -import org.onap.sdc.toscaparser.api.elements.NodeType; -import org.onap.sdc.toscaparser.api.elements.PropertyDef; -import org.onap.sdc.toscaparser.api.parameters.Input; -import org.onap.sdc.toscaparser.api.parameters.Output; -import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; - -import java.util.ArrayList; -import java.util.HashSet; -import java.util.LinkedHashMap; -import java.util.List; - - -public class SubstitutionMappings { - // SubstitutionMappings class declaration - - // SubstitutionMappings exports the topology template as an - // implementation of a Node type. - - private static final String NODE_TYPE = "node_type"; - private static final String REQUIREMENTS = "requirements"; - private static final String CAPABILITIES = "capabilities"; - - private static final String SECTIONS[] = {NODE_TYPE, REQUIREMENTS, CAPABILITIES}; - - private static final String OPTIONAL_OUTPUTS[] = {"tosca_id", "tosca_name", "state"}; - - private LinkedHashMap subMappingDef; - private ArrayList nodetemplates; - private ArrayList inputs; - private ArrayList outputs; - private ArrayList groups; - private NodeTemplate subMappedNodeTemplate; - private LinkedHashMap customDefs; - private LinkedHashMap _capabilities; - private LinkedHashMap _requirements; - - public SubstitutionMappings(LinkedHashMap smsubMappingDef, - ArrayList smnodetemplates, - ArrayList sminputs, - ArrayList smoutputs, - ArrayList smgroups, - NodeTemplate smsubMappedNodeTemplate, - LinkedHashMap smcustomDefs) { - - subMappingDef = smsubMappingDef; - nodetemplates = smnodetemplates; - inputs = sminputs != null ? sminputs : new ArrayList(); - outputs = smoutputs != null ? smoutputs : new ArrayList(); - groups = smgroups != null ? 
smgroups : new ArrayList(); - subMappedNodeTemplate = smsubMappedNodeTemplate; - customDefs = smcustomDefs != null ? smcustomDefs : new LinkedHashMap(); - _validate(); - - _capabilities = null; - _requirements = null; - } - - public String getType() { - if (subMappingDef != null) { - return (String) subMappingDef.get(NODE_TYPE); - } - return null; - } - - public ArrayList getNodeTemplates() { - return nodetemplates; - } - - /* - @classmethod - def get_node_type(cls, sub_mapping_def): - if isinstance(sub_mapping_def, dict): - return sub_mapping_def.get(cls.NODE_TYPE) - */ - - public static String stGetNodeType(LinkedHashMap _subMappingDef) { - if (_subMappingDef instanceof LinkedHashMap) { - return (String) _subMappingDef.get(NODE_TYPE); - } - return null; - } - - public String getNodeType() { - return (String) subMappingDef.get(NODE_TYPE); - } - - public ArrayList getInputs() { - return inputs; - } - - public ArrayList getGroups() { - return groups; - } - - public LinkedHashMap getCapabilities() { - return (LinkedHashMap) subMappingDef.get(CAPABILITIES); - } - - public LinkedHashMap getRequirements() { - return (LinkedHashMap) subMappingDef.get(REQUIREMENTS); - } - - public NodeType getNodeDefinition() { - return new NodeType(getNodeType(), customDefs); - } - - private void _validate() { - // Basic validation - _validateKeys(); - _validateType(); - - // SubstitutionMapping class syntax validation - _validateInputs(); - _validateCapabilities(); - _validateRequirements(); - _validateOutputs(); - } - - private void _validateKeys() { - // validate the keys of substitution mappings - for (String key : subMappingDef.keySet()) { - boolean bFound = false; - for (String s : SECTIONS) { - if (s.equals(key)) { - bFound = true; - break; - } - } - if (!bFound) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE232", String.format( - "UnknownFieldError: SubstitutionMappings contain unknown field \"%s\"", - key))); - } - } - } - - private 
void _validateType() { - // validate the node_type of substitution mappings - String nodeType = (String) subMappingDef.get(NODE_TYPE); - if (nodeType == null) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE233", String.format( - "MissingRequiredFieldError: SubstitutionMappings used in topology_template is missing required field \"%s\"", - NODE_TYPE))); - } - Object nodeTypeDef = customDefs.get(nodeType); - if (nodeTypeDef == null) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE234", String.format( - "InvalidNodeTypeError: \"%s\" is invalid", nodeType))); - } - } - - private void _validateInputs() { - // validate the inputs of substitution mappings. - - // The inputs defined by the topology template have to match the - // properties of the node type or the substituted node. If there are - // more inputs than the substituted node has properties, default values - //must be defined for those inputs. - - HashSet allInputs = new HashSet<>(); - for (Input inp : inputs) { - allInputs.add(inp.getName()); - } - HashSet requiredProperties = new HashSet<>(); - for (PropertyDef pd : getNodeDefinition().getPropertiesDefObjects()) { - if (pd.isRequired() && pd.getDefault() == null) { - requiredProperties.add(pd.getName()); - } - } - // Must provide inputs for required properties of node type. - for (String property : requiredProperties) { - // Check property which is 'required' and has no 'default' value - if (!allInputs.contains(property)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE235", String.format( - "MissingRequiredInputError: SubstitutionMappings with node_type \"%s\" is missing required input \"%s\"", - getNodeType(), property))); - } - } - // If the optional properties of node type need to be customized by - // substituted node, it also is necessary to define inputs for them, - // otherwise they are not mandatory to be defined. 
- HashSet customizedParameters = new HashSet<>(); - if (subMappedNodeTemplate != null) { - customizedParameters.addAll(subMappedNodeTemplate.getProperties().keySet()); - } - HashSet allProperties = new HashSet( - getNodeDefinition().getPropertiesDef().keySet()); - HashSet diffset = customizedParameters; - diffset.removeAll(allInputs); - for (String parameter : diffset) { - if (allProperties.contains(parameter)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE236", String.format( - "MissingRequiredInputError: SubstitutionMappings with node_type \"%s\" is missing required input \"%s\"", - getNodeType(), parameter))); - } - } - // Additional inputs are not in the properties of node type must - // provide default values. Currently the scenario may not happen - // because of parameters validation in nodetemplate, here is a - // guarantee. - for (Input inp : inputs) { - diffset = allInputs; - diffset.removeAll(allProperties); - if (diffset.contains(inp.getName()) && inp.getDefault() == null) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE237", String.format( - "MissingRequiredInputError: SubstitutionMappings with node_type \"%s\" is missing rquired input \"%s\"", - getNodeType(), inp.getName()))); - } - } - } - - private void _validateCapabilities() { - // validate the capabilities of substitution mappings - - // The capabilities must be in node template which be mapped. 
- LinkedHashMap tplsCapabilities = - (LinkedHashMap) subMappingDef.get(CAPABILITIES); - List nodeCapabilities = null; - if (subMappedNodeTemplate != null) { - nodeCapabilities = subMappedNodeTemplate.getCapabilities().getAll(); - } - if (nodeCapabilities != null) { - for (CapabilityAssignment cap : nodeCapabilities) { - if (tplsCapabilities != null && tplsCapabilities.get(cap.getName()) == null) { - ; //pass - // ValidationIssueCollector.appendException( - // UnknownFieldError(what='SubstitutionMappings', - // field=cap)) - } - } - } - } - - private void _validateRequirements() { - // validate the requirements of substitution mappings - //***************************************************** - //TO-DO - Different from Python code!! one is a bug... - //***************************************************** - // The requirements must be in node template which be mapped. - LinkedHashMap tplsRequirements = - (LinkedHashMap) subMappingDef.get(REQUIREMENTS); - List nodeRequirements = null; - if (subMappedNodeTemplate != null) { - nodeRequirements = subMappedNodeTemplate.getRequirements().getAll(); - } - if (nodeRequirements != null) { - for (RequirementAssignment ro : nodeRequirements) { - String cap = ro.getName(); - if (tplsRequirements != null && tplsRequirements.get(cap) == null) { - ; //pass - // ValidationIssueCollector.appendException( - // UnknownFieldError(what='SubstitutionMappings', - // field=cap)) - } - } - } - } - - private void _validateOutputs() { - // validate the outputs of substitution mappings. - - // The outputs defined by the topology template have to match the - // attributes of the node type or the substituted node template, - // and the observable attributes of the substituted node template - // have to be defined as attributes of the node type or outputs in - // the topology template. 
- - // The outputs defined by the topology template have to match the - // attributes of the node type according to the specification, but - // it's reasonable that there are more inputs than the node type - // has properties, the specification will be amended? - - for (Output output : outputs) { - Object ado = getNodeDefinition().getAttributesDef(); - if (ado != null && ((LinkedHashMap) ado).get(output.getName()) == null) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE238", String.format( - "UnknownOutputError: Unknown output \"%s\" in SubstitutionMappings with node_type \"%s\"", - output.getName(), getNodeType()))); - } - } - } - - @Override - public String toString() { - return "SubstitutionMappings{" + -// "subMappingDef=" + subMappingDef + -// ", nodetemplates=" + nodetemplates + -// ", inputs=" + inputs + -// ", outputs=" + outputs + -// ", groups=" + groups + - ", subMappedNodeTemplate=" + (subMappedNodeTemplate == null ? "" : subMappedNodeTemplate.getName()) + -// ", customDefs=" + customDefs + -// ", _capabilities=" + _capabilities + -// ", _requirements=" + _requirements + - '}'; - } - - @Deprecated - public String toLimitedString() { - return "SubstitutionMappings{" + - "subMappingDef=" + subMappingDef + - ", nodetemplates=" + nodetemplates + - ", inputs=" + inputs + - ", outputs=" + outputs + - ", groups=" + groups + - ", subMappedNodeTemplate=" + (subMappedNodeTemplate == null ? 
"" : subMappedNodeTemplate.getName()) + - ", customDefs=" + customDefs + - ", _capabilities=" + _capabilities + - ", _requirements=" + _requirements + - '}'; - } -} - - -/*python - -from toscaparser.common.exception import ValidationIssueCollector -from toscaparser.common.exception import InvalidNodeTypeError -from toscaparser.common.exception import MissingDefaultValueError -from toscaparser.common.exception import MissingRequiredFieldError -from toscaparser.common.exception import MissingRequiredInputError -from toscaparser.common.exception import UnknownFieldError -from toscaparser.common.exception import UnknownOutputError -from toscaparser.elements.nodetype import NodeType -from toscaparser.utils.gettextutils import _ - -log = logging.getLogger('tosca') - - -class SubstitutionMappings(object): - '''SubstitutionMappings class declaration - - SubstitutionMappings exports the topology template as an - implementation of a Node type. - ''' - - SECTIONS = (NODE_TYPE, REQUIREMENTS, CAPABILITIES) = \ - ('node_type', 'requirements', 'capabilities') - - OPTIONAL_OUTPUTS = ['tosca_id', 'tosca_name', 'state'] - - def __init__(self, sub_mapping_def, nodetemplates, inputs, outputs, - sub_mapped_node_template, custom_defs): - self.nodetemplates = nodetemplates - self.sub_mapping_def = sub_mapping_def - self.inputs = inputs or [] - self.outputs = outputs or [] - self.sub_mapped_node_template = sub_mapped_node_template - self.custom_defs = custom_defs or {} - self._validate() - - self._capabilities = None - self._requirements = None - - @property - def type(self): - if self.sub_mapping_def: - return self.sub_mapping_def.get(self.NODE_TYPE) - - @classmethod - def get_node_type(cls, sub_mapping_def): - if isinstance(sub_mapping_def, dict): - return sub_mapping_def.get(cls.NODE_TYPE) - - @property - def node_type(self): - return self.sub_mapping_def.get(self.NODE_TYPE) - - @property - def capabilities(self): - return self.sub_mapping_def.get(self.CAPABILITIES) - - @property - def 
requirements(self): - return self.sub_mapping_def.get(self.REQUIREMENTS) - - @property - def node_definition(self): - return NodeType(self.node_type, self.custom_defs) - - def _validate(self): - # Basic validation - self._validate_keys() - self._validate_type() - - # SubstitutionMapping class syntax validation - self._validate_inputs() - self._validate_capabilities() - self._validate_requirements() - self._validate_outputs() - - def _validate_keys(self): - """validate the keys of substitution mappings.""" - for key in self.sub_mapping_def.keys(): - if key not in self.SECTIONS: - ValidationIssueCollector.appendException( - UnknownFieldError(what=_('SubstitutionMappings'), - field=key)) - - def _validate_type(self): - """validate the node_type of substitution mappings.""" - node_type = self.sub_mapping_def.get(self.NODE_TYPE) - if not node_type: - ValidationIssueCollector.appendException( - MissingRequiredFieldError( - what=_('SubstitutionMappings used in topology_template'), - required=self.NODE_TYPE)) - - node_type_def = self.custom_defs.get(node_type) - if not node_type_def: - ValidationIssueCollector.appendException( - InvalidNodeTypeError(what=node_type)) - - def _validate_inputs(self): - """validate the inputs of substitution mappings. - - The inputs defined by the topology template have to match the - properties of the node type or the substituted node. If there are - more inputs than the substituted node has properties, default values - must be defined for those inputs. - """ - - all_inputs = set([input.name for input in self.inputs]) - required_properties = set([p.name for p in - self.node_definition. - get_properties_def_objects() - if p.required and p.default is None]) - # Must provide inputs for required properties of node type. 
- for property in required_properties: - # Check property which is 'required' and has no 'default' value - if property not in all_inputs: - ValidationIssueCollector.appendException( - MissingRequiredInputError( - what=_('SubstitutionMappings with node_type ') - + self.node_type, - input_name=property)) - - # If the optional properties of node type need to be customized by - # substituted node, it also is necessary to define inputs for them, - # otherwise they are not mandatory to be defined. - customized_parameters = set(self.sub_mapped_node_template - .get_properties().keys() - if self.sub_mapped_node_template else []) - all_properties = set(self.node_definition.get_properties_def()) - for parameter in customized_parameters - all_inputs: - if parameter in all_properties: - ValidationIssueCollector.appendException( - MissingRequiredInputError( - what=_('SubstitutionMappings with node_type ') - + self.node_type, - input_name=parameter)) - - # Additional inputs are not in the properties of node type must - # provide default values. Currently the scenario may not happen - # because of parameters validation in nodetemplate, here is a - # guarantee. - for input in self.inputs: - if input.name in all_inputs - all_properties \ - and input.default is None: - ValidationIssueCollector.appendException( - MissingDefaultValueError( - what=_('SubstitutionMappings with node_type ') - + self.node_type, - input_name=input.name)) - - def _validate_capabilities(self): - """validate the capabilities of substitution mappings.""" - - # The capabilites must be in node template wchich be mapped. 
- tpls_capabilities = self.sub_mapping_def.get(self.CAPABILITIES) - node_capabiliteys = self.sub_mapped_node_template.get_capabilities() \ - if self.sub_mapped_node_template else None - for cap in node_capabiliteys.keys() if node_capabiliteys else []: - if (tpls_capabilities and - cap not in list(tpls_capabilities.keys())): - pass - # ValidationIssueCollector.appendException( - # UnknownFieldError(what='SubstitutionMappings', - # field=cap)) - - def _validate_requirements(self): - """validate the requirements of substitution mappings.""" - - # The requirements must be in node template wchich be mapped. - tpls_requirements = self.sub_mapping_def.get(self.REQUIREMENTS) - node_requirements = self.sub_mapped_node_template.requirements \ - if self.sub_mapped_node_template else None - for req in node_requirements if node_requirements else []: - if (tpls_requirements and - req not in list(tpls_requirements.keys())): - pass - # ValidationIssueCollector.appendException( - # UnknownFieldError(what='SubstitutionMappings', - # field=req)) - - def _validate_outputs(self): - """validate the outputs of substitution mappings. - - The outputs defined by the topology template have to match the - attributes of the node type or the substituted node template, - and the observable attributes of the substituted node template - have to be defined as attributes of the node type or outputs in - the topology template. - """ - - # The outputs defined by the topology template have to match the - # attributes of the node type according to the specification, but - # it's reasonable that there are more inputs than the node type - # has properties, the specification will be amended? 
- for output in self.outputs: - if output.name not in self.node_definition.get_attributes_def(): - ValidationIssueCollector.appendException( - UnknownOutputError( - where=_('SubstitutionMappings with node_type ') - + self.node_type, - output_name=output.name))*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/TopologyTemplate.java b/src/main/java/org/onap/sdc/toscaparser/api/TopologyTemplate.java deleted file mode 100644 index efc6948..0000000 --- a/src/main/java/org/onap/sdc/toscaparser/api/TopologyTemplate.java +++ /dev/null @@ -1,866 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * SDC - * ================================================================================ - * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package org.onap.sdc.toscaparser.api; - -import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; -import org.onap.sdc.toscaparser.api.elements.InterfacesDef; -import org.onap.sdc.toscaparser.api.elements.NodeType; -import org.onap.sdc.toscaparser.api.elements.RelationshipType; -import org.onap.sdc.toscaparser.api.functions.Function; -import org.onap.sdc.toscaparser.api.functions.GetAttribute; -import org.onap.sdc.toscaparser.api.functions.GetInput; -import org.onap.sdc.toscaparser.api.parameters.Input; -import org.onap.sdc.toscaparser.api.parameters.Output; -import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; - -import java.util.ArrayList; -import java.util.HashSet; -import java.util.LinkedHashMap; -import java.util.Map; - -public class TopologyTemplate { - - private static final String DESCRIPTION = "description"; - private static final String INPUTS = "inputs"; - private static final String NODE_TEMPLATES = "node_templates"; - private static final String RELATIONSHIP_TEMPLATES = "relationship_templates"; - private static final String OUTPUTS = "outputs"; - private static final String GROUPS = "groups"; - private static final String SUBSTITUTION_MAPPINGS = "substitution_mappings"; - private static final String POLICIES = "policies"; - private static final String METADATA = "metadata"; - - private static String[] SECTIONS = { - DESCRIPTION, INPUTS, NODE_TEMPLATES, RELATIONSHIP_TEMPLATES, - OUTPUTS, GROUPS, SUBSTITUTION_MAPPINGS, POLICIES, METADATA - }; - - private LinkedHashMap tpl; - LinkedHashMap metaData; - private ArrayList inputs; - private ArrayList outputs; - private ArrayList relationshipTemplates; - private ArrayList nodeTemplates; - private LinkedHashMap customDefs; - private LinkedHashMap relTypes;//TYPE - private NodeTemplate subMappedNodeTemplate; - private ArrayList groups; - private ArrayList policies; - private LinkedHashMap parsedParams 
= null;//TYPE - private String description; - private ToscaGraph graph; - private SubstitutionMappings substitutionMappings; - private boolean resolveGetInput; - - public TopologyTemplate( - LinkedHashMap _template, - LinkedHashMap _customDefs, - LinkedHashMap _relTypes,//TYPE - LinkedHashMap _parsedParams, - NodeTemplate _subMappedNodeTemplate, - boolean _resolveGetInput) { - - tpl = _template; - if (tpl != null) { - subMappedNodeTemplate = _subMappedNodeTemplate; - metaData = _metaData(); - customDefs = _customDefs; - relTypes = _relTypes; - parsedParams = _parsedParams; - resolveGetInput = _resolveGetInput; - _validateField(); - description = _tplDescription(); - inputs = _inputs(); - relationshipTemplates = _relationshipTemplates(); - //todo: pass subMappedNodeTemplate to ET constractor - nodeTemplates = _nodeTemplates(); - outputs = _outputs(); - if (nodeTemplates != null) { - graph = new ToscaGraph(nodeTemplates); - } - groups = _groups(); - policies = _policies(); - _processIntrinsicFunctions(); - substitutionMappings = _substitutionMappings(); - } - } - - @SuppressWarnings("unchecked") - private ArrayList _inputs() { - ArrayList alInputs = new ArrayList<>(); - for (String name : _tplInputs().keySet()) { - Object attrs = _tplInputs().get(name); - Input input = new Input(name, (LinkedHashMap) attrs, customDefs); - if (parsedParams != null && parsedParams.get(name) != null) { - input.validate(parsedParams.get(name)); - } else { - Object _default = input.getDefault(); - if (_default != null) { - input.validate(_default); - } - } - if ((parsedParams != null && parsedParams.get(input.getName()) == null || parsedParams == null) - && input.isRequired() && input.getDefault() == null) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE003", - String.format("MissingRequiredFieldError: The required input \"%s\" was not provided" - , input.getName())) - ); - } - alInputs.add(input); - } - return alInputs; - - } - - private 
LinkedHashMap _metaData() { - if (tpl.get(METADATA) != null) { - return (LinkedHashMap) tpl.get(METADATA); - } else { - return new LinkedHashMap(); - } - - } - - private ArrayList _nodeTemplates() { - ArrayList alNodeTemplates = new ArrayList<>(); - LinkedHashMap tpls = _tplNodeTemplates(); - if (tpls != null) { - for (String name : tpls.keySet()) { - NodeTemplate tpl = new NodeTemplate(name, - tpls, - customDefs, - relationshipTemplates, - relTypes, - subMappedNodeTemplate); - if (tpl.getTypeDefinition() != null) { - boolean b = NodeType.TOSCA_DEF.get(tpl.getType()) != null; - if (b || (tpl.getCustomDef() != null && !tpl.getCustomDef().isEmpty())) { - tpl.validate(); - alNodeTemplates.add(tpl); - } - } - } - } - return alNodeTemplates; - } - - @SuppressWarnings("unchecked") - private ArrayList _relationshipTemplates() { - ArrayList alRelationshipTemplates = new ArrayList<>(); - LinkedHashMap tpls = _tplRelationshipTemplates(); - if (tpls != null) { - for (String name : tpls.keySet()) { - RelationshipTemplate tpl = new RelationshipTemplate( - (LinkedHashMap) tpls.get(name), name, customDefs, null, null, subMappedNodeTemplate); - - alRelationshipTemplates.add(tpl); - } - } - return alRelationshipTemplates; - } - - private ArrayList _outputs() { - ArrayList alOutputs = new ArrayList<>(); - for (Map.Entry me : _tplOutputs().entrySet()) { - String oname = me.getKey(); - LinkedHashMap oattrs = (LinkedHashMap) me.getValue(); - Output o = new Output(oname, oattrs); - o.validate(); - alOutputs.add(o); - } - return alOutputs; - } - - private SubstitutionMappings _substitutionMappings() { - LinkedHashMap tplSubstitutionMapping = (LinkedHashMap) _tplSubstitutionMappings(); - - //*** the commenting-out below and the weaker condition are in the Python source - // #if tpl_substitution_mapping and self.sub_mapped_node_template: - if (tplSubstitutionMapping != null && tplSubstitutionMapping.size() > 0) { - return new SubstitutionMappings(tplSubstitutionMapping, - nodeTemplates, - 
inputs, - outputs, - groups, - subMappedNodeTemplate, - customDefs); - } - return null; - - } - - @SuppressWarnings("unchecked") - private ArrayList _policies() { - ArrayList alPolicies = new ArrayList<>(); - for (Map.Entry me : _tplPolicies().entrySet()) { - String policyName = me.getKey(); - LinkedHashMap policyTpl = (LinkedHashMap) me.getValue(); - ArrayList targetList = (ArrayList) policyTpl.get("targets"); - ArrayList targetNodes = new ArrayList<>(); - ArrayList targetObjects = new ArrayList<>(); - ArrayList targetGroups = new ArrayList<>(); - String targetsType = "groups"; - if (targetList != null && targetList.size() >= 1) { - targetGroups = _getPolicyGroups(targetList); - if (targetGroups == null || targetGroups.isEmpty()) { - targetsType = "node_templates"; - targetNodes = _getGroupMembers(targetList); - for (NodeTemplate nt : targetNodes) { - targetObjects.add(nt); - } - } else { - for (Group gr : targetGroups) { - targetObjects.add(gr); - } - } - } - Policy policyObj = new Policy(policyName, - policyTpl, - targetObjects, - targetsType, - customDefs, - subMappedNodeTemplate); - alPolicies.add(policyObj); - } - return alPolicies; - } - - private ArrayList _groups() { - ArrayList groups = new ArrayList<>(); - ArrayList memberNodes = null; - for (Map.Entry me : _tplGroups().entrySet()) { - String groupName = me.getKey(); - LinkedHashMap groupTpl = (LinkedHashMap) me.getValue(); - ArrayList memberNames = (ArrayList) groupTpl.get("members"); - if (memberNames != null) { - DataEntity.validateDatatype("list", memberNames, null, null, null); - if (memberNames.size() < 1 || - (new HashSet(memberNames)).size() != memberNames.size()) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE005", String.format( - "InvalidGroupTargetException: Member nodes \"%s\" should be >= 1 and not repeated", - memberNames.toString()))); - } else { - memberNodes = _getGroupMembers(memberNames); - } - } - Group group = new Group(groupName, - 
groupTpl, - memberNodes, - customDefs, subMappedNodeTemplate); - groups.add(group); - } - return groups; - } - - private ArrayList _getGroupMembers(ArrayList memberNames) { - ArrayList memberNodes = new ArrayList<>(); - _validateGroupMembers(memberNames); - for (String member : memberNames) { - for (NodeTemplate node : nodeTemplates) { - if (member.equals(node.getName())) { - memberNodes.add(node); - } - } - } - return memberNodes; - } - - private ArrayList _getPolicyGroups(ArrayList memberNames) { - ArrayList memberGroups = new ArrayList<>(); - for (String member : memberNames) { - for (Group group : groups) { - if (member.equals(group.getName())) { - memberGroups.add(group); - } - } - } - return memberGroups; - } - - private void _validateGroupMembers(ArrayList members) { - ArrayList nodeNames = new ArrayList<>(); - for (NodeTemplate node : nodeTemplates) { - nodeNames.add(node.getName()); - } - for (String member : members) { - if (!nodeNames.contains(member)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE239", String.format( - "InvalidGroupTargetException: Target member \"%s\" is not found in \"nodeTemplates\"", member))); - } - } - } - - // topology template can act like node template - // it is exposed by substitution_mappings. 
- - public String nodetype() { - return substitutionMappings.getNodeType(); - } - - public LinkedHashMap capabilities() { - return substitutionMappings.getCapabilities(); - } - - public LinkedHashMap requirements() { - return substitutionMappings.getRequirements(); - } - - private String _tplDescription() { - return (String) tpl.get(DESCRIPTION); - //if description: - // return description.rstrip() - } - - @SuppressWarnings("unchecked") - private LinkedHashMap _tplInputs() { - if (tpl.get(INPUTS) != null) { - return (LinkedHashMap) tpl.get(INPUTS); - } - return new LinkedHashMap(); - } - - @SuppressWarnings("unchecked") - private LinkedHashMap _tplNodeTemplates() { - return (LinkedHashMap) tpl.get(NODE_TEMPLATES); - } - - @SuppressWarnings("unchecked") - private LinkedHashMap _tplRelationshipTemplates() { - if (tpl.get(RELATIONSHIP_TEMPLATES) != null) { - return (LinkedHashMap) tpl.get(RELATIONSHIP_TEMPLATES); - } - return new LinkedHashMap(); - } - - @SuppressWarnings("unchecked") - private LinkedHashMap _tplOutputs() { - if (tpl.get(OUTPUTS) != null) { - return (LinkedHashMap) tpl.get(OUTPUTS); - } - return new LinkedHashMap(); - } - - @SuppressWarnings("unchecked") - private LinkedHashMap _tplSubstitutionMappings() { - if (tpl.get(SUBSTITUTION_MAPPINGS) != null) { - return (LinkedHashMap) tpl.get(SUBSTITUTION_MAPPINGS); - } - return new LinkedHashMap(); - } - - @SuppressWarnings("unchecked") - private LinkedHashMap _tplGroups() { - if (tpl.get(GROUPS) != null) { - return (LinkedHashMap) tpl.get(GROUPS); - } - return new LinkedHashMap(); - } - - @SuppressWarnings("unchecked") - private LinkedHashMap _tplPolicies() { - if (tpl.get(POLICIES) != null) { - return (LinkedHashMap) tpl.get(POLICIES); - } - return new LinkedHashMap<>(); - } - - private void _validateField() { - for (String name : tpl.keySet()) { - boolean bFound = false; - for (String section : SECTIONS) { - if (name.equals(section)) { - bFound = true; - break; - } - } - if (!bFound) { - 
ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE240", String.format( - "UnknownFieldError: TopologyTemplate contains unknown field \"%s\"", name))); - } - } - } - - @SuppressWarnings("unchecked") - private void _processIntrinsicFunctions() { - // Process intrinsic functions - - // Current implementation processes functions within node template - // properties, requirements, interfaces inputs and template outputs. - - if (nodeTemplates != null) { - for (NodeTemplate nt : nodeTemplates) { - for (Property prop : nt.getPropertiesObjects()) { - prop.setValue(Function.getFunction(this, nt, prop.getValue(), resolveGetInput)); - } - for (InterfacesDef ifd : nt.getInterfaces()) { - LinkedHashMap ifin = ifd.getInputs(); - if (ifin != null) { - for (Map.Entry me : ifin.entrySet()) { - String name = me.getKey(); - Object value = Function.getFunction(this, nt, me.getValue(), resolveGetInput); - ifd.setInput(name, value); - } - } - } - if (nt.getRequirements() != null) { - for (RequirementAssignment req : nt.getRequirements().getAll()) { - LinkedHashMap rel; - Object t = req.getRelationship(); - // it can be a string or a LHM... 
- if (t instanceof LinkedHashMap) { - rel = (LinkedHashMap) t; - } else { - // we set it to null to fail the next test - // and avoid the get("proprties") - rel = null; - } - - if (rel != null && rel.get("properties") != null) { - LinkedHashMap relprops = - (LinkedHashMap) rel.get("properties"); - for (String key : relprops.keySet()) { - Object value = relprops.get(key); - Object func = Function.getFunction(this, req, value, resolveGetInput); - relprops.put(key, func); - } - } - } - } - if (nt.getCapabilitiesObjects() != null) { - for (CapabilityAssignment cap : nt.getCapabilitiesObjects()) { - if (cap.getPropertiesObjects() != null) { - for (Property prop : cap.getPropertiesObjects()) { - Object propvalue = Function.getFunction(this, nt, prop.getValue(), resolveGetInput); - if (propvalue instanceof GetInput) { - propvalue = ((GetInput) propvalue).result(); - for (String p : cap.getProperties().keySet()) { - //Object v = cap.getProperties().get(p); - if (p.equals(prop.getName())) { - cap.setProperty(p, propvalue); - } - } - } - } - } - } - } - for (RelationshipType rel : nt.getRelationships().keySet()) { - NodeTemplate node = nt.getRelationships().get(rel); - ArrayList relTpls = node.getRelationshipTemplate(); - if (relTpls != null) { - for (RelationshipTemplate relTpl : relTpls) { - // TT 5 - for (InterfacesDef iface : relTpl.getInterfaces()) { - if (iface.getInputs() != null) { - for (String name : iface.getInputs().keySet()) { - Object value = iface.getInputs().get(name); - Object func = Function.getFunction( - this, - relTpl, - value, - resolveGetInput); - iface.setInput(name, func); - } - } - } - } - } - } - } - } - for (Output output : outputs) { - Object func = Function.getFunction(this, outputs, output.getValue(), resolveGetInput); - if (func instanceof GetAttribute) { - output.setAttr(Output.VALUE, func); - } - } - } - - public static String getSubMappingNodeType(LinkedHashMap topologyTpl) { - if (topologyTpl != null && topologyTpl instanceof 
LinkedHashMap) { - Object submapTpl = topologyTpl.get(SUBSTITUTION_MAPPINGS); - return SubstitutionMappings.stGetNodeType((LinkedHashMap) submapTpl); - } - return null; - } - - // getters - - public LinkedHashMap getTpl() { - return tpl; - } - - public LinkedHashMap getMetadata() { - return metaData; - } - - public ArrayList getInputs() { - return inputs; - } - - public ArrayList getOutputs() { - return outputs; - } - - public ArrayList getPolicies() { - return policies; - } - - public ArrayList getRelationshipTemplates() { - return relationshipTemplates; - } - - public ArrayList getNodeTemplates() { - return nodeTemplates; - } - - public ArrayList getGroups() { - return groups; - } - - public SubstitutionMappings getSubstitutionMappings() { - return substitutionMappings; - } - - public LinkedHashMap getParsedParams() { - return parsedParams; - } - - public boolean getResolveGetInput() { - return resolveGetInput; - } - - public LinkedHashMap getCustomDefs() { - return customDefs; - } -} - -/*python - -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- - -import logging - -from toscaparser.common import exception -from toscaparser.dataentity import DataEntity -from toscaparser import functions -from toscaparser.groups import Group -from toscaparser.nodetemplate import NodeTemplate -from toscaparser.parameters import Input -from toscaparser.parameters import Output -from toscaparser.policy import Policy -from toscaparser.relationship_template import RelationshipTemplate -from toscaparser.substitution_mappings import SubstitutionMappings -from toscaparser.tpl_relationship_graph import ToscaGraph -from toscaparser.utils.gettextutils import _ - - -# Topology template key names -SECTIONS = (DESCRIPTION, INPUTS, NODE_TEMPLATES, - RELATIONSHIP_TEMPLATES, OUTPUTS, GROUPS, - SUBSTITUION_MAPPINGS, POLICIES) = \ - ('description', 'inputs', 'node_templates', - 'relationship_templates', 'outputs', 'groups', - 'substitution_mappings', 'policies') - -log = logging.getLogger("tosca.model") - - -class TopologyTemplate(object): - - '''Load the template data.''' - def __init__(self, template, custom_defs, - rel_types=None, parsed_params=None, - sub_mapped_node_template=None): - self.tpl = template - self.sub_mapped_node_template = sub_mapped_node_template - if self.tpl: - self.custom_defs = custom_defs - self.rel_types = rel_types - self.parsed_params = parsed_params - self._validate_field() - self.description = self._tpl_description() - self.inputs = self._inputs() - self.relationship_templates = self._relationship_templates() - self.nodetemplates = self._nodetemplates() - self.outputs = self._outputs() - if hasattr(self, 'nodetemplates'): - self.graph = ToscaGraph(self.nodetemplates) - self.groups = self._groups() - self.policies = self._policies() - self._process_intrinsic_functions() - self.substitution_mappings = self._substitution_mappings() - - def _inputs(self): - inputs = [] - for name, attrs in self._tpl_inputs().items(): - input = Input(name, attrs) - if self.parsed_params and name in self.parsed_params: - 
input.validate(self.parsed_params[name]) - else: - default = input.default - if default: - input.validate(default) - if (self.parsed_params and input.name not in self.parsed_params - or self.parsed_params is None) and input.required \ - and input.default is None: - log.warning(_('The required parameter %s ' - 'is not provided') % input.name) - - inputs.append(input) - return inputs - - def _nodetemplates(self): - nodetemplates = [] - tpls = self._tpl_nodetemplates() - if tpls: - for name in tpls: - tpl = NodeTemplate(name, tpls, self.custom_defs, - self.relationship_templates, - self.rel_types) - if (tpl.type_definition and - (tpl.type in tpl.type_definition.TOSCA_DEF or - (tpl.type not in tpl.type_definition.TOSCA_DEF and - bool(tpl.custom_def)))): - tpl.validate(self) - nodetemplates.append(tpl) - return nodetemplates - - def _relationship_templates(self): - rel_templates = [] - tpls = self._tpl_relationship_templates() - for name in tpls: - tpl = RelationshipTemplate(tpls[name], name, self.custom_defs) - rel_templates.append(tpl) - return rel_templates - - def _outputs(self): - outputs = [] - for name, attrs in self._tpl_outputs().items(): - output = Output(name, attrs) - output.validate() - outputs.append(output) - return outputs - - def _substitution_mappings(self): - tpl_substitution_mapping = self._tpl_substitution_mappings() - # if tpl_substitution_mapping and self.sub_mapped_node_template: - if tpl_substitution_mapping: - return SubstitutionMappings(tpl_substitution_mapping, - self.nodetemplates, - self.inputs, - self.outputs, - self.sub_mapped_node_template, - self.custom_defs) - - def _policies(self): - policies = [] - for policy in self._tpl_policies(): - for policy_name, policy_tpl in policy.items(): - target_list = policy_tpl.get('targets') - if target_list and len(target_list) >= 1: - target_objects = [] - targets_type = "groups" - target_objects = self._get_policy_groups(target_list) - if not target_objects: - targets_type = "node_templates" - 
target_objects = self._get_group_members(target_list) - policyObj = Policy(policy_name, policy_tpl, - target_objects, targets_type, - self.custom_defs) - policies.append(policyObj) - return policies - - def _groups(self): - groups = [] - member_nodes = None - for group_name, group_tpl in self._tpl_groups().items(): - member_names = group_tpl.get('members') - if member_names is not None: - DataEntity.validate_datatype('list', member_names) - if len(member_names) < 1 or \ - len(member_names) != len(set(member_names)): - exception.ValidationIssueCollector.appendException( - exception.InvalidGroupTargetException( - message=_('Member nodes "%s" should be >= 1 ' - 'and not repeated') % member_names)) - else: - member_nodes = self._get_group_members(member_names) - group = Group(group_name, group_tpl, - member_nodes, - self.custom_defs) - groups.append(group) - return groups - - def _get_group_members(self, member_names): - member_nodes = [] - self._validate_group_members(member_names) - for member in member_names: - for node in self.nodetemplates: - if node.name == member: - member_nodes.append(node) - return member_nodes - - def _get_policy_groups(self, member_names): - member_groups = [] - for member in member_names: - for group in self.groups: - if group.name == member: - member_groups.append(group) - return member_groups - - def _validate_group_members(self, members): - node_names = [] - for node in self.nodetemplates: - node_names.append(node.name) - for member in members: - if member not in node_names: - exception.ValidationIssueCollector.appendException( - exception.InvalidGroupTargetException( - message=_('Target member "%s" is not found in ' - 'node_templates') % member)) - - # topology template can act like node template - # it is exposed by substitution_mappings. 
- def nodetype(self): - return self.substitution_mappings.node_type \ - if self.substitution_mappings else None - - def capabilities(self): - return self.substitution_mappings.capabilities \ - if self.substitution_mappings else None - - def requirements(self): - return self.substitution_mappings.requirements \ - if self.substitution_mappings else None - - def _tpl_description(self): - description = self.tpl.get(DESCRIPTION) - if description: - return description.rstrip() - - def _tpl_inputs(self): - return self.tpl.get(INPUTS) or {} - - def _tpl_nodetemplates(self): - return self.tpl.get(NODE_TEMPLATES) - - def _tpl_relationship_templates(self): - return self.tpl.get(RELATIONSHIP_TEMPLATES) or {} - - def _tpl_outputs(self): - return self.tpl.get(OUTPUTS) or {} - - def _tpl_substitution_mappings(self): - return self.tpl.get(SUBSTITUION_MAPPINGS) or {} - - def _tpl_groups(self): - return self.tpl.get(GROUPS) or {} - - def _tpl_policies(self): - return self.tpl.get(POLICIES) or {} - - def _validate_field(self): - for name in self.tpl: - if name not in SECTIONS: - exception.ValidationIssueCollector.appendException( - exception.UnknownFieldError(what='Template', field=name)) - - def _process_intrinsic_functions(self): - """Process intrinsic functions - - Current implementation processes functions within node template - properties, requirements, interfaces inputs and template outputs. 
- """ - if hasattr(self, 'nodetemplates'): - for node_template in self.nodetemplates: - for prop in node_template.get_properties_objects(): - prop.value = functions.get_function(self, - node_template, - prop.value) - for interface in node_template.interfaces: - if interface.inputs: - for name, value in interface.inputs.items(): - interface.inputs[name] = functions.get_function( - self, - node_template, - value) - if node_template.requirements and \ - isinstance(node_template.requirements, list): - for req in node_template.requirements: - rel = req - for req_name, req_item in req.items(): - if isinstance(req_item, dict): - rel = req_item.get('relationship') - break - if rel and 'properties' in rel: - for key, value in rel['properties'].items(): - rel['properties'][key] = \ - functions.get_function(self, - req, - value) - if node_template.get_capabilities_objects(): - for cap in node_template.get_capabilities_objects(): - if cap.get_properties_objects(): - for prop in cap.get_properties_objects(): - propvalue = functions.get_function( - self, - node_template, - prop.value) - if isinstance(propvalue, functions.GetInput): - propvalue = propvalue.result() - for p, v in cap._properties.items(): - if p == prop.name: - cap._properties[p] = propvalue - for rel, node in node_template.relationships.items(): - rel_tpls = node.relationship_tpl - if rel_tpls: - for rel_tpl in rel_tpls: - for interface in rel_tpl.interfaces: - if interface.inputs: - for name, value in \ - interface.inputs.items(): - interface.inputs[name] = \ - functions.get_function(self, - rel_tpl, - value) - for output in self.outputs: - func = functions.get_function(self, self.outputs, output.value) - if isinstance(func, functions.GetAttribute): - output.attrs[output.VALUE] = func - - @classmethod - def get_sub_mapping_node_type(cls, topology_tpl): - if topology_tpl and isinstance(topology_tpl, dict): - submap_tpl = topology_tpl.get(SUBSTITUION_MAPPINGS) - return SubstitutionMappings.get_node_type(submap_tpl) 
-*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/ToscaGraph.java b/src/main/java/org/onap/sdc/toscaparser/api/ToscaGraph.java deleted file mode 100644 index 1706cdc..0000000 --- a/src/main/java/org/onap/sdc/toscaparser/api/ToscaGraph.java +++ /dev/null @@ -1,129 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * SDC - * ================================================================================ - * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package org.onap.sdc.toscaparser.api; - -import org.onap.sdc.toscaparser.api.elements.RelationshipType; - -import java.util.ArrayList; -import java.util.LinkedHashMap; - -//import java.util.Iterator; - -public class ToscaGraph { - // Graph of Tosca Node Templates - - private ArrayList nodeTemplates; - private LinkedHashMap vertices; - - public ToscaGraph(ArrayList inodeTemplates) { - nodeTemplates = inodeTemplates; - vertices = new LinkedHashMap(); - create(); - } - - private void createVertex(NodeTemplate node) { - if (vertices.get(node.getName()) == null) { - vertices.put(node.getName(), node); - } - } - - private void createEdge(NodeTemplate node1, - NodeTemplate node2, - RelationshipType relation) { - if (vertices.get(node1.getName()) == null) { - createVertex(node1); - vertices.get(node1.name)._addNext(node2, relation); - } - } - - public NodeTemplate vertex(String name) { - if (vertices.get(name) != null) { - return vertices.get(name); - } - return null; - } - -// public Iterator getIter() { -// return vertices.values().iterator(); -// } - - private void create() { - for (NodeTemplate node : nodeTemplates) { - LinkedHashMap relation = node.getRelationships(); - if (relation != null) { - for (RelationshipType rel : relation.keySet()) { - NodeTemplate nodeTpls = relation.get(rel); - for (NodeTemplate tpl : nodeTemplates) { - if (tpl.getName().equals(nodeTpls.getName())) { - createEdge(node, tpl, rel); - } - } - } - } - createVertex(node); - } - } - - @Override - public String toString() { - return "ToscaGraph{" - + "nodeTemplates=" + nodeTemplates - + ", vertices=" + vertices - + '}'; - } -} - -/*python - -class ToscaGraph(object): - '''Graph of Tosca Node Templates.''' - def __init__(self, nodetemplates): - self.nodetemplates = nodetemplates - self.vertices = {} - self._create() - - def _create_vertex(self, node): - if node not in self.vertices: - 
self.vertices[node.name] = node - - def _create_edge(self, node1, node2, relationship): - if node1 not in self.vertices: - self._create_vertex(node1) - self.vertices[node1.name]._add_next(node2, - relationship) - - def vertex(self, node): - if node in self.vertices: - return self.vertices[node] - - def __iter__(self): - return iter(self.vertices.values()) - - def _create(self): - for node in self.nodetemplates: - relation = node.relationships - if relation: - for rel, nodetpls in relation.items(): - for tpl in self.nodetemplates: - if tpl.name == nodetpls.name: - self._create_edge(node, tpl, rel) - self._create_vertex(node) -*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/ToscaTemplate.java b/src/main/java/org/onap/sdc/toscaparser/api/ToscaTemplate.java deleted file mode 100644 index ddb8ddb..0000000 --- a/src/main/java/org/onap/sdc/toscaparser/api/ToscaTemplate.java +++ /dev/null @@ -1,1267 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * Copyright (c) 2017 AT&T Intellectual Property. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * Modifications copyright (c) 2019 Fujitsu Limited. 
- * ================================================================================ - */ -package org.onap.sdc.toscaparser.api; - -import java.io.File; -import java.io.FileInputStream; -import java.io.FileNotFoundException; -import java.io.IOException; -import java.io.InputStream; -import java.nio.file.Files; -import java.nio.file.Paths; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Iterator; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.ConcurrentHashMap; -import java.util.function.Predicate; - -import org.onap.sdc.toscaparser.api.common.JToscaException; -import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; -import org.onap.sdc.toscaparser.api.common.ValidationIssueCollector; -import org.onap.sdc.toscaparser.api.elements.EntityType; -import org.onap.sdc.toscaparser.api.elements.DataType; -import org.onap.sdc.toscaparser.api.elements.Metadata; -import org.onap.sdc.toscaparser.api.extensions.ExtTools; -import org.onap.sdc.toscaparser.api.parameters.Input; -import org.onap.sdc.toscaparser.api.parameters.Output; -import org.onap.sdc.toscaparser.api.prereq.CSAR; -import org.onap.sdc.toscaparser.api.utils.JToscaErrorCodes; -import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.yaml.snakeyaml.Yaml; - -public class ToscaTemplate extends Object { - - public static final int MAX_LEVELS = 20; - private static Logger log = LoggerFactory.getLogger(ToscaTemplate.class.getName()); - - // TOSCA template key names - private static final String DEFINITION_VERSION = "tosca_definitions_version"; - private static final String DEFAULT_NAMESPACE = "tosca_default_namespace"; - private static final String TEMPLATE_NAME = "template_name"; - private static final String TOPOLOGY_TEMPLATE = "topology_template"; - private static final String TEMPLATE_AUTHOR = 
"template_author"; - private static final String TEMPLATE_VERSION = "template_version"; - private static final String DESCRIPTION = "description"; - private static final String IMPORTS = "imports"; - private static final String DSL_DEFINITIONS = "dsl_definitions"; - private static final String NODE_TYPES = "node_types"; - private static final String RELATIONSHIP_TYPES = "relationship_types"; - private static final String RELATIONSHIP_TEMPLATES = "relationship_templates"; - private static final String CAPABILITY_TYPES = "capability_types"; - private static final String ARTIFACT_TYPES = "artifact_types"; - private static final String DATA_TYPES = "data_types"; - private static final String INTERFACE_TYPES = "interface_types"; - private static final String POLICY_TYPES = "policy_types"; - private static final String GROUP_TYPES = "group_types"; - private static final String REPOSITORIES = "repositories"; - - private static String SECTIONS[] = { - DEFINITION_VERSION, DEFAULT_NAMESPACE, TEMPLATE_NAME, - TOPOLOGY_TEMPLATE, TEMPLATE_AUTHOR, TEMPLATE_VERSION, - DESCRIPTION, IMPORTS, DSL_DEFINITIONS, NODE_TYPES, - RELATIONSHIP_TYPES, RELATIONSHIP_TEMPLATES, - CAPABILITY_TYPES, ARTIFACT_TYPES, DATA_TYPES, - INTERFACE_TYPES, POLICY_TYPES, GROUP_TYPES, REPOSITORIES - }; - - // Sections that are specific to individual template definitions - private static final String METADATA = "metadata"; - private static ArrayList SPECIAL_SECTIONS; - - private ExtTools exttools = new ExtTools(); - - private ArrayList VALID_TEMPLATE_VERSIONS; - private LinkedHashMap> ADDITIONAL_SECTIONS; - - private boolean isFile; - private String path; - private String inputPath; - private String rootPath; - private LinkedHashMap parsedParams; - private boolean resolveGetInput; - private LinkedHashMap tpl; - private String version; - private ArrayList imports; - private LinkedHashMap relationshipTypes; - private Metadata metaData; - private String description; - private TopologyTemplate topologyTemplate; - 
private ArrayList repositories; - private ArrayList inputs; - private ArrayList relationshipTemplates; - private ArrayList nodeTemplates; - private ArrayList outputs; - private ArrayList policies; - private ArrayList groups; - private ConcurrentHashMap nestedToscaTplsWithTopology; - private ArrayList nestedToscaTemplatesWithTopology; - private ToscaGraph graph; - private String csarTempDir; - private int nestingLoopCounter; - private LinkedHashMap> metaProperties; - private Set processedImports; - private LinkedHashMap customDefsFinal = new LinkedHashMap<>(); - private HashSet dataTypes; - - public ToscaTemplate(String _path, - LinkedHashMap _parsedParams, - boolean aFile, - LinkedHashMap yamlDictTpl) throws JToscaException { - init(_path, _parsedParams, aFile, yamlDictTpl, true); - } - - public ToscaTemplate(String _path, - LinkedHashMap _parsedParams, - boolean aFile, - LinkedHashMap yamlDictTpl, boolean resolveGetInput) throws JToscaException { - init(_path, _parsedParams, aFile, yamlDictTpl, resolveGetInput); - } - - @SuppressWarnings("unchecked") - private void init(String _path, - LinkedHashMap _parsedParams, - boolean aFile, - LinkedHashMap yamlDictTpl, boolean _resolveGetInput) throws JToscaException { - - ThreadLocalsHolder.setCollector(new ValidationIssueCollector()); - - VALID_TEMPLATE_VERSIONS = new ArrayList<>(); - VALID_TEMPLATE_VERSIONS.add("tosca_simple_yaml_1_0"); - VALID_TEMPLATE_VERSIONS.add("tosca_simple_yaml_1_1"); - VALID_TEMPLATE_VERSIONS.addAll(exttools.getVersions()); - ADDITIONAL_SECTIONS = new LinkedHashMap<>(); - SPECIAL_SECTIONS = new ArrayList<>(); - SPECIAL_SECTIONS.add(METADATA); - ADDITIONAL_SECTIONS.put("tosca_simple_yaml_1_0", SPECIAL_SECTIONS); - ADDITIONAL_SECTIONS.put("tosca_simple_yaml_1_1", SPECIAL_SECTIONS); - ADDITIONAL_SECTIONS.putAll(exttools.getSections()); - - //long startTime = System.nanoTime(); - - - isFile = aFile; - inputPath = null; - path = null; - tpl = null; - csarTempDir = null; - nestedToscaTplsWithTopology = 
new ConcurrentHashMap<>(); - nestedToscaTemplatesWithTopology = new ArrayList(); - resolveGetInput = _resolveGetInput; - metaProperties = new LinkedHashMap<>(); - - if (_path != null && !_path.isEmpty()) { - // save the original input path - inputPath = _path; - // get the actual path (will change with CSAR) - path = _getPath(_path); - // load the YAML template - if (path != null && !path.isEmpty()) { - try (InputStream input = new FileInputStream(new File(path));) { - //System.out.println("Loading YAML file " + path); - log.debug("ToscaTemplate Loading YAMEL file {}", path); - Yaml yaml = new Yaml(); - Object data = yaml.load(input); - this.tpl = (LinkedHashMap) data; - } catch (FileNotFoundException e) { - log.error("ToscaTemplate - Exception loading yaml: {}", e.getMessage()); - log.error("Exception", e); - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE275", - "ToscaTemplate - Exception loading yaml: -> " + e.getMessage())); - return; - } catch (Exception e) { - log.error("ToscaTemplate - Error loading yaml, aborting -> ", e.getMessage()); - log.error("Exception", e); - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE275", - "ToscaTemplate - Error loading yaml, aborting -> " + e.getMessage())); - return; - } - - if (yamlDictTpl != null) { - //msg = (_('Both path and yaml_dict_tpl arguments were ' - // 'provided. Using path and ignoring yaml_dict_tpl.')) - //log.info(msg) - log.debug("ToscaTemplate - Both path and yaml_dict_tpl arguments were provided. Using path and ignoring yaml_dict_tpl"); - } - } else { - // no input to process... - _abort(); - } - } else { - if (yamlDictTpl != null) { - tpl = yamlDictTpl; - } else { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE244", - "ValueError: No path or yaml_dict_tpl was provided. There is nothing to parse")); - log.debug("ToscaTemplate ValueError: No path or yaml_dict_tpl was provided. 
There is nothing to parse"); - - } - } - - if (tpl != null) { - parsedParams = _parsedParams; - _validateField(); - this.rootPath = path; - this.processedImports = new HashSet(); - this.imports = _tplImports(); - this.version = _tplVersion(); - this.metaData = _tplMetaData(); - this.relationshipTypes = _tplRelationshipTypes(); - this.description = _tplDescription(); - this.dataTypes = getTopologyDataTypes(); - this.topologyTemplate = _topologyTemplate(); - this.repositories = _tplRepositories(); - if (topologyTemplate.getTpl() != null) { - this.inputs = _inputs(); - this.relationshipTemplates = _relationshipTemplates(); - this.nodeTemplates = _nodeTemplates(); - this.outputs = _outputs(); - this.policies = _policies(); - this.groups = _groups(); -// _handleNestedToscaTemplatesWithTopology(); - _handleNestedToscaTemplatesWithTopology(topologyTemplate); - graph = new ToscaGraph(nodeTemplates); - } - } - - if (csarTempDir != null) { - CSAR.deleteDir(new File(csarTempDir)); - csarTempDir = null; - } - - verifyTemplate(); - - } - - private void _abort() throws JToscaException { - // print out all exceptions caught - verifyTemplate(); - throw new JToscaException("jtosca aborting", JToscaErrorCodes.PATH_NOT_VALID.getValue()); - } - - private TopologyTemplate _topologyTemplate() { - return new TopologyTemplate( - _tplTopologyTemplate(), - _getAllCustomDefs(imports), - relationshipTypes, - parsedParams, - null, - resolveGetInput); - } - - private ArrayList _inputs() { - return topologyTemplate.getInputs(); - } - - private ArrayList _nodeTemplates() { - return topologyTemplate.getNodeTemplates(); - } - - private ArrayList _relationshipTemplates() { - return topologyTemplate.getRelationshipTemplates(); - } - - private ArrayList _outputs() { - return topologyTemplate.getOutputs(); - } - - private String _tplVersion() { - return (String) tpl.get(DEFINITION_VERSION); - } - - @SuppressWarnings("unchecked") - private Metadata _tplMetaData() { - Object mdo = tpl.get(METADATA); - if 
(mdo instanceof LinkedHashMap) { - return new Metadata((Map) mdo); - } else { - return null; - } - } - - private String _tplDescription() { - return (String) tpl.get(DESCRIPTION); - } - - @SuppressWarnings("unchecked") - private ArrayList _tplImports() { - return (ArrayList) tpl.get(IMPORTS); - } - - @SuppressWarnings("unchecked") - private ArrayList _tplRepositories() { - LinkedHashMap repositories = - (LinkedHashMap) tpl.get(REPOSITORIES); - ArrayList reposit = new ArrayList<>(); - if (repositories != null) { - for (Map.Entry me : repositories.entrySet()) { - Repository reposits = new Repository(me.getKey(), me.getValue()); - reposit.add(reposits); - } - } - return reposit; - } - - private LinkedHashMap _tplRelationshipTypes() { - return (LinkedHashMap) _getCustomTypes(RELATIONSHIP_TYPES, null); - } - - @SuppressWarnings("unchecked") - private LinkedHashMap _tplTopologyTemplate() { - return (LinkedHashMap) tpl.get(TOPOLOGY_TEMPLATE); - } - - private ArrayList _policies() { - return topologyTemplate.getPolicies(); - } - - private ArrayList _groups() { - return topologyTemplate.getGroups(); - } - - /** - * Read datatypes field - * - * @return return list of datatypes. - */ - @SuppressWarnings("unchecked") - private HashSet getTopologyDataTypes() { - LinkedHashMap value = - (LinkedHashMap) tpl.get(DATA_TYPES); - HashSet datatypes = new HashSet<>(); - if (value != null) { - customDefsFinal.putAll(value); - for (Map.Entry me : value.entrySet()) { - DataType datatype = new DataType(me.getKey(), value); - datatypes.add(datatype); - } - } - - - return datatypes; - } - - /** - * This method is used to get consolidated custom definitions from all imports - * It is logically divided in two parts to handle imports; map and list formats. - * Before processing the imports; it sorts them to make sure the current directory imports are - * being processed first and then others. Once sorted; it processes each import one by one in - * recursive manner. 
- * To avoid cyclic dependency among imports; this method uses a set to keep track of all - * imports which are already processed and filters the imports which occurs more than once. - * - * @param alImports all imports which needs to be processed - * @return the linked hash map containing all import definitions - */ - - @SuppressWarnings("unchecked") - private LinkedHashMap _getAllCustomDefs(Object alImports) { - - - String types[] = { - IMPORTS, NODE_TYPES, CAPABILITY_TYPES, RELATIONSHIP_TYPES, - DATA_TYPES, INTERFACE_TYPES, POLICY_TYPES, GROUP_TYPES - }; - - List> imports = (List>) alImports; - if (imports != null && !imports.isEmpty()) { - if (imports.get(0) instanceof LinkedHashMap) { - imports = sortImports(imports); - - for (Map map : imports) { - List> singleImportList = new ArrayList<>(); - singleImportList.add(map); - - Map importNameDetails = getValidFileNameForImportReference(singleImportList); - singleImportList = filterImportsForRecursion(singleImportList, importNameDetails); - - if (!singleImportList.get(0).isEmpty()) { - LinkedHashMap customDefs = _getCustomTypes(types, new ArrayList<>(singleImportList)); - processedImports.add(importNameDetails.get("importFileName")); - - if (customDefs != null) { - customDefsFinal.putAll(customDefs); - - if (customDefs.get(IMPORTS) != null) { - resetPathForRecursiveImports(importNameDetails.get("importRelativeName")); - LinkedHashMap importDefs = _getAllCustomDefs(customDefs.get(IMPORTS)); - customDefsFinal.putAll(importDefs); - } - } - } - } - } else { - LinkedHashMap customDefs = _getCustomTypes(types, new ArrayList<>(imports)); - if (customDefs != null) { - customDefsFinal.putAll(customDefs); - - if (customDefs.get(IMPORTS) != null) { - LinkedHashMap importDefs = _getAllCustomDefs(customDefs.get(IMPORTS)); - customDefsFinal.putAll(importDefs); - } - } - } - } - - // As imports are not custom_types, remove from the dict - customDefsFinal.remove(IMPORTS); - - return customDefsFinal; - } - - /** - * This method is 
used to sort the imports in order so that same directory - * imports will be processed first - * - * @param customImports the custom imports - * @return the sorted list of imports - */ - private List> sortImports(List> customImports) { - List> finalList1 = new ArrayList<>(); - List> finalList2 = new ArrayList<>(); - Iterator> itr = customImports.iterator(); - while (itr.hasNext()) { - Map innerMap = itr.next(); - if (innerMap.toString().contains("../")) { - finalList2.add(innerMap); - itr.remove(); - } else if (innerMap.toString().contains("/")) { - finalList1.add(innerMap); - itr.remove(); - } - } - - customImports.addAll(finalList1); - customImports.addAll(finalList2); - return customImports; - } - - /** - * This method is used to reset PATH variable after processing of current import file is done - * This is required because of relative path nature of imports present in files. - * - * @param currImportRelativeName the current import relative name - */ - private void resetPathForRecursiveImports(String currImportRelativeName) { - path = getPath(path, currImportRelativeName); - } - - /** - * This is a recursive method which starts from current import and then recursively finds a - * valid path relative to current import file name. 
- * By doing this it handles all nested hierarchy of imports defined in CSARs - * - * @param path the path - * @param importFileName the import file name - * @return the string containing updated path value - */ - private String getPath(String path, String importFileName) { - String tempFullPath = (Paths.get(path).toAbsolutePath().getParent() - .toString() + File.separator + importFileName.replace("../", "")).replace('\\', '/'); - String tempPartialPath = (Paths.get(path).toAbsolutePath().getParent().toString()).replace('\\', '/'); - if (Files.exists(Paths.get(tempFullPath))) - return tempFullPath; - else - return getPath(tempPartialPath, importFileName); - } - - /** - * This method is used to get full path name for the file which needs to be processed. It helps - * in situation where files are present in different directory and are references as relative - * paths. - * - * @param customImports the custom imports - * @return the map containing import file full and relative paths - */ - private Map getValidFileNameForImportReference(List> customImports) { - String importFileName; - Map retMap = new HashMap<>(); - for (Map map1 : customImports) { - for (Map.Entry entry : map1.entrySet()) { - Map innerMostMap = (Map) entry.getValue(); - Iterator> it = innerMostMap.entrySet().iterator(); - while (it.hasNext()) { - Map.Entry val = it.next(); - if (val.getValue().contains("/")) { - importFileName = (Paths.get(rootPath).toAbsolutePath().getParent().toString() + File - .separator + val.getValue().replace("../", "")).replace('\\', '/'); - } else { - importFileName = (Paths.get(path).toAbsolutePath().getParent().toString() + File - .separator + val.getValue().replace("../", "")).replace('\\', '/'); - } - retMap.put("importFileName", importFileName); - retMap.put("importRelativeName", val.getValue()); - } - } - } - return retMap; - } - - /** - * This method is used to filter the imports which already gets processed in previous step. 
- * It handles the use case of cyclic dependency in imports which may cause Stack Overflow - * exception - * - * @param customImports the custom imports - * @param importNameDetails the import name details - * @return the list containing filtered imports - */ - private List> filterImportsForRecursion(List> - customImports, Map importNameDetails) { - for (Map map1 : customImports) { - for (Map.Entry entry : map1.entrySet()) { - Map innerMostMap = (Map) entry.getValue(); - Iterator> it = innerMostMap.entrySet().iterator(); - while (it.hasNext()) { - it.next(); - if (processedImports.contains(importNameDetails.get("importFileName"))) { - it.remove(); - } - } - } - } - - // Remove Empty elements - Iterator> itr = customImports.iterator(); - while (itr.hasNext()) { - Map innerMap = itr.next(); - Predicate predicate = p -> p.values().isEmpty(); - innerMap.values().removeIf(predicate); - } - - return customImports; - } - - @SuppressWarnings("unchecked") - private LinkedHashMap _getCustomTypes(Object typeDefinitions, ArrayList alImports) { - - // Handle custom types defined in imported template files - // This method loads the custom type definitions referenced in "imports" - // section of the TOSCA YAML template. 
- - LinkedHashMap customDefs = new LinkedHashMap(); - ArrayList typeDefs = new ArrayList(); - if (typeDefinitions instanceof String[]) { - for (String s : (String[]) typeDefinitions) { - typeDefs.add(s); - } - } else { - typeDefs.add((String) typeDefinitions); - } - - if (alImports == null) { - alImports = _tplImports(); - } - - if (alImports != null) { - ImportsLoader customService = new ImportsLoader(alImports, path, typeDefs, tpl); - ArrayList> nestedToscaTpls = customService.getNestedToscaTpls(); - _updateNestedToscaTplsWithTopology(nestedToscaTpls); - - customDefs = customService.getCustomDefs(); - if (customDefs == null) { - return null; - } - } - - //Handle custom types defined in current template file - for (String td : typeDefs) { - if (!td.equals(IMPORTS)) { - LinkedHashMap innerCustomTypes = (LinkedHashMap) tpl.get(td); - if (innerCustomTypes != null) { - customDefs.putAll(innerCustomTypes); - } - } - } - return customDefs; - } - - private void _updateNestedToscaTplsWithTopology(ArrayList> nestedToscaTpls) { - for (LinkedHashMap ntpl : nestedToscaTpls) { - // there is just one key:value pair in ntpl - for (Map.Entry me : ntpl.entrySet()) { - String fileName = me.getKey(); - @SuppressWarnings("unchecked") - LinkedHashMap toscaTpl = (LinkedHashMap) me.getValue(); - if (toscaTpl.get(TOPOLOGY_TEMPLATE) != null) { - if (nestedToscaTplsWithTopology.get(fileName) == null) { - nestedToscaTplsWithTopology.putAll(ntpl); - } - } - } - } - } - - // multi level nesting - RECURSIVE - @SuppressWarnings("unchecked") - private void _handleNestedToscaTemplatesWithTopology(TopologyTemplate tt) { - if (++nestingLoopCounter > MAX_LEVELS) { - log.error("ToscaTemplate - _handleNestedToscaTemplatesWithTopology - Nested Topologies Loop: too many levels, aborting"); - return; - } - // Reset Processed Imports for nested templates - this.processedImports = new HashSet<>(); - for (Map.Entry me : nestedToscaTplsWithTopology.entrySet()) { - LinkedHashMap toscaTpl = - (LinkedHashMap) 
me.getValue(); - for (NodeTemplate nt : tt.getNodeTemplates()) { - if (_isSubMappedNode(nt, toscaTpl)) { - parsedParams = _getParamsForNestedTemplate(nt); - ArrayList alim = (ArrayList) toscaTpl.get(IMPORTS); - LinkedHashMap topologyTpl = - (LinkedHashMap) toscaTpl.get(TOPOLOGY_TEMPLATE); - TopologyTemplate topologyWithSubMapping = - new TopologyTemplate(topologyTpl, - _getAllCustomDefs(alim), - relationshipTypes, - parsedParams, - nt, - resolveGetInput); - nt.setOriginComponentTemplate(topologyWithSubMapping); - if (topologyWithSubMapping.getSubstitutionMappings() != null) { - // Record nested topology templates in top level template - //nestedToscaTemplatesWithTopology.add(topologyWithSubMapping); - // Set substitution mapping object for mapped node - nt.setSubMappingToscaTemplate( - topologyWithSubMapping.getSubstitutionMappings()); - _handleNestedToscaTemplatesWithTopology(topologyWithSubMapping); - } - } - } - } - } - -// private void _handleNestedToscaTemplatesWithTopology() { -// for(Map.Entry me: nestedToscaTplsWithTopology.entrySet()) { -// String fname = me.getKey(); -// LinkedHashMap toscaTpl = -// (LinkedHashMap)me.getValue(); -// for(NodeTemplate nt: nodeTemplates) { -// if(_isSubMappedNode(nt,toscaTpl)) { -// parsedParams = _getParamsForNestedTemplate(nt); -// ArrayList alim = (ArrayList)toscaTpl.get(IMPORTS); -// LinkedHashMap topologyTpl = -// (LinkedHashMap)toscaTpl.get(TOPOLOGY_TEMPLATE); -// TopologyTemplate topologyWithSubMapping = -// new TopologyTemplate(topologyTpl, -// //_getAllCustomDefs(null), -// _getAllCustomDefs(alim), -// relationshipTypes, -// parsedParams, -// nt); -// if(topologyWithSubMapping.getSubstitutionMappings() != null) { -// // Record nested topology templates in top level template -// nestedToscaTemplatesWithTopology.add(topologyWithSubMapping); -// // Set substitution mapping object for mapped node -// nt.setSubMappingToscaTemplate( -// topologyWithSubMapping.getSubstitutionMappings()); -// } -// } -// } -// } -// } - - 
private void _validateField() { - String sVersion = _tplVersion(); - if (sVersion == null) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE245", String.format( - "MissingRequiredField: Template is missing required field \"%s\"", DEFINITION_VERSION))); - } else { - _validateVersion(sVersion); - this.version = sVersion; - } - - for (String sKey : tpl.keySet()) { - boolean bFound = false; - for (String sSection : SECTIONS) { - if (sKey.equals(sSection)) { - bFound = true; - break; - } - } - // check ADDITIONAL_SECTIONS - if (!bFound) { - if (ADDITIONAL_SECTIONS.get(version) != null && - ADDITIONAL_SECTIONS.get(version).contains(sKey)) { - bFound = true; - } - } - if (!bFound) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE246", String.format( - "UnknownFieldError: Template contains unknown field \"%s\"", - sKey))); - } - } - } - - private void _validateVersion(String sVersion) { - boolean bFound = false; - for (String vtv : VALID_TEMPLATE_VERSIONS) { - if (sVersion.equals(vtv)) { - bFound = true; - break; - } - } - if (!bFound) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE247", String.format( - "InvalidTemplateVersion: \"%s\" is invalid. 
Valid versions are %s", - sVersion, VALID_TEMPLATE_VERSIONS.toString()))); - } else if ((!sVersion.equals("tosca_simple_yaml_1_0") && !sVersion.equals("tosca_simple_yaml_1_1"))) { - EntityType.updateDefinitions(sVersion); - - } - } - - private String _getPath(String _path) throws JToscaException { - if (_path.toLowerCase().endsWith(".yaml") || _path.toLowerCase().endsWith(".yml")) { - return _path; - } else if (_path.toLowerCase().endsWith(".zip") || _path.toLowerCase().endsWith(".csar")) { - // a CSAR archive - CSAR csar = new CSAR(_path, isFile); - if (csar.validate()) { - try { - csar.decompress(); - metaProperties = csar.getMetaProperties(); - } catch (IOException e) { - log.error("ToscaTemplate - _getPath - IOException trying to decompress {}", _path); - return null; - } - isFile = true; // the file has been decompressed locally - csar.cleanup(); - csarTempDir = csar.getTempDir(); - return csar.getTempDir() + File.separator + csar.getMainTemplate(); - } - } else { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE248", "ValueError: " + _path + " is not a valid file")); - return null; - } - return null; - } - - private void verifyTemplate() throws JToscaException { - //Criticals - int validationIssuesCaught = ThreadLocalsHolder.getCollector().validationIssuesCaught(); - if (validationIssuesCaught > 0) { - List validationIssueStrings = ThreadLocalsHolder.getCollector().getValidationIssueReport(); - log.trace("####################################################################################################"); - log.trace("ToscaTemplate - verifyTemplate - {} Parsing Critical{} occurred...", validationIssuesCaught, (validationIssuesCaught > 1 ? "s" : "")); - for (String s : validationIssueStrings) { - log.trace("{}. 
CSAR name - {}", s, inputPath); - } - log.trace("####################################################################################################"); - } - - } - - public String getPath() { - return path; - } - - public String getVersion() { - return version; - } - - public String getDescription() { - return description; - } - - public TopologyTemplate getTopologyTemplate() { - return topologyTemplate; - } - - public Metadata getMetaData() { - return metaData; - } - - public ArrayList getInputs() { - if (inputs != null) { - inputs.stream().forEach(Input::resetAnnotaions); - } - return inputs; - } - - public ArrayList getOutputs() { - return outputs; - } - - public ArrayList getPolicies() { - return policies; - } - - public ArrayList getGroups() { - return groups; - } - - public ArrayList getNodeTemplates() { - return nodeTemplates; - } - - public LinkedHashMap getMetaProperties(String propertiesFile) { - return metaProperties.get(propertiesFile); - } - -// private boolean _isSubMappedNode(NodeTemplate nt,LinkedHashMap toscaTpl) { -// // Return True if the nodetemple is substituted -// if(nt != null && nt.getSubMappingToscaTemplate() == null && -// getSubMappingNodeType(toscaTpl).equals(nt.getType()) && -// nt.getInterfaces().size() < 1) { -// return true; -// } -// return false; -// } - - private boolean _isSubMappedNode(NodeTemplate nt, LinkedHashMap toscaTpl) { - // Return True if the nodetemple is substituted - if (nt != null && nt.getSubMappingToscaTemplate() == null && - getSubMappingNodeType(toscaTpl).equals(nt.getType()) && - nt.getInterfaces().size() < 1) { - return true; - } - return false; - } - - private LinkedHashMap _getParamsForNestedTemplate(NodeTemplate nt) { - // Return total params for nested_template - LinkedHashMap pparams; - if (parsedParams != null) { - pparams = parsedParams; - } else { - pparams = new LinkedHashMap(); - } - if (nt != null) { - for (String pname : nt.getProperties().keySet()) { - pparams.put(pname, 
nt.getPropertyValue(pname)); - } - } - return pparams; - } - - @SuppressWarnings("unchecked") - private String getSubMappingNodeType(LinkedHashMap toscaTpl) { - // Return substitution mappings node type - if (toscaTpl != null) { - return TopologyTemplate.getSubMappingNodeType( - (LinkedHashMap) toscaTpl.get(TOPOLOGY_TEMPLATE)); - } - return null; - } - - public boolean hasNestedTemplates() { - // Return True if the tosca template has nested templates - return nestedToscaTemplatesWithTopology != null && - nestedToscaTemplatesWithTopology.size() >= 1; - - } - - public ArrayList getNestedTemplates() { - return nestedToscaTemplatesWithTopology; - } - - public ConcurrentHashMap getNestedTopologyTemplates() { - return nestedToscaTplsWithTopology; - } - - /** - * Get datatypes. - * - * @return return list of datatypes. - */ - public HashSet getDataTypes() { - return dataTypes; - } - - @Override - public String toString() { - return "ToscaTemplate{" + - "exttools=" + exttools + - ", VALID_TEMPLATE_VERSIONS=" + VALID_TEMPLATE_VERSIONS + - ", ADDITIONAL_SECTIONS=" + ADDITIONAL_SECTIONS + - ", isFile=" + isFile + - ", path='" + path + '\'' + - ", inputPath='" + inputPath + '\'' + - ", parsedParams=" + parsedParams + - ", tpl=" + tpl + - ", version='" + version + '\'' + - ", imports=" + imports + - ", relationshipTypes=" + relationshipTypes + - ", metaData=" + metaData + - ", description='" + description + '\'' + - ", topologyTemplate=" + topologyTemplate + - ", repositories=" + repositories + - ", inputs=" + inputs + - ", relationshipTemplates=" + relationshipTemplates + - ", nodeTemplates=" + nodeTemplates + - ", outputs=" + outputs + - ", policies=" + policies + - ", nestedToscaTplsWithTopology=" + nestedToscaTplsWithTopology + - ", nestedToscaTemplatesWithTopology=" + nestedToscaTemplatesWithTopology + - ", graph=" + graph + - ", csarTempDir='" + csarTempDir + '\'' + - ", nestingLoopCounter=" + nestingLoopCounter + - ", dataTypes=" + dataTypes + - '}'; - } - - public List 
getInputs(boolean annotationsRequired) { - if (inputs != null && annotationsRequired) { - inputs.stream().forEach(Input::parseAnnotations); - return inputs; - } - return getInputs(); - } -} - -/*python - -import logging -import os - -from copy import deepcopy -from toscaparser.common.exception import ValidationIssueCollector.collector -from toscaparser.common.exception import InvalidTemplateVersion -from toscaparser.common.exception import MissingRequiredFieldError -from toscaparser.common.exception import UnknownFieldError -from toscaparser.common.exception import ValidationError -from toscaparser.elements.entity_type import update_definitions -from toscaparser.extensions.exttools import ExtTools -import org.openecomp.sdc.toscaparser.api.imports -from toscaparser.prereq.csar import CSAR -from toscaparser.repositories import Repository -from toscaparser.topology_template import TopologyTemplate -from toscaparser.tpl_relationship_graph import ToscaGraph -from toscaparser.utils.gettextutils import _ -import org.openecomp.sdc.toscaparser.api.utils.yamlparser - - -# TOSCA template key names -SECTIONS = (DEFINITION_VERSION, DEFAULT_NAMESPACE, TEMPLATE_NAME, - TOPOLOGY_TEMPLATE, TEMPLATE_AUTHOR, TEMPLATE_VERSION, - DESCRIPTION, IMPORTS, DSL_DEFINITIONS, NODE_TYPES, - RELATIONSHIP_TYPES, RELATIONSHIP_TEMPLATES, - CAPABILITY_TYPES, ARTIFACT_TYPES, DATA_TYPES, INTERFACE_TYPES, - POLICY_TYPES, GROUP_TYPES, REPOSITORIES) = \ - ('tosca_definitions_version', 'tosca_default_namespace', - 'template_name', 'topology_template', 'template_author', - 'template_version', 'description', 'imports', 'dsl_definitions', - 'node_types', 'relationship_types', 'relationship_templates', - 'capability_types', 'artifact_types', 'data_types', - 'interface_types', 'policy_types', 'group_types', 'repositories') -# Sections that are specific to individual template definitions -SPECIAL_SECTIONS = (METADATA) = ('metadata') - -log = logging.getLogger("tosca.model") - -YAML_LOADER = 
toscaparser.utils.yamlparser.load_yaml - - -class ToscaTemplate(object): - exttools = ExtTools() - - VALID_TEMPLATE_VERSIONS = ['tosca_simple_yaml_1_0'] - - VALID_TEMPLATE_VERSIONS.extend(exttools.get_versions()) - - ADDITIONAL_SECTIONS = {'tosca_simple_yaml_1_0': SPECIAL_SECTIONS} - - ADDITIONAL_SECTIONS.update(exttools.get_sections()) - - '''Load the template data.''' - def __init__(self, path=None, parsed_params=None, a_file=True, - yaml_dict_tpl=None): - - ValidationIssueCollector.collector.start() - self.a_file = a_file - self.input_path = None - self.path = None - self.tpl = None - self.nested_tosca_tpls_with_topology = {} - self.nested_tosca_templates_with_topology = [] - if path: - self.input_path = path - self.path = self._get_path(path) - if self.path: - self.tpl = YAML_LOADER(self.path, self.a_file) - if yaml_dict_tpl: - msg = (_('Both path and yaml_dict_tpl arguments were ' - 'provided. Using path and ignoring yaml_dict_tpl.')) - log.info(msg) - print(msg) - else: - if yaml_dict_tpl: - self.tpl = yaml_dict_tpl - else: - ValidationIssueCollector.collector.appendException( - ValueError(_('No path or yaml_dict_tpl was provided. 
' - 'There is nothing to parse.'))) - - if self.tpl: - self.parsed_params = parsed_params - self._validate_field() - self.version = self._tpl_version() - self.relationship_types = self._tpl_relationship_types() - self.description = self._tpl_description() - self.topology_template = self._topology_template() - self.repositories = self._tpl_repositories() - if self.topology_template.tpl: - self.inputs = self._inputs() - self.relationship_templates = self._relationship_templates() - self.nodetemplates = self._nodetemplates() - self.outputs = self._outputs() - self._handle_nested_tosca_templates_with_topology() - self.graph = ToscaGraph(self.nodetemplates) - - ValidationIssueCollector.collector.stop() - self.verify_template() - - def _topology_template(self): - return TopologyTemplate(self._tpl_topology_template(), - self._get_all_custom_defs(), - self.relationship_types, - self.parsed_params, - None) - - def _inputs(self): - return self.topology_template.inputs - - def _nodetemplates(self): - return self.topology_template.nodetemplates - - def _relationship_templates(self): - return self.topology_template.relationship_templates - - def _outputs(self): - return self.topology_template.outputs - - def _tpl_version(self): - return self.tpl.get(DEFINITION_VERSION) - - def _tpl_description(self): - desc = self.tpl.get(DESCRIPTION) - if desc: - return desc.rstrip() - - def _tpl_imports(self): - return self.tpl.get(IMPORTS) - - def _tpl_repositories(self): - repositories = self.tpl.get(REPOSITORIES) - reposit = [] - if repositories: - for name, val in repositories.items(): - reposits = Repository(name, val) - reposit.append(reposits) - return reposit - - def _tpl_relationship_types(self): - return self._get_custom_types(RELATIONSHIP_TYPES) - - def _tpl_relationship_templates(self): - topology_template = self._tpl_topology_template() - return topology_template.get(RELATIONSHIP_TEMPLATES) - - def _tpl_topology_template(self): - return self.tpl.get(TOPOLOGY_TEMPLATE) - - def 
_get_all_custom_defs(self, imports=None): - types = [IMPORTS, NODE_TYPES, CAPABILITY_TYPES, RELATIONSHIP_TYPES, - DATA_TYPES, INTERFACE_TYPES, POLICY_TYPES, GROUP_TYPES] - custom_defs_final = {} - custom_defs = self._get_custom_types(types, imports) - if custom_defs: - custom_defs_final.update(custom_defs) - if custom_defs.get(IMPORTS): - import_defs = self._get_all_custom_defs( - custom_defs.get(IMPORTS)) - custom_defs_final.update(import_defs) - - # As imports are not custom_types, removing from the dict - custom_defs_final.pop(IMPORTS, None) - return custom_defs_final - - def _get_custom_types(self, type_definitions, imports=None): - """Handle custom types defined in imported template files - - This method loads the custom type definitions referenced in "imports" - section of the TOSCA YAML template. - """ - custom_defs = {} - type_defs = [] - if not isinstance(type_definitions, list): - type_defs.append(type_definitions) - else: - type_defs = type_definitions - - if not imports: - imports = self._tpl_imports() - - if imports: - custom_service = toscaparser.imports.\ - ImportsLoader(imports, self.path, - type_defs, self.tpl) - - nested_tosca_tpls = custom_service.get_nested_tosca_tpls() - self._update_nested_tosca_tpls_with_topology(nested_tosca_tpls) - - custom_defs = custom_service.get_custom_defs() - if not custom_defs: - return - - # Handle custom types defined in current template file - for type_def in type_defs: - if type_def != IMPORTS: - inner_custom_types = self.tpl.get(type_def) or {} - if inner_custom_types: - custom_defs.update(inner_custom_types) - return custom_defs - - def _update_nested_tosca_tpls_with_topology(self, nested_tosca_tpls): - for tpl in nested_tosca_tpls: - filename, tosca_tpl = list(tpl.items())[0] - if (tosca_tpl.get(TOPOLOGY_TEMPLATE) and - filename not in list( - self.nested_tosca_tpls_with_topology.keys())): - self.nested_tosca_tpls_with_topology.update(tpl) - - def _handle_nested_tosca_templates_with_topology(self): - for 
fname, tosca_tpl in self.nested_tosca_tpls_with_topology.items(): - for nodetemplate in self.nodetemplates: - if self._is_sub_mapped_node(nodetemplate, tosca_tpl): - parsed_params = self._get_params_for_nested_template( - nodetemplate) - topology_tpl = tosca_tpl.get(TOPOLOGY_TEMPLATE) - topology_with_sub_mapping = TopologyTemplate( - topology_tpl, - self._get_all_custom_defs(), - self.relationship_types, - parsed_params, - nodetemplate) - if topology_with_sub_mapping.substitution_mappings: - # Record nested topo templates in top level template - self.nested_tosca_templates_with_topology.\ - append(topology_with_sub_mapping) - # Set substitution mapping object for mapped node - nodetemplate.sub_mapping_tosca_template = \ - topology_with_sub_mapping.substitution_mappings - - def _validate_field(self): - version = self._tpl_version() - if not version: - ValidationIssueCollector.collector.appendException( - MissingRequiredFieldError(what='Template', - required=DEFINITION_VERSION)) - else: - self._validate_version(version) - self.version = version - - for name in self.tpl: - if (name not in SECTIONS and - name not in self.ADDITIONAL_SECTIONS.get(version, ())): - ValidationIssueCollector.collector.appendException( - UnknownFieldError(what='Template', field=name)) - - def _validate_version(self, version): - if version not in self.VALID_TEMPLATE_VERSIONS: - ValidationIssueCollector.collector.appendException( - InvalidTemplateVersion( - what=version, - valid_versions=', '. 
join(self.VALID_TEMPLATE_VERSIONS))) - else: - if version != 'tosca_simple_yaml_1_0': - update_definitions(version) - - def _get_path(self, path): - if path.lower().endswith(('.yaml','.yml')): - return path - elif path.lower().endswith(('.zip', '.csar')): - # a CSAR archive - csar = CSAR(path, self.a_file) - if csar.validate(): - csar.decompress() - self.a_file = True # the file has been decompressed locally - return os.path.join(csar.temp_dir, csar.get_main_template()) - else: - ValidationIssueCollector.collector.appendException( - ValueError(_('"%(path)s" is not a valid file.') - % {'path': path})) - - def verify_template(self): - if ValidationIssueCollector.collector.exceptionsCaught(): - if self.input_path: - raise ValidationError( - message=(_('\nThe input "%(path)s" failed validation with ' - 'the following error(s): \n\n\t') - % {'path': self.input_path}) + - '\n\t'.join(ValidationIssueCollector.collector.getExceptionsReport())) - else: - raise ValidationError( - message=_('\nThe pre-parsed input failed validation with ' - 'the following error(s): \n\n\t') + - '\n\t'.join(ValidationIssueCollector.collector.getExceptionsReport())) - else: - if self.input_path: - msg = (_('The input "%(path)s" successfully passed ' - 'validation.') % {'path': self.input_path}) - else: - msg = _('The pre-parsed input successfully passed validation.') - - log.info(msg) - - def _is_sub_mapped_node(self, nodetemplate, tosca_tpl): - """Return True if the nodetemple is substituted.""" - if (nodetemplate and not nodetemplate.sub_mapping_tosca_template and - self.get_sub_mapping_node_type(tosca_tpl) == nodetemplate.type - and len(nodetemplate.interfaces) < 1): - return True - else: - return False - - def _get_params_for_nested_template(self, nodetemplate): - """Return total params for nested_template.""" - parsed_params = deepcopy(self.parsed_params) \ - if self.parsed_params else {} - if nodetemplate: - for pname in nodetemplate.get_properties(): - parsed_params.update({pname: - 
nodetemplate.get_property_value(pname)}) - return parsed_params - - def get_sub_mapping_node_type(self, tosca_tpl): - """Return substitution mappings node type.""" - if tosca_tpl: - return TopologyTemplate.get_sub_mapping_node_type( - tosca_tpl.get(TOPOLOGY_TEMPLATE)) - - def _has_substitution_mappings(self): - """Return True if the template has valid substitution mappings.""" - return self.topology_template is not None and \ - self.topology_template.substitution_mappings is not None - - def has_nested_templates(self): - """Return True if the tosca template has nested templates.""" - return self.nested_tosca_templates_with_topology is not None and \ - len(self.nested_tosca_templates_with_topology) >= 1 -*/ \ No newline at end of file diff --git a/src/main/java/org/onap/sdc/toscaparser/api/Triggers.java b/src/main/java/org/onap/sdc/toscaparser/api/Triggers.java deleted file mode 100644 index c78978f..0000000 --- a/src/main/java/org/onap/sdc/toscaparser/api/Triggers.java +++ /dev/null @@ -1,201 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * SDC - * ================================================================================ - * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package org.onap.sdc.toscaparser.api; - -import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; -import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; -import org.onap.sdc.toscaparser.api.utils.ValidateUtils; - -import java.util.LinkedHashMap; - -public class Triggers extends EntityTemplate { - - private static final String DESCRIPTION = "description"; - private static final String EVENT = "event_type"; - private static final String SCHEDULE = "schedule"; - private static final String TARGET_FILTER = "target_filter"; - private static final String CONDITION = "condition"; - private static final String ACTION = "action"; - - private static final String[] SECTIONS = { - DESCRIPTION, EVENT, SCHEDULE, TARGET_FILTER, CONDITION, ACTION - }; - - private static final String METER_NAME = "meter_name"; - private static final String CONSTRAINT = "constraint"; - private static final String PERIOD = "period"; - private static final String EVALUATIONS = "evaluations"; - private static final String METHOD = "method"; - private static final String THRESHOLD = "threshold"; - private static final String COMPARISON_OPERATOR = "comparison_operator"; - - private static final String[] CONDITION_KEYNAMES = { - METER_NAME, CONSTRAINT, PERIOD, EVALUATIONS, METHOD, THRESHOLD, COMPARISON_OPERATOR - }; - - private String name; - private LinkedHashMap triggerTpl; - - public Triggers(String name, LinkedHashMap triggerTpl) { - super(); // dummy. 
don't want super - this.name = name; - this.triggerTpl = triggerTpl; - validateKeys(); - validateCondition(); - validateInput(); - } - - public String getDescription() { - return (String) triggerTpl.get("description"); - } - - public String getEvent() { - return (String) triggerTpl.get("event_type"); - } - - public LinkedHashMap getSchedule() { - return (LinkedHashMap) triggerTpl.get("schedule"); - } - - public LinkedHashMap getTargetFilter() { - return (LinkedHashMap) triggerTpl.get("target_filter"); - } - - public LinkedHashMap getCondition() { - return (LinkedHashMap) triggerTpl.get("condition"); - } - - public LinkedHashMap getAction() { - return (LinkedHashMap) triggerTpl.get("action"); - } - - private void validateKeys() { - for (String key : triggerTpl.keySet()) { - boolean bFound = false; - for (int i = 0; i < SECTIONS.length; i++) { - if (key.equals(SECTIONS[i])) { - bFound = true; - break; - } - } - if (!bFound) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE249", String.format( - "UnknownFieldError: Triggers \"%s\" contains unknown field \"%s\"", - name, key))); - } - } - } - - private void validateCondition() { - for (String key : getCondition().keySet()) { - boolean bFound = false; - for (int i = 0; i < CONDITION_KEYNAMES.length; i++) { - if (key.equals(CONDITION_KEYNAMES[i])) { - bFound = true; - break; - } - } - if (!bFound) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE250", String.format( - "UnknownFieldError: Triggers \"%s\" contains unknown field \"%s\"", - name, key))); - } - } - } - - private void validateInput() { - for (String key : getCondition().keySet()) { - Object value = getCondition().get(key); - if (key.equals(PERIOD) || key.equals(EVALUATIONS)) { - ValidateUtils.validateInteger(value); - } else if (key.equals(THRESHOLD)) { - ValidateUtils.validateNumeric(value); - } else if (key.equals(METER_NAME) || key.equals(METHOD)) { - 
ValidateUtils.validateString(value); - } - } - } - - @Override - public String toString() { - return "Triggers{" - + "name='" + name + '\'' - + ", triggerTpl=" + triggerTpl - + '}'; - } -} - -/*python - -from toscaparser.common.exception import ValidationIssueCollector -from toscaparser.common.exception import UnknownFieldError -from toscaparser.entity_template import EntityTemplate - -SECTIONS = (DESCRIPTION, EVENT, SCHEDULE, TARGET_FILTER, CONDITION, ACTION) = \ - ('description', 'event_type', 'schedule', - 'target_filter', 'condition', 'action') -CONDITION_KEYNAMES = (CONTRAINT, PERIOD, EVALUATIONS, METHOD) = \ - ('constraint', 'period', 'evaluations', 'method') -log = logging.getLogger('tosca') - - -class Triggers(EntityTemplate): - - '''Triggers defined in policies of topology template''' - - def __init__(self, name, trigger_tpl): - self.name = name - self.trigger_tpl = trigger_tpl - self._validate_keys() - self._validate_condition() - - def get_description(self): - return self.trigger_tpl['description'] - - def get_event(self): - return self.trigger_tpl['event_type'] - - def get_schedule(self): - return self.trigger_tpl['schedule'] - - def get_target_filter(self): - return self.trigger_tpl['target_filter'] - - def get_condition(self): - return self.trigger_tpl['condition'] - - def get_action(self): - return self.trigger_tpl['action'] - - def _validate_keys(self): - for key in self.trigger_tpl.keys(): - if key not in SECTIONS: - ValidationIssueCollector.appendException( - UnknownFieldError(what='Triggers "%s"' % self.name, - field=key)) - - def _validate_condition(self): - for key in self.get_condition(): - if key not in CONDITION_KEYNAMES: - ValidationIssueCollector.appendException( - UnknownFieldError(what='Triggers "%s"' % self.name, - field=key)) -*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/UnsupportedType.java b/src/main/java/org/onap/sdc/toscaparser/api/UnsupportedType.java deleted file mode 100644 index f2bb650..0000000 --- 
a/src/main/java/org/onap/sdc/toscaparser/api/UnsupportedType.java +++ /dev/null @@ -1,101 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * SDC - * ================================================================================ - * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package org.onap.sdc.toscaparser.api; - -import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; -import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; - -public class UnsupportedType { - - // Note: TOSCA spec version related - - /* - The tosca.nodes.Storage.ObjectStorage and tosca.nodes.Storage.BlockStorage - used here as un_supported_types are part of the name changes in TOSCA spec - version 1.1. The original name as specified in version 1.0 are, - tosca.nodes.BlockStorage and tosca.nodes.ObjectStorage which are supported - by the tosca-parser. Since there are little overlapping in version support - currently in the tosca-parser, the names tosca.nodes.Storage.ObjectStorage - and tosca.nodes.Storage.BlockStorage are used here to demonstrate the usage - of un_supported_types. As tosca-parser move to provide support for version - 1.1 and higher, they will be removed. 
- */ - - private UnsupportedType() { - } - - private static final String[] UNSUPPORTED_TYPES = { - "tosca.test.invalidtype", - "tosca.nodes.Storage.ObjectStorage", - "tosca.nodes.Storage.BlockStorage"}; - - public static boolean validateType(String entityType) { - for (String ust : UNSUPPORTED_TYPES) { - if (ust.equals(entityType)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE251", String.format( - "UnsupportedTypeError: Entity type \"%s\" is not supported", entityType))); - return true; - } - } - return false; - } -} - -/*python - -from toscaparser.common.exception import ValidationIssueCollector -from toscaparser.common.exception import UnsupportedTypeError -from toscaparser.utils.gettextutils import _ - -log = logging.getLogger('tosca') - - -class UnsupportedType(object): - - """Note: TOSCA spec version related - - The tosca.nodes.Storage.ObjectStorage and tosca.nodes.Storage.BlockStorage - used here as un_supported_types are part of the name changes in TOSCA spec - version 1.1. The original name as specified in version 1.0 are, - tosca.nodes.BlockStorage and tosca.nodes.ObjectStorage which are supported - by the tosca-parser. Since there are little overlapping in version support - currently in the tosca-parser, the names tosca.nodes.Storage.ObjectStorage - and tosca.nodes.Storage.BlockStorage are used here to demonstrate the usage - of un_supported_types. As tosca-parser move to provide support for version - 1.1 and higher, they will be removed. 
- """ - un_supported_types = ['tosca.test.invalidtype', - 'tosca.nodes.Storage.ObjectStorage', - 'tosca.nodes.Storage.BlockStorage'] - - def __init__(self): - pass - - @staticmethod - def validate_type(entitytype): - if entitytype in UnsupportedType.un_supported_types: - ValidationIssueCollector.appendException(UnsupportedTypeError( - what=_('%s') - % entitytype)) - return True - else: - return False -*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/common/JToscaException.java b/src/main/java/org/onap/sdc/toscaparser/api/common/JToscaException.java deleted file mode 100644 index 56416c6..0000000 --- a/src/main/java/org/onap/sdc/toscaparser/api/common/JToscaException.java +++ /dev/null @@ -1,47 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * SDC - * ================================================================================ - * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package org.onap.sdc.toscaparser.api.common; - -public class JToscaException extends Exception { - - private static final long serialVersionUID = 1L; - private String code; - - public JToscaException(String message, String code) { - super(message); - this.code = code; - } - - public String getCode() { - return code; - } - - public void setCode(String code) { - this.code = code; - } - - //JE1001 - Meta file missing - //JE1002 - Invalid yaml content - //JE1003 - Entry-Definition not defined in meta file - //JE1004 - Entry-Definition file missing - //JE1005 - General Error - //JE1006 - General Error/Path not valid -} diff --git a/src/main/java/org/onap/sdc/toscaparser/api/common/JToscaValidationIssue.java b/src/main/java/org/onap/sdc/toscaparser/api/common/JToscaValidationIssue.java deleted file mode 100644 index cd5cbc5..0000000 --- a/src/main/java/org/onap/sdc/toscaparser/api/common/JToscaValidationIssue.java +++ /dev/null @@ -1,75 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * SDC - * ================================================================================ - * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package org.onap.sdc.toscaparser.api.common; - -import java.util.Objects; - -public class JToscaValidationIssue { - - private String code; - private String message; - - - public JToscaValidationIssue(String code, String message) { - super(); - this.code = code; - this.message = message; - } - - public String getMessage() { - return message; - } - - public void setMessage(String message) { - this.message = message; - } - - public String getCode() { - return code; - } - - public void setCode(String code) { - this.code = code; - } - - @Override - public String toString() { - return "JToscaError [code=" + code + ", message=" + message + "]"; - } - - @Override - public boolean equals(final Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - final JToscaValidationIssue that = (JToscaValidationIssue) o; - return Objects.equals(code, that.code) && - Objects.equals(message, that.message); - } - - @Override - public int hashCode() { - return Objects.hash(code, message); - } -} diff --git a/src/main/java/org/onap/sdc/toscaparser/api/common/TOSCAException.java b/src/main/java/org/onap/sdc/toscaparser/api/common/TOSCAException.java deleted file mode 100644 index c109ffd..0000000 --- a/src/main/java/org/onap/sdc/toscaparser/api/common/TOSCAException.java +++ /dev/null @@ -1,58 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * SDC - * ================================================================================ - * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package org.onap.sdc.toscaparser.api.common; - -import java.util.IllegalFormatException; - -public class TOSCAException extends Exception { - private String message = "An unkown exception has occurred"; - private static boolean FATAL_EXCEPTION_FORMAT_ERRORS = false; - private String msgFmt = null; - - public TOSCAException(String... strings) { - try { - message = String.format(msgFmt, (Object[]) strings); - } catch (IllegalFormatException e) { - // TODO log - - if (FATAL_EXCEPTION_FORMAT_ERRORS) { - throw e; - } - - } - - } - - public String __str__() { - return message; - } - - public static void generate_inv_schema_property_error(String name, String attr, String value, String valid_values) { - //TODO - - } - - public static void setFatalFormatException(boolean flag) { - FATAL_EXCEPTION_FORMAT_ERRORS = flag; - } - -} - diff --git a/src/main/java/org/onap/sdc/toscaparser/api/common/ValidationIssueCollector.java b/src/main/java/org/onap/sdc/toscaparser/api/common/ValidationIssueCollector.java deleted file mode 100644 index 71c0401..0000000 --- a/src/main/java/org/onap/sdc/toscaparser/api/common/ValidationIssueCollector.java +++ /dev/null @@ -1,57 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * SDC - * ================================================================================ - * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. 
- * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package org.onap.sdc.toscaparser.api.common; - -import java.util.*; - -// Perfectly good enough... - -public class ValidationIssueCollector { - - private Map validationIssues = new HashMap(); - - public void appendValidationIssue(JToscaValidationIssue issue) { - - validationIssues.put(issue.getMessage(), issue); - - } - - public List getValidationIssueReport() { - List report = new ArrayList<>(); - if (!validationIssues.isEmpty()) { - for (JToscaValidationIssue exception : validationIssues.values()) { - report.add("[" + exception.getCode() + "]: " + exception.getMessage()); - } - } - - return report; - } - - public Map getValidationIssues() { - return validationIssues; - } - - - public int validationIssuesCaught() { - return validationIssues.size(); - } - -} diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/ArtifactTypeDef.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/ArtifactTypeDef.java deleted file mode 100644 index 9cf8c6c..0000000 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/ArtifactTypeDef.java +++ /dev/null @@ -1,121 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * SDC - * 
================================================================================ - * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package org.onap.sdc.toscaparser.api.elements; - -import java.util.LinkedHashMap; - -public class ArtifactTypeDef extends StatefulEntityType { - - private String type; - private LinkedHashMap customDef; - private LinkedHashMap properties; - private LinkedHashMap parentArtifacts; - - - public ArtifactTypeDef(String type, LinkedHashMap customDef) { - super(type, ARTIFACT_PREFIX, customDef); - - this.type = type; - this.customDef = customDef; - properties = defs != null ? 
(LinkedHashMap) defs.get(PROPERTIES) : null; - parentArtifacts = getParentArtifacts(); - } - - private LinkedHashMap getParentArtifacts() { - LinkedHashMap artifacts = new LinkedHashMap<>(); - String parentArtif = null; - if (getParentType() != null) { - parentArtif = getParentType().getType(); - } - if (parentArtif != null && !parentArtif.isEmpty()) { - while (!parentArtif.equals("tosca.artifacts.Root")) { - Object ob = TOSCA_DEF.get(parentArtif); - artifacts.put(parentArtif, ob); - parentArtif = - (String) ((LinkedHashMap) ob).get("derived_from"); - } - } - return artifacts; - } - - public ArtifactTypeDef getParentType() { - // Return a artifact entity from which this entity is derived - if (defs == null) { - return null; - } - String partifactEntity = derivedFrom(defs); - if (partifactEntity != null) { - return new ArtifactTypeDef(partifactEntity, customDef); - } - return null; - } - - public Object getArtifact(String name) { - // Return the definition of an artifact field by name - if (defs != null) { - return defs.get(name); - } - return null; - } - - public String getType() { - return type; - } - -} - -/*python -class ArtifactTypeDef(StatefulEntityType): - '''TOSCA built-in artifacts type.''' - - def __init__(self, atype, custom_def=None): - super(ArtifactTypeDef, self).__init__(atype, self.ARTIFACT_PREFIX, - custom_def) - self.type = atype - self.custom_def = custom_def - self.properties = None - if self.PROPERTIES in self.defs: - self.properties = self.defs[self.PROPERTIES] - self.parent_artifacts = self._get_parent_artifacts() - - def _get_parent_artifacts(self): - artifacts = {} - parent_artif = self.parent_type.type if self.parent_type else None - if parent_artif: - while parent_artif != 'tosca.artifacts.Root': - artifacts[parent_artif] = self.TOSCA_DEF[parent_artif] - parent_artif = artifacts[parent_artif]['derived_from'] - return artifacts - - @property - def parent_type(self): - '''Return a artifact entity from which this entity is derived.''' - if 
not hasattr(self, 'defs'): - return None - partifact_entity = self.derived_from(self.defs) - if partifact_entity: - return ArtifactTypeDef(partifact_entity, self.custom_def) - - def get_artifact(self, name): - '''Return the definition of an artifact field by name.''' - if name in self.defs: - return self.defs[name] -*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/AttributeDef.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/AttributeDef.java deleted file mode 100644 index e4a30f1..0000000 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/AttributeDef.java +++ /dev/null @@ -1,60 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * SDC - * ================================================================================ - * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package org.onap.sdc.toscaparser.api.elements; - -import java.util.LinkedHashMap; - -public class AttributeDef { - // TOSCA built-in Attribute type - - private String name; - private Object value; - private LinkedHashMap schema; - - public AttributeDef(String adName, Object adValue, LinkedHashMap adSchema) { - name = adName; - value = adValue; - schema = adSchema; - } - - public String getName() { - return name; - } - - public Object getValue() { - return value; - } - - public LinkedHashMap getSchema() { - return schema; - } -} - -/*python - -class AttributeDef(object): - '''TOSCA built-in Attribute type.''' - - def __init__(self, name, value=None, schema=None): - self.name = name - self.value = value - self.schema = schema -*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/CapabilityTypeDef.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/CapabilityTypeDef.java deleted file mode 100644 index e3c24b3..0000000 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/CapabilityTypeDef.java +++ /dev/null @@ -1,240 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * SDC - * ================================================================================ - * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package org.onap.sdc.toscaparser.api.elements; - -import java.util.ArrayList; -import java.util.LinkedHashMap; -import java.util.Map; - -public class CapabilityTypeDef extends StatefulEntityType { - // TOSCA built-in capabilities type - - private static final String TOSCA_TYPEURI_CAPABILITY_ROOT = "tosca.capabilities.Root"; - - private String name; - private String nodetype; - private LinkedHashMap customDef; - private LinkedHashMap properties; - private LinkedHashMap parentCapabilities; - - @SuppressWarnings("unchecked") - public CapabilityTypeDef(String cname, String ctype, String ntype, LinkedHashMap ccustomDef) { - super(ctype, CAPABILITY_PREFIX, ccustomDef); - - name = cname; - nodetype = ntype; - properties = null; - customDef = ccustomDef; - if (defs != null) { - properties = (LinkedHashMap) defs.get(PROPERTIES); - } - parentCapabilities = getParentCapabilities(customDef); - } - - @SuppressWarnings("unchecked") - public ArrayList getPropertiesDefObjects() { - // Return a list of property definition objects - ArrayList propsdefs = new ArrayList<>(); - LinkedHashMap parentProperties = new LinkedHashMap<>(); - if (parentCapabilities != null) { - for (Map.Entry me : parentCapabilities.entrySet()) { - parentProperties.put(me.getKey(), ((LinkedHashMap) me.getValue()).get("properties")); - } - } - if (properties != null) { - for (Map.Entry me : properties.entrySet()) { - propsdefs.add(new PropertyDef(me.getKey(), null, (LinkedHashMap) me.getValue())); - } - } - if (parentProperties != null) { - for (Map.Entry me : parentProperties.entrySet()) { - LinkedHashMap props = (LinkedHashMap) me.getValue(); - if (props != null) { - for (Map.Entry pe : props.entrySet()) { - String prop = pe.getKey(); - LinkedHashMap schema = (LinkedHashMap) pe.getValue(); - // add parent 
property if not overridden by children type - if (properties == null || properties.get(prop) == null) { - propsdefs.add(new PropertyDef(prop, null, schema)); - } - } - } - } - } - return propsdefs; - } - - public LinkedHashMap getPropertiesDef() { - LinkedHashMap pds = new LinkedHashMap<>(); - for (PropertyDef pd : getPropertiesDefObjects()) { - pds.put(pd.getName(), pd); - } - return pds; - } - - public PropertyDef getPropertyDefValue(String pdname) { - // Return the definition of a given property name - LinkedHashMap propsDef = getPropertiesDef(); - if (propsDef != null && propsDef.get(pdname) != null) { - return (PropertyDef) propsDef.get(pdname).getPDValue(); - } - return null; - } - - @SuppressWarnings("unchecked") - private LinkedHashMap getParentCapabilities(LinkedHashMap customDef) { - LinkedHashMap capabilities = new LinkedHashMap<>(); - CapabilityTypeDef parentCap = getParentType(); - if (parentCap != null) { - String sParentCap = parentCap.getType(); - while (!sParentCap.equals(TOSCA_TYPEURI_CAPABILITY_ROOT)) { - if (TOSCA_DEF.get(sParentCap) != null) { - capabilities.put(sParentCap, TOSCA_DEF.get(sParentCap)); - } else if (customDef != null && customDef.get(sParentCap) != null) { - capabilities.put(sParentCap, customDef.get(sParentCap)); - } - sParentCap = (String) ((LinkedHashMap) capabilities.get(sParentCap)).get("derived_from"); - } - } - return capabilities; - } - - public CapabilityTypeDef getParentType() { - // Return a capability this capability is derived from - if (defs == null) { - return null; - } - String pnode = derivedFrom(defs); - if (pnode != null && !pnode.isEmpty()) { - return new CapabilityTypeDef(name, pnode, nodetype, customDef); - } - return null; - } - - public boolean inheritsFrom(ArrayList typeNames) { - // Check this capability is in type_names - - // Check if this capability or some of its parent types - // are in the list of types: type_names - if (typeNames.contains(getType())) { - return true; - } else if (getParentType() 
!= null) { - return getParentType().inheritsFrom(typeNames); - } - return false; - } - - // getters/setters - - public LinkedHashMap getProperties() { - return properties; - } - - public String getName() { - return name; - } -} - -/*python -from toscaparser.elements.property_definition import PropertyDef -from toscaparser.elements.statefulentitytype import StatefulEntityType - - -class CapabilityTypeDef(StatefulEntityType): - '''TOSCA built-in capabilities type.''' - TOSCA_TYPEURI_CAPABILITY_ROOT = 'tosca.capabilities.Root' - - def __init__(self, name, ctype, ntype, custom_def=None): - self.name = name - super(CapabilityTypeDef, self).__init__(ctype, self.CAPABILITY_PREFIX, - custom_def) - self.nodetype = ntype - self.properties = None - self.custom_def = custom_def - if self.PROPERTIES in self.defs: - self.properties = self.defs[self.PROPERTIES] - self.parent_capabilities = self._get_parent_capabilities(custom_def) - - def get_properties_def_objects(self): - '''Return a list of property definition objects.''' - properties = [] - parent_properties = {} - if self.parent_capabilities: - for type, value in self.parent_capabilities.items(): - parent_properties[type] = value.get('properties') - if self.properties: - for prop, schema in self.properties.items(): - properties.append(PropertyDef(prop, None, schema)) - if parent_properties: - for parent, props in parent_properties.items(): - for prop, schema in props.items(): - # add parent property if not overridden by children type - if not self.properties or \ - prop not in self.properties.keys(): - properties.append(PropertyDef(prop, None, schema)) - return properties - - def get_properties_def(self): - '''Return a dictionary of property definition name-object pairs.''' - return {prop.name: prop - for prop in self.get_properties_def_objects()} - - def get_property_def_value(self, name): - '''Return the definition of a given property name.''' - props_def = self.get_properties_def() - if props_def and name in props_def: - 
return props_def[name].value - - def _get_parent_capabilities(self, custom_def=None): - capabilities = {} - parent_cap = self.parent_type - if parent_cap: - parent_cap = parent_cap.type - while parent_cap != self.TOSCA_TYPEURI_CAPABILITY_ROOT: - if parent_cap in self.TOSCA_DEF.keys(): - capabilities[parent_cap] = self.TOSCA_DEF[parent_cap] - elif custom_def and parent_cap in custom_def.keys(): - capabilities[parent_cap] = custom_def[parent_cap] - parent_cap = capabilities[parent_cap]['derived_from'] - return capabilities - - @property - def parent_type(self): - '''Return a capability this capability is derived from.''' - if not hasattr(self, 'defs'): - return None - pnode = self.derived_from(self.defs) - if pnode: - return CapabilityTypeDef(self.name, pnode, - self.nodetype, self.custom_def) - - def inherits_from(self, type_names): - '''Check this capability is in type_names - - Check if this capability or some of its parent types - are in the list of types: type_names - ''' - if self.type in type_names: - return True - elif self.parent_type: - return self.parent_type.inherits_from(type_names) - else: - return False*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/DataType.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/DataType.java deleted file mode 100644 index d8cf460..0000000 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/DataType.java +++ /dev/null @@ -1,136 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * SDC - * ================================================================================ - * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package org.onap.sdc.toscaparser.api.elements; - -import java.util.ArrayList; -import java.util.LinkedHashMap; - -public class DataType extends StatefulEntityType { - - LinkedHashMap customDef; - - public DataType(String _dataTypeName, LinkedHashMap _customDef) { - super(_dataTypeName, DATATYPE_NETWORK_PREFIX, _customDef); - - customDef = _customDef; - } - - public DataType getParentType() { - // Return a datatype this datatype is derived from - if (defs != null) { - String ptype = derivedFrom(defs); - if (ptype != null) { - return new DataType(ptype, customDef); - } - } - return null; - } - - public String getValueType() { - // Return 'type' section in the datatype schema - if (defs != null) { - return (String) entityValue(defs, "type"); - } - return null; - } - - public ArrayList getAllPropertiesObjects() { - //Return all properties objects defined in type and parent type - ArrayList propsDef = getPropertiesDefObjects(); - DataType ptype = getParentType(); - while (ptype != null) { - propsDef.addAll(ptype.getPropertiesDefObjects()); - ptype = ptype.getParentType(); - } - return propsDef; - } - - public LinkedHashMap getAllProperties() { - // Return a dictionary of all property definition name-object pairs - LinkedHashMap pno = new LinkedHashMap<>(); - for (PropertyDef pd : getAllPropertiesObjects()) { - pno.put(pd.getName(), pd); - } - return pno; - } - - public Object getAllPropertyValue(String name) { - // Return the value of a given property name - 
LinkedHashMap propsDef = getAllProperties(); - if (propsDef != null && propsDef.get(name) != null) { - return propsDef.get(name).getPDValue(); - } - return null; - } - - public LinkedHashMap getDefs() { - return defs; - } - -} - -/*python - -from toscaparser.elements.statefulentitytype import StatefulEntityType - - -class DataType(StatefulEntityType): - '''TOSCA built-in and user defined complex data type.''' - - def __init__(self, datatypename, custom_def=None): - super(DataType, self).__init__(datatypename, - self.DATATYPE_NETWORK_PREFIX, - custom_def) - self.custom_def = custom_def - - @property - def parent_type(self): - '''Return a datatype this datatype is derived from.''' - ptype = self.derived_from(self.defs) - if ptype: - return DataType(ptype, self.custom_def) - return None - - @property - def value_type(self): - '''Return 'type' section in the datatype schema.''' - return self.entity_value(self.defs, 'type') - - def get_all_properties_objects(self): - '''Return all properties objects defined in type and parent type.''' - props_def = self.get_properties_def_objects() - ptype = self.parent_type - while ptype: - props_def.extend(ptype.get_properties_def_objects()) - ptype = ptype.parent_type - return props_def - - def get_all_properties(self): - '''Return a dictionary of all property definition name-object pairs.''' - return {prop.name: prop - for prop in self.get_all_properties_objects()} - - def get_all_property_value(self, name): - '''Return the value of a given property name.''' - props_def = self.get_all_properties() - if props_def and name in props_def.key(): - return props_def[name].value -*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/EntityType.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/EntityType.java deleted file mode 100644 index efc6ac9..0000000 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/EntityType.java +++ /dev/null @@ -1,436 +0,0 @@ -/*- - * 
============LICENSE_START======================================================= - * SDC - * ================================================================================ - * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package org.onap.sdc.toscaparser.api.elements; - -import java.io.IOException; -import java.io.InputStream; -import java.util.ArrayList; -import java.util.LinkedHashMap; -import java.util.Map; - -import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; -import org.onap.sdc.toscaparser.api.utils.CopyUtils; -import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; -import org.onap.sdc.toscaparser.api.extensions.ExtTools; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.yaml.snakeyaml.Yaml; - -public class EntityType { - - private static Logger log = LoggerFactory.getLogger(EntityType.class.getName()); - - private static final String TOSCA_DEFINITION_1_0_YAML = "TOSCA_definition_1_0.yaml"; - protected static final String DERIVED_FROM = "derived_from"; - protected static final String PROPERTIES = "properties"; - protected static final String ATTRIBUTES = "attributes"; - protected static final String REQUIREMENTS = "requirements"; - protected static final String 
INTERFACES = "interfaces"; - protected static final String CAPABILITIES = "capabilities"; - protected static final String TYPE = "type"; - protected static final String ARTIFACTS = "artifacts"; - - @SuppressWarnings("unused") - private static final String SECTIONS[] = { - DERIVED_FROM, PROPERTIES, ATTRIBUTES, REQUIREMENTS, - INTERFACES, CAPABILITIES, TYPE, ARTIFACTS - }; - - public static final String TOSCA_DEF_SECTIONS[] = { - "node_types", "data_types", "artifact_types", - "group_types", "relationship_types", - "capability_types", "interface_types", - "policy_types"}; - - - // TOSCA definition file - //private final static String path = EntityType.class.getProtectionDomain().getCodeSource().getLocation().getPath(); - - //private final static String path = EntityType.class.getClassLoader().getResource("TOSCA_definition_1_0.yaml").getFile(); - //private final static String TOSCA_DEF_FILE = EntityType.class.getClassLoader().getResourceAsStream("TOSCA_definition_1_0.yaml"); - - private static LinkedHashMap TOSCA_DEF_LOAD_AS_IS = loadTdf(); - - //EntityType.class.getClassLoader().getResourceAsStream("TOSCA_definition_1_0.yaml"); - - @SuppressWarnings("unchecked") - private static LinkedHashMap loadTdf() { - String toscaDefLocation = EntityType.class.getClassLoader().getResource(TOSCA_DEFINITION_1_0_YAML).getFile(); - InputStream input = EntityType.class.getClassLoader().getResourceAsStream(TOSCA_DEFINITION_1_0_YAML); - if (input == null) { - log.error("EntityType - loadTdf - Couldn't load TOSCA_DEF_FILE {}", toscaDefLocation); - } - Yaml yaml = new Yaml(); - Object loaded = yaml.load(input); - //@SuppressWarnings("unchecked") - return (LinkedHashMap) loaded; - } - - // Map of definition with pre-loaded values of TOSCA_DEF_FILE_SECTIONS - public static LinkedHashMap TOSCA_DEF; - - static { - TOSCA_DEF = new LinkedHashMap(); - for (String section : TOSCA_DEF_SECTIONS) { - @SuppressWarnings("unchecked") - LinkedHashMap value = (LinkedHashMap) 
TOSCA_DEF_LOAD_AS_IS.get(section); - if (value != null) { - for (String key : value.keySet()) { - TOSCA_DEF.put(key, value.get(key)); - } - } - } - } - - public static final String DEPENDSON = "tosca.relationships.DependsOn"; - public static final String HOSTEDON = "tosca.relationships.HostedOn"; - public static final String CONNECTSTO = "tosca.relationships.ConnectsTo"; - public static final String ATTACHESTO = "tosca.relationships.AttachesTo"; - public static final String LINKSTO = "tosca.relationships.network.LinksTo"; - public static final String BINDSTO = "tosca.relationships.network.BindsTo"; - - public static final String RELATIONSHIP_TYPE[] = { - "tosca.relationships.DependsOn", - "tosca.relationships.HostedOn", - "tosca.relationships.ConnectsTo", - "tosca.relationships.AttachesTo", - "tosca.relationships.network.LinksTo", - "tosca.relationships.network.BindsTo"}; - - public static final String NODE_PREFIX = "tosca.nodes."; - public static final String RELATIONSHIP_PREFIX = "tosca.relationships."; - public static final String CAPABILITY_PREFIX = "tosca.capabilities."; - public static final String INTERFACE_PREFIX = "tosca.interfaces."; - public static final String ARTIFACT_PREFIX = "tosca.artifacts."; - public static final String POLICY_PREFIX = "tosca.policies."; - public static final String GROUP_PREFIX = "tosca.groups."; - //currently the data types are defined only for network - // but may have changes in the future. 
- public static final String DATATYPE_PREFIX = "tosca.datatypes."; - public static final String DATATYPE_NETWORK_PREFIX = DATATYPE_PREFIX + "network."; - public static final String TOSCA = "tosca"; - - protected String type; - protected LinkedHashMap defs = null; - - public Object getParentType() { - return null; - } - - public String derivedFrom(LinkedHashMap defs) { - // Return a type this type is derived from - return (String) entityValue(defs, "derived_from"); - } - - public boolean isDerivedFrom(String type_str) { - // Check if object inherits from the given type - // Returns true if this object is derived from 'type_str' - // False otherwise. - if (type == null || this.type.isEmpty()) { - return false; - } else if (type == type_str) { - return true; - } else if (getParentType() != null) { - return ((EntityType) getParentType()).isDerivedFrom(type_str); - } else { - return false; - } - } - - public Object entityValue(LinkedHashMap defs, String key) { - if (defs != null) { - return defs.get(key); - } - return null; - } - - @SuppressWarnings("unchecked") - public Object getValue(String ndtype, LinkedHashMap _defs, boolean parent) { - Object value = null; - if (_defs == null) { - if (defs == null) { - return null; - } - _defs = this.defs; - } - Object defndt = _defs.get(ndtype); - if (defndt != null) { - // copy the value to avoid that next operations add items in the - // item definitions - //value = copy.copy(defs[ndtype]) - value = CopyUtils.copyLhmOrAl(defndt); - } - - if (parent) { - EntityType p = this; - if (p != null) { - while (p != null) { - if (p.defs != null && p.defs.get(ndtype) != null) { - // get the parent value - Object parentValue = p.defs.get(ndtype); - if (value != null) { - if (value instanceof LinkedHashMap) { - for (Map.Entry me : ((LinkedHashMap) parentValue).entrySet()) { - String k = me.getKey(); - if (((LinkedHashMap) value).get(k) == null) { - ((LinkedHashMap) value).put(k, me.getValue()); - } - } - } - if (value instanceof ArrayList) 
{ - for (Object pValue : (ArrayList) parentValue) { - if (!((ArrayList) value).contains(pValue)) { - ((ArrayList) value).add(pValue); - } - } - } - } else { - // value = copy.copy(parent_value) - value = CopyUtils.copyLhmOrAl(parentValue); - } - } - p = (EntityType) p.getParentType(); - } - } - } - - return value; - } - - @SuppressWarnings("unchecked") - public Object getDefinition(String ndtype) { - Object value = null; - LinkedHashMap _defs; - // no point in hasattr, because we have it, and it - // doesn't do anything except emit an exception anyway - //if not hasattr(self, 'defs'): - // defs = None - // ValidationIssueCollector.appendException( - // ValidationError(message="defs is " + str(defs))) - //else: - // defs = self.defs - _defs = this.defs; - - - if (_defs != null && _defs.get(ndtype) != null) { - value = _defs.get(ndtype); - } - - Object p = getParentType(); - if (p != null) { - Object inherited = ((EntityType) p).getDefinition(ndtype); - if (inherited != null) { - // inherited = dict(inherited) WTF?!? - if (value == null) { - value = inherited; - } else { - //????? 
- //inherited.update(value) - //value.update(inherited) - for (Map.Entry me : ((LinkedHashMap) inherited).entrySet()) { - ((LinkedHashMap) value).put(me.getKey(), me.getValue()); - } - } - } - } - return value; - } - - public static void updateDefinitions(String version) { - ExtTools exttools = new ExtTools(); - String extensionDefsFile = exttools.getDefsFile(version); - - try (InputStream input = EntityType.class.getClassLoader().getResourceAsStream(extensionDefsFile);) { - Yaml yaml = new Yaml(); - LinkedHashMap nfvDefFile = (LinkedHashMap) yaml.load(input); - LinkedHashMap nfvDef = new LinkedHashMap<>(); - for (String section : TOSCA_DEF_SECTIONS) { - if (nfvDefFile.get(section) != null) { - LinkedHashMap value = - (LinkedHashMap) nfvDefFile.get(section); - for (String key : value.keySet()) { - nfvDef.put(key, value.get(key)); - } - } - } - TOSCA_DEF.putAll(nfvDef); - } catch (IOException e) { - log.error("EntityType - updateDefinitions - Failed to update definitions from defs file {}", extensionDefsFile); - log.error("Exception:", e); - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE280", - String.format("Failed to update definitions from defs file \"%s\" ", extensionDefsFile))); - return; - } - } -} - -/*python - -from toscaparser.common.exception import ValidationIssueCollector -from toscaparser.common.exception import ValidationError -from toscaparser.extensions.exttools import ExtTools -import org.onap.sdc.toscaparser.api.utils.yamlparser - -log = logging.getLogger('tosca') - - -class EntityType(object): - '''Base class for TOSCA elements.''' - - SECTIONS = (DERIVED_FROM, PROPERTIES, ATTRIBUTES, REQUIREMENTS, - INTERFACES, CAPABILITIES, TYPE, ARTIFACTS) = \ - ('derived_from', 'properties', 'attributes', 'requirements', - 'interfaces', 'capabilities', 'type', 'artifacts') - - TOSCA_DEF_SECTIONS = ['node_types', 'data_types', 'artifact_types', - 'group_types', 'relationship_types', - 'capability_types', 'interface_types', 
- 'policy_types'] - - '''TOSCA definition file.''' - TOSCA_DEF_FILE = os.path.join( - os.path.dirname(os.path.abspath(__file__)), - "TOSCA_definition_1_0.yaml") - - loader = toscaparser.utils.yamlparser.load_yaml - - TOSCA_DEF_LOAD_AS_IS = loader(TOSCA_DEF_FILE) - - # Map of definition with pre-loaded values of TOSCA_DEF_FILE_SECTIONS - TOSCA_DEF = {} - for section in TOSCA_DEF_SECTIONS: - if section in TOSCA_DEF_LOAD_AS_IS.keys(): - value = TOSCA_DEF_LOAD_AS_IS[section] - for key in value.keys(): - TOSCA_DEF[key] = value[key] - - RELATIONSHIP_TYPE = (DEPENDSON, HOSTEDON, CONNECTSTO, ATTACHESTO, - LINKSTO, BINDSTO) = \ - ('tosca.relationships.DependsOn', - 'tosca.relationships.HostedOn', - 'tosca.relationships.ConnectsTo', - 'tosca.relationships.AttachesTo', - 'tosca.relationships.network.LinksTo', - 'tosca.relationships.network.BindsTo') - - NODE_PREFIX = 'tosca.nodes.' - RELATIONSHIP_PREFIX = 'tosca.relationships.' - CAPABILITY_PREFIX = 'tosca.capabilities.' - INTERFACE_PREFIX = 'tosca.interfaces.' - ARTIFACT_PREFIX = 'tosca.artifacts.' - POLICY_PREFIX = 'tosca.policies.' - GROUP_PREFIX = 'tosca.groups.' - # currently the data types are defined only for network - # but may have changes in the future. - DATATYPE_PREFIX = 'tosca.datatypes.' - DATATYPE_NETWORK_PREFIX = DATATYPE_PREFIX + 'network.' - TOSCA = 'tosca' - - def derived_from(self, defs): - '''Return a type this type is derived from.''' - return self.entity_value(defs, 'derived_from') - - def is_derived_from(self, type_str): - '''Check if object inherits from the given type. - - Returns true if this object is derived from 'type_str'. - False otherwise. 
- ''' - if not self.type: - return False - elif self.type == type_str: - return True - elif self.parent_type: - return self.parent_type.is_derived_from(type_str) - else: - return False - - def entity_value(self, defs, key): - if key in defs: - return defs[key] - - def get_value(self, ndtype, defs=None, parent=None): - value = None - if defs is None: - if not hasattr(self, 'defs'): - return None - defs = self.defs - if ndtype in defs: - # copy the value to avoid that next operations add items in the - # item definitions - value = copy.copy(defs[ndtype]) - if parent: - p = self - if p: - while p: - if ndtype in p.defs: - # get the parent value - parent_value = p.defs[ndtype] - if value: - if isinstance(value, dict): - for k, v in parent_value.items(): - if k not in value.keys(): - value[k] = v - if isinstance(value, list): - for p_value in parent_value: - if p_value not in value: - value.append(p_value) - else: - value = copy.copy(parent_value) - p = p.parent_type - return value - - def get_definition(self, ndtype): - value = None - if not hasattr(self, 'defs'): - defs = None - ValidationIssueCollector.appendException( - ValidationError(message="defs is " + str(defs))) - else: - defs = self.defs - if defs is not None and ndtype in defs: - value = defs[ndtype] - p = self.parent_type - if p: - inherited = p.get_definition(ndtype) - if inherited: - inherited = dict(inherited) - if not value: - value = inherited - else: - inherited.update(value) - value.update(inherited) - return value - - -def update_definitions(version): - exttools = ExtTools() - extension_defs_file = exttools.get_defs_file(version) - loader = toscaparser.utils.yamlparser.load_yaml - nfv_def_file = loader(extension_defs_file) - nfv_def = {} - for section in EntityType.TOSCA_DEF_SECTIONS: - if section in nfv_def_file.keys(): - value = nfv_def_file[section] - for key in value.keys(): - nfv_def[key] = value[key] - EntityType.TOSCA_DEF.update(nfv_def) -*/ diff --git 
a/src/main/java/org/onap/sdc/toscaparser/api/elements/GroupType.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/GroupType.java deleted file mode 100644 index db6f2b7..0000000 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/GroupType.java +++ /dev/null @@ -1,263 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * SDC - * ================================================================================ - * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package org.onap.sdc.toscaparser.api.elements; - -import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; -import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; - -import java.util.ArrayList; -import java.util.LinkedHashMap; -import java.util.Map; - -public class GroupType extends StatefulEntityType { - - private static final String DERIVED_FROM = "derived_from"; - private static final String VERSION = "version"; - private static final String METADATA = "metadata"; - private static final String DESCRIPTION = "description"; - private static final String PROPERTIES = "properties"; - private static final String MEMBERS = "members"; - private static final String INTERFACES = "interfaces"; - - private static final String[] SECTIONS = { - DERIVED_FROM, VERSION, METADATA, DESCRIPTION, PROPERTIES, MEMBERS, INTERFACES}; - - private String groupType; - private LinkedHashMap customDef; - private String groupDescription; - private String groupVersion; - //private LinkedHashMap groupProperties; - //private ArrayList groupMembers; - private LinkedHashMap metaData; - - @SuppressWarnings("unchecked") - public GroupType(String groupType, LinkedHashMap customDef) { - super(groupType, GROUP_PREFIX, customDef); - - this.groupType = groupType; - this.customDef = customDef; - validateFields(); - if (defs != null) { - groupDescription = (String) defs.get(DESCRIPTION); - groupVersion = (String) defs.get(VERSION); - //groupProperties = (LinkedHashMap)defs.get(PROPERTIES); - //groupMembers = (ArrayList)defs.get(MEMBERS); - Object mdo = defs.get(METADATA); - if (mdo instanceof LinkedHashMap) { - metaData = (LinkedHashMap) mdo; - } else { - metaData = null; - } - - if (metaData != null) { - validateMetadata(metaData); - } - } - } - - public GroupType getParentType() { - // Return a group statefulentity of this entity is derived from. 
- if (defs == null) { - return null; - } - String pgroupEntity = derivedFrom(defs); - if (pgroupEntity != null) { - return new GroupType(pgroupEntity, customDef); - } - return null; - } - - public String getDescription() { - return groupDescription; - } - - public String getVersion() { - return groupVersion; - } - - @SuppressWarnings("unchecked") - public LinkedHashMap getInterfaces() { - Object ifo = getValue(INTERFACES, null, false); - if (ifo instanceof LinkedHashMap) { - return (LinkedHashMap) ifo; - } - return new LinkedHashMap(); - } - - private void validateFields() { - if (defs != null) { - for (String name : defs.keySet()) { - boolean bFound = false; - for (String sect : SECTIONS) { - if (name.equals(sect)) { - bFound = true; - break; - } - } - if (!bFound) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE120", String.format( - "UnknownFieldError: Group Type \"%s\" contains unknown field \"%s\"", - groupType, name))); - } - } - } - } - - @SuppressWarnings("unchecked") - private void validateMetadata(LinkedHashMap metadata) { - String mtt = (String) metadata.get("type"); - if (mtt != null && !mtt.equals("map") && !mtt.equals("tosca:map")) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE121", String.format( - "InvalidTypeError: \"%s\" defined in group for metadata is invalid", - mtt))); - } - for (String entrySchema : metadata.keySet()) { - Object estob = metadata.get(entrySchema); - if (estob instanceof LinkedHashMap) { - String est = (String) ((LinkedHashMap) estob).get("type"); - if (!est.equals("string")) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE122", String.format( - "InvalidTypeError: \"%s\" defined in group for metadata \"%s\" is invalid", - est, entrySchema))); - } - } - } - } - - public String getType() { - return groupType; - } - - @SuppressWarnings("unchecked") - public ArrayList getCapabilitiesObjects() { - // Return a 
list of capability objects - ArrayList typecapabilities = new ArrayList<>(); - LinkedHashMap caps = (LinkedHashMap) getValue(CAPABILITIES, null, true); - if (caps != null) { - // 'cname' is symbolic name of the capability - // 'cvalue' is a dict { 'type': } - for (Map.Entry me : caps.entrySet()) { - String cname = me.getKey(); - LinkedHashMap cvalue = (LinkedHashMap) me.getValue(); - String ctype = cvalue.get("type"); - CapabilityTypeDef cap = new CapabilityTypeDef(cname, ctype, type, customDef); - typecapabilities.add(cap); - } - } - return typecapabilities; - } - - public LinkedHashMap getCapabilities() { - // Return a dictionary of capability name-objects pairs - LinkedHashMap caps = new LinkedHashMap<>(); - for (CapabilityTypeDef ctd : getCapabilitiesObjects()) { - caps.put(ctd.getName(), ctd); - } - return caps; - } - -} - -/*python - -from toscaparser.common.exception import ValidationIssueCollector -from toscaparser.common.exception import InvalidTypeError -from toscaparser.common.exception import UnknownFieldError -from toscaparser.elements.statefulentitytype import StatefulEntityType - - -class GroupType(StatefulEntityType): - '''TOSCA built-in group type.''' - - SECTIONS = (DERIVED_FROM, VERSION, METADATA, DESCRIPTION, PROPERTIES, - MEMBERS, INTERFACES) = \ - ("derived_from", "version", "metadata", "description", - "properties", "members", "interfaces") - - def __init__(self, grouptype, custom_def=None): - super(GroupType, self).__init__(grouptype, self.GROUP_PREFIX, - custom_def) - self.custom_def = custom_def - self.grouptype = grouptype - self._validate_fields() - self.group_description = None - if self.DESCRIPTION in self.defs: - self.group_description = self.defs[self.DESCRIPTION] - - self.group_version = None - if self.VERSION in self.defs: - self.group_version = self.defs[self.VERSION] - - self.group_properties = None - if self.PROPERTIES in self.defs: - self.group_properties = self.defs[self.PROPERTIES] - - self.group_members = None - if 
self.MEMBERS in self.defs: - self.group_members = self.defs[self.MEMBERS] - - if self.METADATA in self.defs: - self.meta_data = self.defs[self.METADATA] - self._validate_metadata(self.meta_data) - - @property - def parent_type(self): - '''Return a group statefulentity of this entity is derived from.''' - if not hasattr(self, 'defs'): - return None - pgroup_entity = self.derived_from(self.defs) - if pgroup_entity: - return GroupType(pgroup_entity, self.custom_def) - - @property - def description(self): - return self.group_description - - @property - def version(self): - return self.group_version - - @property - def interfaces(self): - return self.get_value(self.INTERFACES) - - def _validate_fields(self): - if self.defs: - for name in self.defs.keys(): - if name not in self.SECTIONS: - ValidationIssueCollector.appendException( - UnknownFieldError(what='Group Type %s' - % self.grouptype, field=name)) - - def _validate_metadata(self, meta_data): - if not meta_data.get('type') in ['map', 'tosca:map']: - ValidationIssueCollector.appendException( - InvalidTypeError(what='"%s" defined in group for ' - 'metadata' % (meta_data.get('type')))) - for entry_schema, entry_schema_type in meta_data.items(): - if isinstance(entry_schema_type, dict) and not \ - entry_schema_type.get('type') == 'string': - ValidationIssueCollector.appendException( - InvalidTypeError(what='"%s" defined in group for ' - 'metadata "%s"' - % (entry_schema_type.get('type'), - entry_schema))) -*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/InterfacesDef.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/InterfacesDef.java deleted file mode 100644 index 2862a11..0000000 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/InterfacesDef.java +++ /dev/null @@ -1,283 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * SDC - * ================================================================================ - * Copyright (C) 2019 
AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package org.onap.sdc.toscaparser.api.elements; - -import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; -import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; -import org.onap.sdc.toscaparser.api.EntityTemplate; - -import java.util.ArrayList; -import java.util.LinkedHashMap; -import java.util.Map; - -public class InterfacesDef extends StatefulEntityType { - - public static final String LIFECYCLE = "tosca.interfaces.node.lifecycle.Standard"; - public static final String CONFIGURE = "tosca.interfaces.relationship.Configure"; - public static final String LIFECYCLE_SHORTNAME = "Standard"; - public static final String CONFIGURE_SHORTNAME = "Configure"; - - public static final String[] SECTIONS = { - LIFECYCLE, CONFIGURE, LIFECYCLE_SHORTNAME, CONFIGURE_SHORTNAME - }; - - public static final String IMPLEMENTATION = "implementation"; - public static final String DESCRIPTION = "description"; - public static final String INPUTS = "inputs"; - - public static final String[] INTERFACE_DEF_RESERVED_WORDS = { - "type", "inputs", "derived_from", "version", "description"}; - - private EntityType ntype; - private EntityTemplate nodeTemplate; - - private String operationName; - private 
Object operationDef; - private Object implementation; - private LinkedHashMap inputs; - private String description; - - @SuppressWarnings("unchecked") - public InterfacesDef(EntityType inodeType, - String interfaceType, - EntityTemplate inodeTemplate, - String iname, - Object ivalue) { - // void - super(); - - ntype = inodeType; - nodeTemplate = inodeTemplate; - type = interfaceType; - operationName = iname; - operationDef = ivalue; - implementation = null; - inputs = null; - defs = new LinkedHashMap<>(); - - if (interfaceType.equals(LIFECYCLE_SHORTNAME)) { - interfaceType = LIFECYCLE; - } - if (interfaceType.equals(CONFIGURE_SHORTNAME)) { - interfaceType = CONFIGURE; - } - - // only NodeType has getInterfaces "hasattr(ntype,interfaces)" - // while RelationshipType does not - if (ntype instanceof NodeType) { - if (((NodeType) ntype).getInterfaces() != null - && ((NodeType) ntype).getInterfaces().values().contains(interfaceType)) { - LinkedHashMap nii = (LinkedHashMap) - ((NodeType) ntype).getInterfaces().get(interfaceType); - interfaceType = (String) nii.get("type"); - } - } - if (inodeType != null) { - if (nodeTemplate != null && nodeTemplate.getCustomDef() != null - && nodeTemplate.getCustomDef().containsKey(interfaceType)) { - defs = (LinkedHashMap) - nodeTemplate.getCustomDef().get(interfaceType); - } else { - defs = (LinkedHashMap) TOSCA_DEF.get(interfaceType); - } - } - - if (ivalue != null) { - if (ivalue instanceof LinkedHashMap) { - for (Map.Entry me : ((LinkedHashMap) ivalue).entrySet()) { - if (me.getKey().equals(IMPLEMENTATION)) { - implementation = me.getValue(); - } else if (me.getKey().equals(INPUTS)) { - inputs = (LinkedHashMap) me.getValue(); - } else if (me.getKey().equals(DESCRIPTION)) { - description = (String) me.getValue(); - } else { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE123", String.format( - "UnknownFieldError: \"interfaces\" of template \"%s\" contain unknown field \"%s\"", - 
nodeTemplate.getName(), me.getKey()))); - } - } - } - } - } - - public ArrayList getLifecycleOps() { - if (defs != null) { - if (type.equals(LIFECYCLE)) { - return ops(); - } - } - return null; - } - - public ArrayList getInterfaceOps() { - if (defs != null) { - ArrayList ops = ops(); - ArrayList idrw = new ArrayList<>(); - for (int i = 0; i < InterfacesDef.INTERFACE_DEF_RESERVED_WORDS.length; i++) { - idrw.add(InterfacesDef.INTERFACE_DEF_RESERVED_WORDS[i]); - } - ops.removeAll(idrw); - return ops; - } - return null; - } - - public ArrayList getConfigureOps() { - if (defs != null) { - if (type.equals(CONFIGURE)) { - return ops(); - } - } - return null; - } - - private ArrayList ops() { - return new ArrayList(defs.keySet()); - } - - // getters/setters - - public LinkedHashMap getInputs() { - return inputs; - } - - public void setInput(String name, Object value) { - inputs.put(name, value); - } - - public Object getImplementation() { - return implementation; - } - - public void setImplementation(Object implementation) { - this.implementation = implementation; - } - - public String getDescription() { - return description; - } - - public void setDescription(String description) { - this.description = description; - } - - public String getOperationName() { - return operationName; - } - - public void setOperationName(String operationName) { - this.operationName = operationName; - } -} - - - -/*python - -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. 
- -from toscaparser.common.exception import ValidationIssueCollector -from toscaparser.common.exception import UnknownFieldError -from toscaparser.elements.statefulentitytype import StatefulEntityType - -SECTIONS = (LIFECYCLE, CONFIGURE, LIFECYCLE_SHORTNAME, - CONFIGURE_SHORTNAME) = \ - ('tosca.interfaces.node.lifecycle.Standard', - 'tosca.interfaces.relationship.Configure', - 'Standard', 'Configure') - -INTERFACEVALUE = (IMPLEMENTATION, INPUTS) = ('implementation', 'inputs') - -INTERFACE_DEF_RESERVED_WORDS = ['type', 'inputs', 'derived_from', 'version', - 'description'] - - -class InterfacesDef(StatefulEntityType): - '''TOSCA built-in interfaces type.''' - - def __init__(self, node_type, interfacetype, - node_template=None, name=None, value=None): - self.ntype = node_type - self.node_template = node_template - self.type = interfacetype - self.name = name - self.value = value - self.implementation = None - self.inputs = None - self.defs = {} - if interfacetype == LIFECYCLE_SHORTNAME: - interfacetype = LIFECYCLE - if interfacetype == CONFIGURE_SHORTNAME: - interfacetype = CONFIGURE - if hasattr(self.ntype, 'interfaces') \ - and self.ntype.interfaces \ - and interfacetype in self.ntype.interfaces: - interfacetype = self.ntype.interfaces[interfacetype]['type'] - if node_type: - if self.node_template and self.node_template.custom_def \ - and interfacetype in self.node_template.custom_def: - self.defs = self.node_template.custom_def[interfacetype] - else: - self.defs = self.TOSCA_DEF[interfacetype] - if value: - if isinstance(self.value, dict): - for i, j in self.value.items(): - if i == IMPLEMENTATION: - self.implementation = j - elif i == INPUTS: - self.inputs = j - else: - what = ('"interfaces" of template "%s"' % - self.node_template.name) - ValidationIssueCollector.appendException( - UnknownFieldError(what=what, field=i)) - else: - self.implementation = value - - @property - def lifecycle_ops(self): - if self.defs: - if self.type == LIFECYCLE: - return self._ops() 
- - @property - def configure_ops(self): - if self.defs: - if self.type == CONFIGURE: - return self._ops() - - def _ops(self): - ops = [] - for name in list(self.defs.keys()): - ops.append(name) - return ops -*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/Metadata.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/Metadata.java deleted file mode 100644 index f3de49e..0000000 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/Metadata.java +++ /dev/null @@ -1,62 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * SDC - * ================================================================================ - * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package org.onap.sdc.toscaparser.api.elements; - -import java.util.AbstractMap; -import java.util.HashMap; -import java.util.Map; -import java.util.stream.Collectors; - -public class Metadata { - - private final Map metadataMap; - - public Metadata(Map metadataMap) { - this.metadataMap = metadataMap != null ? 
metadataMap : new HashMap<>(); - } - - public String getValue(String key) { - - Object obj = this.metadataMap.get(key); - if (obj != null) { - return String.valueOf(obj); - } - return null; - } - - /** - * Get all properties of a Metadata object.
- * This object represents the "metadata" section of some entity. - * - * @return all properties of this Metadata, as a key-value. - */ - public Map getAllProperties() { - return metadataMap.entrySet().stream().map(e -> new AbstractMap.SimpleEntry(e.getKey(), String.valueOf(e.getValue()))).collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); - } - - @Override - public String toString() { - return "Metadata{" - + "metadataMap=" + metadataMap - + '}'; - } - -} diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/NodeType.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/NodeType.java deleted file mode 100644 index c251be9..0000000 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/NodeType.java +++ /dev/null @@ -1,549 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * SDC - * ================================================================================ - * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package org.onap.sdc.toscaparser.api.elements; - -import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; -import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; - -import java.util.ArrayList; -import java.util.LinkedHashMap; -import java.util.Map; - -public class NodeType extends StatefulEntityType { - // TOSCA built-in node type - - private static final String DERIVED_FROM = "derived_from"; - private static final String METADATA = "metadata"; - private static final String PROPERTIES = "properties"; - private static final String VERSION = "version"; - private static final String DESCRIPTION = "description"; - private static final String ATTRIBUTES = "attributes"; - private static final String REQUIREMENTS = "requirements"; - private static final String CAPABILITIES = "capabilities"; - private static final String INTERFACES = "interfaces"; - private static final String ARTIFACTS = "artifacts"; - - private static final String SECTIONS[] = { - DERIVED_FROM, METADATA, PROPERTIES, VERSION, DESCRIPTION, ATTRIBUTES, REQUIREMENTS, CAPABILITIES, INTERFACES, ARTIFACTS - }; - - private String ntype; - public LinkedHashMap customDef; - - public NodeType(String nttype, LinkedHashMap ntcustomDef) { - super(nttype, NODE_PREFIX, ntcustomDef); - ntype = nttype; - customDef = ntcustomDef; - _validateKeys(); - } - - public Object getParentType() { - // Return a node this node is derived from - if (defs == null) { - return null; - } - String pnode = derivedFrom(defs); - if (pnode != null && !pnode.isEmpty()) { - return new NodeType(pnode, customDef); - } - return null; - } - - @SuppressWarnings("unchecked") - public LinkedHashMap getRelationship() { - // Return a dictionary of relationships to other node types - - // This method returns a dictionary of named relationships that nodes - // of the current node type (self) can have to other nodes (of specific - // types) in a 
TOSCA template. - - LinkedHashMap relationship = new LinkedHashMap<>(); - ArrayList> requires; - Object treq = getAllRequirements(); - if (treq != null) { - // NOTE(sdmonov): Check if requires is a dict. - // If it is a dict convert it to a list of dicts. - // This is needed because currently the code below supports only - // lists as requirements definition. The following check will - // make sure if a map (dict) was provided it will be converted to - // a list before proceeding to the parsing. - if (treq instanceof LinkedHashMap) { - requires = new ArrayList<>(); - for (Map.Entry me : ((LinkedHashMap) treq).entrySet()) { - LinkedHashMap tl = new LinkedHashMap<>(); - tl.put(me.getKey(), me.getValue()); - requires.add(tl); - } - } else { - requires = (ArrayList>) treq; - } - - String keyword = null; - String nodeType = null; - for (LinkedHashMap require : requires) { - String relation = null; - for (Map.Entry re : require.entrySet()) { - String key = re.getKey(); - LinkedHashMap req = (LinkedHashMap) re.getValue(); - if (req.get("relationship") != null) { - Object trelation = req.get("relationship"); - // trelation is a string or a dict with "type" mapped to the string we want - if (trelation instanceof String) { - relation = (String) trelation; - } else { - if (((LinkedHashMap) trelation).get("type") != null) { - relation = (String) ((LinkedHashMap) trelation).get("type"); - } - } - nodeType = (String) req.get("node"); - //BUG meaningless?? 
LinkedHashMap value = req; - if (nodeType != null) { - keyword = "node"; - } else { - String getRelation = null; - // If nodeTypeByCap is a dict and has a type key - // we need to lookup the node type using - // the capability type - String captype = (String) req.get("capability"); - nodeType = _getNodeTypeByCap(captype); - if (nodeType != null) { - getRelation = _getRelation(key, nodeType); - } else { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE11", String.format( - "NodeTypeRequirementForCapabilityUnfulfilled: Node type: \"%s\" with requrement \"%s\" for node type with capability type \"%s\" is not found\\unfulfilled", this.ntype, key, captype))); - } - if (getRelation != null) { - relation = getRelation; - } - keyword = key; - } - } - } - if (relation == null || nodeType == null) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE11", String.format( - "NodeTypeForRelationUnfulfilled: Node type \"%s\" - relationship type \"%s\" is unfulfilled", this.ntype, relation))); - } else { - RelationshipType rtype = new RelationshipType(relation, keyword, customDef); - NodeType relatednode = new NodeType(nodeType, customDef); - relationship.put(rtype, relatednode); - } - } - } - return relationship; - - } - - @SuppressWarnings("unchecked") - private String _getNodeTypeByCap(String cap) { - // Find the node type that has the provided capability - - // This method will lookup all node types if they have the - // provided capability. 
- // Filter the node types - ArrayList nodeTypes = new ArrayList<>(); - for (String nt : customDef.keySet()) { - if (nt.startsWith(NODE_PREFIX) || nt.startsWith("org.openecomp") && !nt.equals("tosca.nodes.Root")) { - nodeTypes.add(nt); - } - } - for (String nt : nodeTypes) { - LinkedHashMap nodeDef = (LinkedHashMap) customDef.get(nt); - if (nodeDef instanceof LinkedHashMap && nodeDef.get("capabilities") != null) { - LinkedHashMap nodeCaps = (LinkedHashMap) nodeDef.get("capabilities"); - if (nodeCaps != null) { - for (Object val : nodeCaps.values()) { - if (val instanceof LinkedHashMap) { - String tp = (String) ((LinkedHashMap) val).get("type"); - if (tp != null && tp.equals(cap)) { - return nt; - } - } - } - } - } - } - return null; - } - - @SuppressWarnings("unchecked") - private String _getRelation(String key, String ndtype) { - String relation = null; - NodeType ntype = new NodeType(ndtype, customDef); - LinkedHashMap caps = ntype.getCapabilities(); - if (caps != null && caps.get(key) != null) { - CapabilityTypeDef c = caps.get(key); - for (int i = 0; i < RELATIONSHIP_TYPE.length; i++) { - String r = RELATIONSHIP_TYPE[i]; - if (r != null) { - relation = r; - break; - } - LinkedHashMap rtypedef = (LinkedHashMap) customDef.get(r); - for (Object o : rtypedef.values()) { - LinkedHashMap properties = (LinkedHashMap) o; - if (properties.get(c.getType()) != null) { - relation = r; - break; - } - } - if (relation != null) { - break; - } else { - for (Object o : rtypedef.values()) { - LinkedHashMap properties = (LinkedHashMap) o; - if (properties.get(c.getParentType()) != null) { - relation = r; - break; - } - } - } - } - } - return relation; - } - - @SuppressWarnings("unchecked") - public ArrayList getCapabilitiesObjects() { - // Return a list of capability objects - ArrayList typecapabilities = new ArrayList<>(); - LinkedHashMap caps = (LinkedHashMap) getValue(CAPABILITIES, null, true); - if (caps != null) { - // 'cname' is symbolic name of the capability - // 'cvalue' 
is a dict { 'type': } - for (Map.Entry me : caps.entrySet()) { - String cname = me.getKey(); - LinkedHashMap cvalue = (LinkedHashMap) me.getValue(); - String ctype = cvalue.get("type"); - CapabilityTypeDef cap = new CapabilityTypeDef(cname, ctype, type, customDef); - typecapabilities.add(cap); - } - } - return typecapabilities; - } - - public LinkedHashMap getCapabilities() { - // Return a dictionary of capability name-objects pairs - LinkedHashMap caps = new LinkedHashMap<>(); - for (CapabilityTypeDef ctd : getCapabilitiesObjects()) { - caps.put(ctd.getName(), ctd); - } - return caps; - } - - @SuppressWarnings("unchecked") - public ArrayList getRequirements() { - return (ArrayList) getValue(REQUIREMENTS, null, true); - } - - public ArrayList getAllRequirements() { - return getRequirements(); - } - - @SuppressWarnings("unchecked") - public LinkedHashMap getInterfaces() { - return (LinkedHashMap) getValue(INTERFACES, null, false); - } - - - @SuppressWarnings("unchecked") - public ArrayList getLifecycleInputs() { - // Return inputs to life cycle operations if found - ArrayList inputs = new ArrayList<>(); - LinkedHashMap interfaces = getInterfaces(); - if (interfaces != null) { - for (Map.Entry me : interfaces.entrySet()) { - String iname = me.getKey(); - LinkedHashMap ivalue = (LinkedHashMap) me.getValue(); - if (iname.equals(InterfacesDef.LIFECYCLE)) { - for (Map.Entry ie : ivalue.entrySet()) { - if (ie.getKey().equals("input")) { - LinkedHashMap y = (LinkedHashMap) ie.getValue(); - for (String i : y.keySet()) { - inputs.add(i); - } - } - } - } - } - } - return inputs; - } - - public ArrayList getLifecycleOperations() { - // Return available life cycle operations if found - ArrayList ops = null; - LinkedHashMap interfaces = getInterfaces(); - if (interfaces != null) { - InterfacesDef i = new InterfacesDef(this, InterfacesDef.LIFECYCLE, null, null, null); - ops = i.getLifecycleOps(); - } - return ops; - } - - public CapabilityTypeDef getCapability(String name) { - 
//BUG?? the python code has to be wrong - // it refers to a bad attribute 'value'... - LinkedHashMap caps = getCapabilities(); - if (caps != null) { - return caps.get(name); - } - return null; - /* - def get_capability(self, name): - caps = self.get_capabilities() - if caps and name in caps.keys(): - return caps[name].value - */ - } - - public String getCapabilityType(String name) { - //BUG?? the python code has to be wrong - // it refers to a bad attribute 'value'... - CapabilityTypeDef captype = getCapability(name); - if (captype != null) { - return captype.getType(); - } - return null; - /* - def get_capability_type(self, name): - captype = self.get_capability(name) - if captype and name in captype.keys(): - return captype[name].value - */ - } - - private void _validateKeys() { - if (defs != null) { - for (String key : defs.keySet()) { - boolean bFound = false; - for (int i = 0; i < SECTIONS.length; i++) { - if (key.equals(SECTIONS[i])) { - bFound = true; - break; - } - } - if (!bFound) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE124", String.format( - "UnknownFieldError: Nodetype \"%s\" has unknown field \"%s\"", ntype, key))); - } - } - } - } - -} - -/*python - -from toscaparser.common.exception import ValidationIssueCollector -from toscaparser.common.exception import UnknownFieldError -from toscaparser.elements.capabilitytype import CapabilityTypeDef -import org.openecomp.sdc.toscaparser.api.elements.interfaces as ifaces -from toscaparser.elements.interfaces import InterfacesDef -from toscaparser.elements.relationshiptype import RelationshipType -from toscaparser.elements.statefulentitytype import StatefulEntityType - - -class NodeType(StatefulEntityType): - '''TOSCA built-in node type.''' - SECTIONS = (DERIVED_FROM, METADATA, PROPERTIES, VERSION, DESCRIPTION, ATTRIBUTES, REQUIREMENTS, CAPABILITIES, INTERFACES, ARTIFACTS) = \ - ('derived_from', 'metadata', 'properties', 'version', - 'description', 'attributes', 
'requirements', 'capabilities', - 'interfaces', 'artifacts') - - def __init__(self, ntype, custom_def=None): - super(NodeType, self).__init__(ntype, self.NODE_PREFIX, custom_def) - self.ntype = ntype - self.custom_def = custom_def - self._validate_keys() - - @property - def parent_type(self): - '''Return a node this node is derived from.''' - if not hasattr(self, 'defs'): - return None - pnode = self.derived_from(self.defs) - if pnode: - return NodeType(pnode, self.custom_def) - - @property - def relationship(self): - '''Return a dictionary of relationships to other node types. - - This method returns a dictionary of named relationships that nodes - of the current node type (self) can have to other nodes (of specific - types) in a TOSCA template. - - ''' - relationship = {} - requires = self.get_all_requirements() - if requires: - # NOTE(sdmonov): Check if requires is a dict. - # If it is a dict convert it to a list of dicts. - # This is needed because currently the code below supports only - # lists as requirements definition. The following check will - # make sure if a map (dict) was provided it will be converted to - # a list before proceeding to the parsing. - if isinstance(requires, dict): - requires = [{key: value} for key, value in requires.items()] - - keyword = None - node_type = None - for require in requires: - for key, req in require.items(): - if 'relationship' in req: - relation = req.get('relationship') - if 'type' in relation: - relation = relation.get('type') - node_type = req.get('node') - value = req - if node_type: - keyword = 'node' - else: - # If value is a dict and has a type key - # we need to lookup the node type using - # the capability type - value = req - if isinstance(value, dict): - captype = value['capability'] - value = (self. 
- _get_node_type_by_cap(key, captype)) - relation = self._get_relation(key, value) - keyword = key - node_type = value - rtype = RelationshipType(relation, keyword, self.custom_def) - relatednode = NodeType(node_type, self.custom_def) - relationship[rtype] = relatednode - return relationship - - def _get_node_type_by_cap(self, key, cap): - '''Find the node type that has the provided capability - - This method will lookup all node types if they have the - provided capability. - ''' - - # Filter the node types - node_types = [node_type for node_type in self.TOSCA_DEF.keys() - if node_type.startswith(self.NODE_PREFIX) and - node_type != 'tosca.nodes.Root'] - - for node_type in node_types: - node_def = self.TOSCA_DEF[node_type] - if isinstance(node_def, dict) and 'capabilities' in node_def: - node_caps = node_def['capabilities'] - for value in node_caps.values(): - if isinstance(value, dict) and \ - 'type' in value and value['type'] == cap: - return node_type - - def _get_relation(self, key, ndtype): - relation = None - ntype = NodeType(ndtype) - caps = ntype.get_capabilities() - if caps and key in caps.keys(): - c = caps[key] - for r in self.RELATIONSHIP_TYPE: - rtypedef = ntype.TOSCA_DEF[r] - for properties in rtypedef.values(): - if c.type in properties: - relation = r - break - if relation: - break - else: - for properties in rtypedef.values(): - if c.parent_type in properties: - relation = r - break - return relation - - def get_capabilities_objects(self): - '''Return a list of capability objects.''' - typecapabilities = [] - caps = self.get_value(self.CAPABILITIES, None, True) - if caps: - # 'name' is symbolic name of the capability - # 'value' is a dict { 'type': } - for name, value in caps.items(): - ctype = value.get('type') - cap = CapabilityTypeDef(name, ctype, self.type, - self.custom_def) - typecapabilities.append(cap) - return typecapabilities - - def get_capabilities(self): - '''Return a dictionary of capability name-objects pairs.''' - return {cap.name: 
cap - for cap in self.get_capabilities_objects()} - - @property - def requirements(self): - return self.get_value(self.REQUIREMENTS, None, True) - - def get_all_requirements(self): - return self.requirements - - @property - def interfaces(self): - return self.get_value(self.INTERFACES) - - @property - def lifecycle_inputs(self): - '''Return inputs to life cycle operations if found.''' - inputs = [] - interfaces = self.interfaces - if interfaces: - for name, value in interfaces.items(): - if name == ifaces.LIFECYCLE: - for x, y in value.items(): - if x == 'inputs': - for i in y.iterkeys(): - inputs.append(i) - return inputs - - @property - def lifecycle_operations(self): - '''Return available life cycle operations if found.''' - ops = None - interfaces = self.interfaces - if interfaces: - i = InterfacesDef(self.type, ifaces.LIFECYCLE) - ops = i.lifecycle_ops - return ops - - def get_capability(self, name): - caps = self.get_capabilities() - if caps and name in caps.keys(): - return caps[name].value - - def get_capability_type(self, name): - captype = self.get_capability(name) - if captype and name in captype.keys(): - return captype[name].value - - def _validate_keys(self): - if self.defs: - for key in self.defs.keys(): - if key not in self.SECTIONS: - ValidationIssueCollector.appendException( - UnknownFieldError(what='Nodetype"%s"' % self.ntype, - field=key)) -*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/PolicyType.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/PolicyType.java deleted file mode 100644 index b227a31..0000000 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/PolicyType.java +++ /dev/null @@ -1,309 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * SDC - * ================================================================================ - * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. 
- * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package org.onap.sdc.toscaparser.api.elements; - -import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; -import org.onap.sdc.toscaparser.api.utils.TOSCAVersionProperty; -import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; - -import java.util.ArrayList; -import java.util.LinkedHashMap; - -public class PolicyType extends StatefulEntityType { - - private static final String DERIVED_FROM = "derived_from"; - private static final String METADATA = "metadata"; - private static final String PROPERTIES = "properties"; - private static final String VERSION = "version"; - private static final String DESCRIPTION = "description"; - private static final String TARGETS = "targets"; - private static final String TRIGGERS = "triggers"; - private static final String TYPE = "type"; - - private static final String[] SECTIONS = { - DERIVED_FROM, METADATA, PROPERTIES, VERSION, DESCRIPTION, TARGETS, TRIGGERS, TYPE - }; - - private LinkedHashMap customDef; - private String policyDescription; - private Object policyVersion; - private LinkedHashMap properties; - private LinkedHashMap parentPolicies; - private LinkedHashMap metaData; - private ArrayList targetsList; - - - public PolicyType(String type, LinkedHashMap customDef) { - super(type, 
POLICY_PREFIX, customDef); - - this.type = type; - this.customDef = customDef; - validateKeys(); - - metaData = null; - if (defs != null && defs.get(METADATA) != null) { - metaData = (LinkedHashMap) defs.get(METADATA); - validateMetadata(metaData); - } - - properties = null; - if (defs != null && defs.get(PROPERTIES) != null) { - properties = (LinkedHashMap) defs.get(PROPERTIES); - } - parentPolicies = getParentPolicies(); - - policyVersion = null; - if (defs != null && defs.get(VERSION) != null) { - policyVersion = (new TOSCAVersionProperty( - defs.get(VERSION).toString())).getVersion(); - } - - policyDescription = null; - if (defs != null && defs.get(DESCRIPTION) != null) { - policyDescription = (String) defs.get(DESCRIPTION); - } - - targetsList = null; - if (defs != null && defs.get(TARGETS) != null) { - targetsList = (ArrayList) defs.get(TARGETS); - validateTargets(targetsList, this.customDef); - } - - } - - private LinkedHashMap getParentPolicies() { - LinkedHashMap policies = new LinkedHashMap<>(); - String parentPolicy; - if (getParentType() != null) { - parentPolicy = getParentType().getType(); - } else { - parentPolicy = null; - } - if (parentPolicy != null) { - while (parentPolicy != null && !parentPolicy.equals("tosca.policies.Root")) { - policies.put(parentPolicy, TOSCA_DEF.get(parentPolicy)); - parentPolicy = (String) - ((LinkedHashMap) policies.get(parentPolicy)).get("derived_from);"); - } - } - return policies; - } - - public String getType() { - return type; - } - - public PolicyType getParentType() { - // Return a policy statefulentity of this node is derived from - if (defs == null) { - return null; - } - String policyEntity = derivedFrom(defs); - if (policyEntity != null) { - return new PolicyType(policyEntity, customDef); - } - return null; - } - - public Object getPolicy(String name) { - // Return the definition of a policy field by name - if (defs != null && defs.get(name) != null) { - return defs.get(name); - } - return null; - } - - public 
ArrayList getTargets() { - // Return targets - return targetsList; - } - - public String getDescription() { - return policyDescription; - } - - public Object getVersion() { - return policyVersion; - } - - private void validateKeys() { - for (String key : defs.keySet()) { - boolean bFound = false; - for (String sect : SECTIONS) { - if (key.equals(sect)) { - bFound = true; - break; - } - } - if (!bFound) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE125", String.format( - "UnknownFieldError: Policy \"%s\" contains unknown field \"%s\"", - type, key))); - } - } - } - - private void validateTargets(ArrayList targetsList, - LinkedHashMap customDef) { - for (String nodetype : targetsList) { - if (customDef.get(nodetype) == null) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE126", String.format( - "InvalidTypeError: \"%s\" defined in targets for policy \"%s\"", - nodetype, type))); - - } - } - } - - private void validateMetadata(LinkedHashMap metaData) { - String mtype = (String) metaData.get("type"); - if (mtype != null && !mtype.equals("map") && !mtype.equals("tosca:map")) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE127", String.format( - "InvalidTypeError: \"%s\" defined in policy for metadata", - mtype))); - } - for (String entrySchema : this.metaData.keySet()) { - Object estob = this.metaData.get(entrySchema); - if (estob instanceof LinkedHashMap) { - String est = (String) - ((LinkedHashMap) estob).get("type"); - if (!est.equals("string")) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE128", String.format( - "InvalidTypeError: \"%s\" defined in policy for metadata \"%s\"", - est, entrySchema))); - } - } - } - } - -} - -/*python - -from toscaparser.common.exception import ValidationIssueCollector -from toscaparser.common.exception import InvalidTypeError -from toscaparser.common.exception import 
UnknownFieldError -from toscaparser.elements.statefulentitytype import StatefulEntityType -from toscaparser.utils.validateutils import TOSCAVersionProperty - - -class PolicyType(StatefulEntityType): - - '''TOSCA built-in policies type.''' - SECTIONS = (DERIVED_FROM, METADATA, PROPERTIES, VERSION, DESCRIPTION, TARGETS) = \ - ('derived_from', 'metadata', 'properties', 'version', - 'description', 'targets') - - def __init__(self, ptype, custom_def=None): - super(PolicyType, self).__init__(ptype, self.POLICY_PREFIX, - custom_def) - self.type = ptype - self.custom_def = custom_def - self._validate_keys() - - self.meta_data = None - if self.METADATA in self.defs: - self.meta_data = self.defs[self.METADATA] - self._validate_metadata(self.meta_data) - - self.properties = None - if self.PROPERTIES in self.defs: - self.properties = self.defs[self.PROPERTIES] - self.parent_policies = self._get_parent_policies() - - self.policy_version = None - if self.VERSION in self.defs: - self.policy_version = TOSCAVersionProperty( - self.defs[self.VERSION]).get_version() - - self.policy_description = self.defs[self.DESCRIPTION] \ - if self.DESCRIPTION in self.defs else None - - self.targets_list = None - if self.TARGETS in self.defs: - self.targets_list = self.defs[self.TARGETS] - self._validate_targets(self.targets_list, custom_def) - - def _get_parent_policies(self): - policies = {} - parent_policy = self.parent_type.type if self.parent_type else None - if parent_policy: - while parent_policy != 'tosca.policies.Root': - policies[parent_policy] = self.TOSCA_DEF[parent_policy] - parent_policy = policies[parent_policy]['derived_from'] - return policies - - @property - def parent_type(self): - '''Return a policy statefulentity of this node is derived from.''' - if not hasattr(self, 'defs'): - return None - ppolicy_entity = self.derived_from(self.defs) - if ppolicy_entity: - return PolicyType(ppolicy_entity, self.custom_def) - - def get_policy(self, name): - '''Return the definition of a 
policy field by name.''' - if name in self.defs: - return self.defs[name] - - @property - def targets(self): - '''Return targets.''' - return self.targets_list - - @property - def description(self): - return self.policy_description - - @property - def version(self): - return self.policy_version - - def _validate_keys(self): - for key in self.defs.keys(): - if key not in self.SECTIONS: - ValidationIssueCollector.appendException( - UnknownFieldError(what='Policy "%s"' % self.type, - field=key)) - - def _validate_targets(self, targets_list, custom_def): - for nodetype in targets_list: - if nodetype not in custom_def: - ValidationIssueCollector.appendException( - InvalidTypeError(what='"%s" defined in targets for ' - 'policy "%s"' % (nodetype, self.type))) - - def _validate_metadata(self, meta_data): - if not meta_data.get('type') in ['map', 'tosca:map']: - ValidationIssueCollector.appendException( - InvalidTypeError(what='"%s" defined in policy for ' - 'metadata' % (meta_data.get('type')))) - - for entry_schema, entry_schema_type in meta_data.items(): - if isinstance(entry_schema_type, dict) and not \ - entry_schema_type.get('type') == 'string': - ValidationIssueCollector.appendException( - InvalidTypeError(what='"%s" defined in policy for ' - 'metadata "%s"' - % (entry_schema_type.get('type'), - entry_schema))) -*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/PortSpec.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/PortSpec.java deleted file mode 100644 index 01fb9fc..0000000 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/PortSpec.java +++ /dev/null @@ -1,177 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * SDC - * ================================================================================ - * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. 
- * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package org.onap.sdc.toscaparser.api.elements; - -import org.onap.sdc.toscaparser.api.DataEntity; -import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; -import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; -import org.onap.sdc.toscaparser.api.utils.ValidateUtils; - -import java.util.LinkedHashMap; - -public class PortSpec { - // Parent class for tosca.datatypes.network.PortSpec type - - private static final String SHORTNAME = "PortSpec"; - private static final String TYPE_URI = "tosca.datatypes.network." 
+ SHORTNAME; - - private static final String PROTOCOL = "protocol"; - private static final String SOURCE = "source"; - private static final String SOURCE_RANGE = "source_range"; - private static final String TARGET = "target"; - private static final String TARGET_RANGE = "target_range"; - - private static final String PROPERTY_NAMES[] = { - PROTOCOL, SOURCE, SOURCE_RANGE, - TARGET, TARGET_RANGE - }; - - // todo(TBD) May want to make this a subclass of DataType - // and change init method to set PortSpec's properties - public PortSpec() { - - } - - // The following additional requirements MUST be tested: - // 1) A valid PortSpec MUST have at least one of the following properties: - // target, target_range, source or source_range. - // 2) A valid PortSpec MUST have a value for the source property that - // is within the numeric range specified by the property source_range - // when source_range is specified. - // 3) A valid PortSpec MUST have a value for the target property that is - // within the numeric range specified by the property target_range - // when target_range is specified. 
- public static void validateAdditionalReq(Object _properties, - String propName, - LinkedHashMap custom_def) { - - try { - LinkedHashMap properties = (LinkedHashMap) _properties; - Object source = properties.get(PortSpec.SOURCE); - Object sourceRange = properties.get(PortSpec.SOURCE_RANGE); - Object target = properties.get(PortSpec.TARGET); - Object targetRange = properties.get(PortSpec.TARGET_RANGE); - - // verify one of the specified values is set - if (source == null && sourceRange == null && - target == null && targetRange == null) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE129", String.format( - "InvalidTypeAdditionalRequirementsError: Additional requirements for type \"%s\" not met", - TYPE_URI))); - } - // Validate source value is in specified range - if (source != null && sourceRange != null) { - ValidateUtils.validateValueInRange(source, sourceRange, SOURCE); - } else { - DataEntity portdef = new DataEntity("PortDef", source, null, SOURCE); - portdef.validate(); - } - // Validate target value is in specified range - if (target != null && targetRange != null) { - ValidateUtils.validateValueInRange(target, targetRange, SOURCE); - } else { - DataEntity portdef = new DataEntity("PortDef", source, null, TARGET); - portdef.validate(); - } - } catch (Exception e) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE130", String.format( - "ValueError: \"%s\" do not meet requirements for type \"%s\"", - _properties.toString(), SHORTNAME))); - } - } - -} - -/*python - -from toscaparser.common.exception import ValidationIssueCollector -from toscaparser.common.exception import InvalidTypeAdditionalRequirementsError -from toscaparser.utils.gettextutils import _ -import org.openecomp.sdc.toscaparser.api.utils.validateutils as validateutils - -log = logging.getLogger('tosca') - - -class PortSpec(object): - '''Parent class for tosca.datatypes.network.PortSpec type.''' - - SHORTNAME = 
'PortSpec' - TYPE_URI = 'tosca.datatypes.network.' + SHORTNAME - - PROPERTY_NAMES = ( - PROTOCOL, SOURCE, SOURCE_RANGE, - TARGET, TARGET_RANGE - ) = ( - 'protocol', 'source', 'source_range', - 'target', 'target_range' - ) - - # TODO(TBD) May want to make this a subclass of DataType - # and change init method to set PortSpec's properties - def __init__(self): - pass - - # The following additional requirements MUST be tested: - # 1) A valid PortSpec MUST have at least one of the following properties: - # target, target_range, source or source_range. - # 2) A valid PortSpec MUST have a value for the source property that - # is within the numeric range specified by the property source_range - # when source_range is specified. - # 3) A valid PortSpec MUST have a value for the target property that is - # within the numeric range specified by the property target_range - # when target_range is specified. - @staticmethod - def validate_additional_req(properties, prop_name, custom_def=None, ): - try: - source = properties.get(PortSpec.SOURCE) - source_range = properties.get(PortSpec.SOURCE_RANGE) - target = properties.get(PortSpec.TARGET) - target_range = properties.get(PortSpec.TARGET_RANGE) - - # verify one of the specified values is set - if source is None and source_range is None and \ - target is None and target_range is None: - ValidationIssueCollector.appendException( - InvalidTypeAdditionalRequirementsError( - type=PortSpec.TYPE_URI)) - # Validate source value is in specified range - if source and source_range: - validateutils.validate_value_in_range(source, source_range, - PortSpec.SOURCE) - else: - from toscaparser.dataentity import DataEntity - portdef = DataEntity('PortDef', source, None, PortSpec.SOURCE) - portdef.validate() - # Validate target value is in specified range - if target and target_range: - validateutils.validate_value_in_range(target, target_range, - PortSpec.TARGET) - else: - from toscaparser.dataentity import DataEntity - portdef = 
DataEntity('PortDef', source, None, PortSpec.TARGET) - portdef.validate() - except Exception: - msg = _('"%(value)s" do not meet requirements ' - 'for type "%(type)s".') \ - % {'value': properties, 'type': PortSpec.SHORTNAME} - ValidationIssueCollector.appendException( - ValueError(msg)) -*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/PropertyDef.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/PropertyDef.java deleted file mode 100644 index 484d17e..0000000 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/PropertyDef.java +++ /dev/null @@ -1,249 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * SDC - * ================================================================================ - * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package org.onap.sdc.toscaparser.api.elements; - -import java.util.LinkedHashMap; -import java.util.Map; - -import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; -import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; - -public class PropertyDef { - - private static final String PROPERTY_KEYNAME_DEFAULT = "default"; - private static final String PROPERTY_KEYNAME_REQUIRED = "required"; - private static final String PROPERTY_KEYNAME_STATUS = "status"; - private static final String VALID_PROPERTY_KEYNAMES[] = { - PROPERTY_KEYNAME_DEFAULT, - PROPERTY_KEYNAME_REQUIRED, - PROPERTY_KEYNAME_STATUS}; - - private static final boolean PROPERTY_REQUIRED_DEFAULT = true; - - private static final String VALID_REQUIRED_VALUES[] = {"true", "false"}; - - private static final String PROPERTY_STATUS_SUPPORTED = "supported"; - private static final String PROPERTY_STATUS_EXPERIMENTAL = "experimental"; - private static final String VALID_STATUS_VALUES[] = { - PROPERTY_STATUS_SUPPORTED, PROPERTY_STATUS_EXPERIMENTAL}; - - private static final String PROPERTY_STATUS_DEFAULT = PROPERTY_STATUS_SUPPORTED; - - private String name; - private Object value; - private LinkedHashMap schema; - private String _status; - private boolean _required; - - public PropertyDef(String pdName, Object pdValue, - LinkedHashMap pdSchema) { - name = pdName; - value = pdValue; - schema = pdSchema; - _status = PROPERTY_STATUS_DEFAULT; - _required = PROPERTY_REQUIRED_DEFAULT; - - if (schema != null) { - // Validate required 'type' property exists - if (schema.get("type") == null) { - //msg = (_('Schema definition of "%(pname)s" must have a "type" ' - // 'attribute.') % dict(pname=self.name)) - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE131", String.format( - "InvalidSchemaError: Schema definition of \"%s\" must have a \"type\" attribute", name))); - } - 
_loadRequiredAttrFromSchema(); - _loadStatusAttrFromSchema(); - } - } - - public Object getDefault() { - if (schema != null) { - for (Map.Entry me : schema.entrySet()) { - if (me.getKey().equals(PROPERTY_KEYNAME_DEFAULT)) { - return me.getValue(); - } - } - } - return null; - } - - public boolean isRequired() { - return _required; - } - - private void _loadRequiredAttrFromSchema() { - // IF 'required' keyname exists verify it's a boolean, - // if so override default - Object val = schema.get(PROPERTY_KEYNAME_REQUIRED); - if (val != null) { - if (val instanceof Boolean) { - _required = (boolean) val; - } else { - //valid_values = ', '.join(self.VALID_REQUIRED_VALUES) - //attr = self.PROPERTY_KEYNAME_REQUIRED - //TOSCAException.generate_inv_schema_property_error(self, - // attr, - // value, - // valid_values) - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE132", String.format( - "Schema definition of \"%s\" has \"required\" attribute with an invalid value", - name))); - } - } - } - - public String getStatus() { - return _status; - } - - private void _loadStatusAttrFromSchema() { - // IF 'status' keyname exists verify it's a boolean, - // if so override default - String sts = (String) schema.get(PROPERTY_KEYNAME_STATUS); - if (sts != null) { - boolean bFound = false; - for (String vsv : VALID_STATUS_VALUES) { - if (vsv.equals(sts)) { - bFound = true; - break; - } - } - if (bFound) { - _status = sts; - } else { - //valid_values = ', '.join(self.VALID_STATUS_VALUES) - //attr = self.PROPERTY_KEYNAME_STATUS - //TOSCAException.generate_inv_schema_property_error(self, - // attr, - // value, - // valid_values) - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE006", String.format( - "Schema definition of \"%s\" has \"status\" attribute with an invalid value", - name))); - } - } - } - - public String getName() { - return name; - } - - public LinkedHashMap getSchema() { - return schema; - } - - public 
Object getPDValue() { - // there's getValue in EntityType... - return value; - } - -} -/*python - -from toscaparser.common.exception import ValidationIssueCollector -from toscaparser.common.exception import InvalidSchemaError -from toscaparser.common.exception import TOSCAException -from toscaparser.utils.gettextutils import _ - - -class PropertyDef(object): - '''TOSCA built-in Property type.''' - - VALID_PROPERTY_KEYNAMES = (PROPERTY_KEYNAME_DEFAULT, - PROPERTY_KEYNAME_REQUIRED, - PROPERTY_KEYNAME_STATUS) = \ - ('default', 'required', 'status') - - PROPERTY_REQUIRED_DEFAULT = True - - VALID_REQUIRED_VALUES = ['true', 'false'] - VALID_STATUS_VALUES = (PROPERTY_STATUS_SUPPORTED, - PROPERTY_STATUS_EXPERIMENTAL) = \ - ('supported', 'experimental') - - PROPERTY_STATUS_DEFAULT = PROPERTY_STATUS_SUPPORTED - - def __init__(self, name, value=None, schema=None): - self.name = name - self.value = value - self.schema = schema - self._status = self.PROPERTY_STATUS_DEFAULT - self._required = self.PROPERTY_REQUIRED_DEFAULT - - # Validate required 'type' property exists - try: - self.schema['type'] - except KeyError: - msg = (_('Schema definition of "%(pname)s" must have a "type" ' - 'attribute.') % dict(pname=self.name)) - ValidationIssueCollector.appendException( - InvalidSchemaError(message=msg)) - - if self.schema: - self._load_required_attr_from_schema() - self._load_status_attr_from_schema() - - @property - def default(self): - if self.schema: - for prop_key, prop_value in self.schema.items(): - if prop_key == self.PROPERTY_KEYNAME_DEFAULT: - return prop_value - return None - - @property - def required(self): - return self._required - - def _load_required_attr_from_schema(self): - # IF 'required' keyname exists verify it's a boolean, - # if so override default - if self.PROPERTY_KEYNAME_REQUIRED in self.schema: - value = self.schema[self.PROPERTY_KEYNAME_REQUIRED] - if isinstance(value, bool): - self._required = value - else: - valid_values = ', 
'.join(self.VALID_REQUIRED_VALUES) - attr = self.PROPERTY_KEYNAME_REQUIRED - TOSCAException.generate_inv_schema_property_error(self, - attr, - value, - valid_values) - - @property - def status(self): - return self._status - - def _load_status_attr_from_schema(self): - # IF 'status' keyname exists verify it's a valid value, - # if so override default - if self.PROPERTY_KEYNAME_STATUS in self.schema: - value = self.schema[self.PROPERTY_KEYNAME_STATUS] - if value in self.VALID_STATUS_VALUES: - self._status = value - else: - valid_values = ', '.join(self.VALID_STATUS_VALUES) - attr = self.PROPERTY_KEYNAME_STATUS - TOSCAException.generate_inv_schema_property_error(self, - attr, - value, - valid_values) -*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/RelationshipType.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/RelationshipType.java deleted file mode 100644 index 4c39ec2..0000000 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/RelationshipType.java +++ /dev/null @@ -1,121 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * SDC - * ================================================================================ - * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package org.onap.sdc.toscaparser.api.elements; - -import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; -import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; - -import java.util.LinkedHashMap; - -public class RelationshipType extends StatefulEntityType { - - private static final String DERIVED_FROM = "derived_from"; - private static final String VALID_TARGET_TYPES = "valid_target_types"; - private static final String INTERFACES = "interfaces"; - private static final String ATTRIBUTES = "attributes"; - private static final String PROPERTIES = "properties"; - private static final String DESCRIPTION = "description"; - private static final String VERSION = "version"; - private static final String CREDENTIAL = "credential"; - - private static final String[] SECTIONS = { - DERIVED_FROM, VALID_TARGET_TYPES, INTERFACES, - ATTRIBUTES, PROPERTIES, DESCRIPTION, VERSION, CREDENTIAL}; - - private String capabilityName; - private LinkedHashMap customDef; - - public RelationshipType(String type, String capabilityName, LinkedHashMap customDef) { - super(type, RELATIONSHIP_PREFIX, customDef); - this.capabilityName = capabilityName; - this.customDef = customDef; - } - - public RelationshipType getParentType() { - // Return a relationship this reletionship is derived from.''' - String prel = derivedFrom(defs); - if (prel != null) { - return new RelationshipType(prel, null, customDef); - } - return null; - } - - public Object getValidTargetTypes() { - return entityValue(defs, "valid_target_types"); - } - - private void validateKeys() { - for (String key : defs.keySet()) { - boolean bFound = false; - for (String section : SECTIONS) { - if (key.equals(section)) { - bFound = true; - break; - } - } - if (!bFound) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE133", String.format( - "UnknownFieldError: Relationshiptype \"%s\" has 
unknown field \"%s\"", type, key))); - } - } - } -} - -/*python - -from toscaparser.common.exception import ValidationIssueCollector -from toscaparser.common.exception import UnknownFieldError -from toscaparser.elements.statefulentitytype import StatefulEntityType - - -class RelationshipType(StatefulEntityType): - '''TOSCA built-in relationship type.''' - SECTIONS = (DERIVED_FROM, VALID_TARGET_TYPES, INTERFACES, - ATTRIBUTES, PROPERTIES, DESCRIPTION, VERSION, - CREDENTIAL) = ('derived_from', 'valid_target_types', - 'interfaces', 'attributes', 'properties', - 'description', 'version', 'credential') - - def __init__(self, type, capability_name=None, custom_def=None): - super(RelationshipType, self).__init__(type, self.RELATIONSHIP_PREFIX, - custom_def) - self.capability_name = capability_name - self.custom_def = custom_def - self._validate_keys() - - @property - def parent_type(self): - '''Return a relationship this reletionship is derived from.''' - prel = self.derived_from(self.defs) - if prel: - return RelationshipType(prel, self.custom_def) - - @property - def valid_target_types(self): - return self.entity_value(self.defs, 'valid_target_types') - - def _validate_keys(self): - for key in self.defs.keys(): - if key not in self.SECTIONS: - ValidationIssueCollector.appendException( - UnknownFieldError(what='Relationshiptype "%s"' % self.type, - field=key)) -*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnit.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnit.java deleted file mode 100644 index 1eaa8a0..0000000 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnit.java +++ /dev/null @@ -1,287 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * SDC - * ================================================================================ - * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. 
- * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package org.onap.sdc.toscaparser.api.elements; - -import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; -import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; -import org.onap.sdc.toscaparser.api.utils.ValidateUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.HashMap; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -public abstract class ScalarUnit { - - private static Logger log = LoggerFactory.getLogger(ScalarUnit.class.getName()); - - private static final String SCALAR_UNIT_SIZE = "scalar-unit.size"; - private static final String SCALAR_UNIT_FREQUENCY = "scalar-unit.frequency"; - private static final String SCALAR_UNIT_TIME = "scalar-unit.time"; - - public static final String[] SCALAR_UNIT_TYPES = { - SCALAR_UNIT_SIZE, SCALAR_UNIT_FREQUENCY, SCALAR_UNIT_TIME - }; - - private Object value; - private HashMap scalarUnitDict; - private String scalarUnitDefault; - - public ScalarUnit(Object value) { - this.value = value; - scalarUnitDict = new HashMap<>(); - scalarUnitDefault = ""; - } - - void putToScalarUnitDict(String key, Object value) { - scalarUnitDict.put(key, value); - } - - void setScalarUnitDefault(String scalarUnitDefault) { - this.scalarUnitDefault = 
scalarUnitDefault; - } - - private String checkUnitInScalarStandardUnits(String inputUnit) { - // Check whether the input unit is following specified standard - - // If unit is not following specified standard, convert it to standard - // unit after displaying a warning message. - - if (scalarUnitDict.get(inputUnit) != null) { - return inputUnit; - } else { - for (String key : scalarUnitDict.keySet()) { - if (key.toUpperCase().equals(inputUnit.toUpperCase())) { - log.debug("ScalarUnit - checkUnitInScalarStandardUnits - \n" - + "The unit {} does not follow scalar unit standards\n" - + "using {} instead", - inputUnit, key); - return key; - } - } - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE007", String.format( - "'The unit \"%s\" is not valid. Valid units are \n%s", - inputUnit, scalarUnitDict.keySet().toString()))); - return inputUnit; - } - } - - public Object validateScalarUnit() { - Pattern pattern = Pattern.compile("([0-9.]+)\\s*(\\w+)"); - Matcher matcher = pattern.matcher(value.toString()); - if (matcher.find()) { - ValidateUtils.strToNum(matcher.group(1)); - String scalarUnit = checkUnitInScalarStandardUnits(matcher.group(2)); - value = matcher.group(1) + " " + scalarUnit; - } else { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE134", String.format( - "ValueError: \"%s\" is not a valid scalar-unit", value.toString()))); - } - return value; - } - - public double getNumFromScalarUnit(String unit) { - if (unit != null) { - unit = checkUnitInScalarStandardUnits(unit); - } else { - unit = scalarUnitDefault; - } - Pattern pattern = Pattern.compile("([0-9.]+)\\s*(\\w+)"); - Matcher matcher = pattern.matcher(value.toString()); - if (matcher.find()) { - final double minimalNum = 0.0000000000001; - - ValidateUtils.strToNum(matcher.group(1)); - String scalarUnit = checkUnitInScalarStandardUnits(matcher.group(2)); - value = matcher.group(1) + " " + scalarUnit; - Object on1 = 
ValidateUtils.strToNum(matcher.group(1)) != null ? ValidateUtils.strToNum(matcher.group(1)) : 0; - Object on2 = scalarUnitDict.get(matcher.group(2)) != null ? scalarUnitDict.get(matcher.group(2)) : 0; - Object on3 = scalarUnitDict.get(unit) != null ? scalarUnitDict.get(unit) : 0; - - Double n1 = new Double(on1.toString()); - Double n2 = new Double(on2.toString()); - Double n3 = new Double(on3.toString()); - double converted = n1 * n2 / n3; - - if (Math.abs(converted - Math.round(converted)) < minimalNum) { - converted = Math.round(converted); - } - return converted; - } - return 0.0; - } - - private static HashMap scalarUnitMapping = getScalarUnitMappings(); - - private static HashMap getScalarUnitMappings() { - HashMap map = new HashMap<>(); - map.put(SCALAR_UNIT_FREQUENCY, "ScalarUnitFrequency"); - map.put(SCALAR_UNIT_SIZE, "ScalarUnitSize"); - map.put(SCALAR_UNIT_TIME, "ScalarUnit_Time"); - return map; - } - - public static ScalarUnit getScalarunitClass(String type, Object val) { - if (type.equals(SCALAR_UNIT_SIZE)) { - return new ScalarUnitSize(val); - } else if (type.equals(SCALAR_UNIT_TIME)) { - return new ScalarUnitTime(val); - } else if (type.equals(SCALAR_UNIT_FREQUENCY)) { - return new ScalarUnitFrequency(val); - } - return null; - } - - public static double getScalarunitValue(String type, Object value, String unit) { - if (type.equals(SCALAR_UNIT_SIZE)) { - return (new ScalarUnitSize(value)).getNumFromScalarUnit(unit); - } - if (type.equals(SCALAR_UNIT_TIME)) { - return (new ScalarUnitTime(value)).getNumFromScalarUnit(unit); - } - if (type.equals(SCALAR_UNIT_FREQUENCY)) { - return (new ScalarUnitFrequency(value)).getNumFromScalarUnit(unit); - } - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE135", String.format( - "TypeError: \"%s\" is not a valid scalar-unit type", type))); - return 0.0; - } - -} - -/*python - -from toscaparser.common.exception import ValidationIssueCollector -from toscaparser.utils.gettextutils 
import _ -from toscaparser.utils import validateutils - -log = logging.getLogger('tosca') - - -class ScalarUnit(object): - '''Parent class for scalar-unit type.''' - - SCALAR_UNIT_TYPES = ( - SCALAR_UNIT_SIZE, SCALAR_UNIT_FREQUENCY, SCALAR_UNIT_TIME - ) = ( - 'scalar-unit.size', 'scalar-unit.frequency', 'scalar-unit.time' - ) - - def __init__(self, value): - self.value = value - - def _check_unit_in_scalar_standard_units(self, input_unit): - """Check whether the input unit is following specified standard - - If unit is not following specified standard, convert it to standard - unit after displaying a warning message. - """ - if input_unit in self.scalarUnitDict.keys(): - return input_unit - else: - for key in self.scalarUnitDict.keys(): - if key.upper() == input_unit.upper(): - log.warning(_('The unit "%(unit)s" does not follow ' - 'scalar unit standards; using "%(key)s" ' - 'instead.') % {'unit': input_unit, - 'key': key}) - return key - msg = (_('The unit "%(unit)s" is not valid. Valid units are ' - '"%(valid_units)s".') % - {'unit': input_unit, - 'valid_units': sorted(self.scalarUnitDict.keys())}) - ValidationIssueCollector.appendException(ValueError(msg)) - - def validate_scalar_unit(self): - regex = re.compile('([0-9.]+)\s*(\w+)') - try: - result = regex.match(str(self.value)).groups() - validateutils.str_to_num(result[0]) - scalar_unit = self._check_unit_in_scalar_standard_units(result[1]) - self.value = ' '.join([result[0], scalar_unit]) - return self.value - - except Exception: - ValidationIssueCollector.appendException( - ValueError(_('"%s" is not a valid scalar-unit.') - % self.value)) - - def get_num_from_scalar_unit(self, unit=None): - if unit: - unit = self._check_unit_in_scalar_standard_units(unit) - else: - unit = self.scalarUnitDefault - self.validate_scalar_unit() - - regex = re.compile('([0-9.]+)\s*(\w+)') - result = regex.match(str(self.value)).groups() - converted = (float(validateutils.str_to_num(result[0])) - * self.scalarUnitDict[result[1]] - 
/ self.scalarUnitDict[unit]) - if converted - int(converted) < 0.0000000000001: - converted = int(converted) - return converted - - -class ScalarUnit_Size(ScalarUnit): - - scalarUnitDefault = 'B' - scalarUnitDict = {'B': 1, 'kB': 1000, 'KiB': 1024, 'MB': 1000000, - 'MiB': 1048576, 'GB': 1000000000, - 'GiB': 1073741824, 'TB': 1000000000000, - 'TiB': 1099511627776} - - -class ScalarUnit_Time(ScalarUnit): - - scalarUnitDefault = 'ms' - scalarUnitDict = {'d': 86400, 'h': 3600, 'm': 60, 's': 1, - 'ms': 0.001, 'us': 0.000001, 'ns': 0.000000001} - - -class ScalarUnit_Frequency(ScalarUnit): - - scalarUnitDefault = 'GHz' - scalarUnitDict = {'Hz': 1, 'kHz': 1000, - 'MHz': 1000000, 'GHz': 1000000000} - - -scalarunit_mapping = { - ScalarUnit.SCALAR_UNIT_FREQUENCY: ScalarUnit_Frequency, - ScalarUnit.SCALAR_UNIT_SIZE: ScalarUnit_Size, - ScalarUnit.SCALAR_UNIT_TIME: ScalarUnit_Time, - } - - -def get_scalarunit_class(type): - return scalarunit_mapping.get(type) - - -def get_scalarunit_value(type, value, unit=None): - if type in ScalarUnit.SCALAR_UNIT_TYPES: - ScalarUnit_Class = get_scalarunit_class(type) - return (ScalarUnit_Class(value). - get_num_from_scalar_unit(unit)) - else: - ValidationIssueCollector.appendException( - TypeError(_('"%s" is not a valid scalar-unit type.') % type)) -*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitFrequency.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitFrequency.java deleted file mode 100644 index ed10da9..0000000 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitFrequency.java +++ /dev/null @@ -1,39 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * SDC - * ================================================================================ - * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. 
- * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package org.onap.sdc.toscaparser.api.elements; - -public class ScalarUnitFrequency extends ScalarUnit { - - private static final Long HZ = 1L; - private static final Long KHZ = 1000L; - private static final Long MHZ = 1000000L; - private static final Long GHZ = 1000000000L; - - public ScalarUnitFrequency(Object value) { - super(value); - setScalarUnitDefault("GHz"); - putToScalarUnitDict("Hz", HZ); - putToScalarUnitDict("kHz", KHZ); - putToScalarUnitDict("MHz", MHZ); - putToScalarUnitDict("GHz", GHZ); - } - -} diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitSize.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitSize.java deleted file mode 100644 index 78687a1..0000000 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitSize.java +++ /dev/null @@ -1,43 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * SDC - * ================================================================================ - * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. 
- * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package org.onap.sdc.toscaparser.api.elements; - -import org.onap.sdc.toscaparser.api.elements.enums.FileSize; - -public class ScalarUnitSize extends ScalarUnit { - - - - public ScalarUnitSize(Object value) { - super(value); - - setScalarUnitDefault("B"); - putToScalarUnitDict("B", FileSize.B); - putToScalarUnitDict("kB", FileSize.KB); - putToScalarUnitDict("MB", FileSize.MB); - putToScalarUnitDict("GB", FileSize.GB); - putToScalarUnitDict("TB", FileSize.TB); - putToScalarUnitDict("kiB", FileSize.KIB); - putToScalarUnitDict("MiB", FileSize.MIB); - putToScalarUnitDict("GiB", FileSize.GIB); - putToScalarUnitDict("TiB", FileSize.TIB); - } -} diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitTime.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitTime.java deleted file mode 100644 index 8d2c13e..0000000 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitTime.java +++ /dev/null @@ -1,37 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * SDC - * ================================================================================ - * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. 
- * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package org.onap.sdc.toscaparser.api.elements; - -public class ScalarUnitTime extends ScalarUnit { - - public ScalarUnitTime(Object value) { - super(value); - setScalarUnitDefault("ms"); - putToScalarUnitDict("d", 86400L); - putToScalarUnitDict("h", 3600L); - putToScalarUnitDict("m", 60L); - putToScalarUnitDict("s", 1L); - putToScalarUnitDict("ms", 0.001); - putToScalarUnitDict("us", 0.000001); - putToScalarUnitDict("ns", 0.000000001); - } - -} diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/StatefulEntityType.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/StatefulEntityType.java deleted file mode 100644 index b710dda..0000000 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/StatefulEntityType.java +++ /dev/null @@ -1,234 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * SDC - * ================================================================================ - * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package org.onap.sdc.toscaparser.api.elements; - -import org.onap.sdc.toscaparser.api.UnsupportedType; -import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; -import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; - -import java.util.ArrayList; -import java.util.LinkedHashMap; -import java.util.Map; - - -public class StatefulEntityType extends EntityType { - // Class representing TOSCA states - - public static final String[] INTERFACE_NODE_LIFECYCLE_OPERATIONS = { - "create", "configure", "start", "stop", "delete"}; - - public static final String[] INTERFACE_RELATIONSHIP_CONFIGURE_OPERATIONS = { - "post_configure_source", "post_configure_target", "add_target", "remove_target"}; - - public StatefulEntityType() { - // void constructor for subclasses that don't want super - } - - @SuppressWarnings("unchecked") - public StatefulEntityType(String entityType, String prefix, LinkedHashMap customDef) { - - String entireEntityType = entityType; - if (UnsupportedType.validateType(entireEntityType)) { - defs = null; - } else { - if (entityType.startsWith(TOSCA + ":")) { - entityType = entityType.substring(TOSCA.length() + 1); - entireEntityType = prefix + entityType; - } - if (!entityType.startsWith(TOSCA)) { - entireEntityType = prefix + entityType; - } - if (TOSCA_DEF.get(entireEntityType) != null) { - defs = (LinkedHashMap) TOSCA_DEF.get(entireEntityType); - entityType = entireEntityType; - } else if (customDef != null && 
customDef.get(entityType) != null) { - defs = (LinkedHashMap) customDef.get(entityType); - } else { - defs = null; - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE136", String.format( - "InvalidTypeError: \"%s\" is not a valid type", entityType))); - } - } - type = entityType; - } - - @SuppressWarnings("unchecked") - public ArrayList getPropertiesDefObjects() { - // Return a list of property definition objects - ArrayList properties = new ArrayList(); - LinkedHashMap props = (LinkedHashMap) getDefinition(PROPERTIES); - if (props != null) { - for (Map.Entry me : props.entrySet()) { - String pdname = me.getKey(); - Object to = me.getValue(); - if (to == null || !(to instanceof LinkedHashMap)) { - String s = to == null ? "null" : to.getClass().getSimpleName(); - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE137", String.format( - "Unexpected type error: property \"%s\" has type \"%s\" (expected dict)", pdname, s))); - continue; - } - LinkedHashMap pdschema = (LinkedHashMap) to; - properties.add(new PropertyDef(pdname, null, pdschema)); - } - } - return properties; - } - - public LinkedHashMap getPropertiesDef() { - LinkedHashMap pds = new LinkedHashMap(); - for (PropertyDef pd : getPropertiesDefObjects()) { - pds.put(pd.getName(), pd); - } - return pds; - } - - public PropertyDef getPropertyDefValue(String name) { - // Return the property definition associated with a given name - PropertyDef pd = null; - LinkedHashMap propsDef = getPropertiesDef(); - if (propsDef != null) { - pd = propsDef.get(name); - } - return pd; - } - - public ArrayList getAttributesDefObjects() { - // Return a list of attribute definition objects - @SuppressWarnings("unchecked") - LinkedHashMap attrs = (LinkedHashMap) getValue(ATTRIBUTES, null, true); - ArrayList ads = new ArrayList<>(); - if (attrs != null) { - for (Map.Entry me : attrs.entrySet()) { - String attr = me.getKey(); - @SuppressWarnings("unchecked") - 
LinkedHashMap adschema = (LinkedHashMap) me.getValue(); - ads.add(new AttributeDef(attr, null, adschema)); - } - } - return ads; - } - - public LinkedHashMap getAttributesDef() { - // Return a dictionary of attribute definition name-object pairs - - LinkedHashMap ads = new LinkedHashMap<>(); - for (AttributeDef ado : getAttributesDefObjects()) { - ads.put(((AttributeDef) ado).getName(), ado); - } - return ads; - } - - public AttributeDef getAttributeDefValue(String name) { - // Return the attribute definition associated with a given name - AttributeDef ad = null; - LinkedHashMap attrsDef = getAttributesDef(); - if (attrsDef != null) { - ad = attrsDef.get(name); - } - return ad; - } - - public String getType() { - return type; - } -} - -/*python - -from toscaparser.common.exception import InvalidTypeError -from toscaparser.elements.attribute_definition import AttributeDef -from toscaparser.elements.entity_type import EntityType -from toscaparser.elements.property_definition import PropertyDef -from toscaparser.unsupportedtype import UnsupportedType - - -class StatefulEntityType(EntityType): - '''Class representing TOSCA states.''' - - interfaces_node_lifecycle_operations = ['create', - 'configure', 'start', - 'stop', 'delete'] - - interfaces_relationship_configure_operations = ['post_configure_source', - 'post_configure_target', - 'add_target', - 'remove_target'] - - def __init__(self, entitytype, prefix, custom_def=None): - entire_entitytype = entitytype - if UnsupportedType.validate_type(entire_entitytype): - self.defs = None - else: - if entitytype.startswith(self.TOSCA + ":"): - entitytype = entitytype[(len(self.TOSCA) + 1):] - entire_entitytype = prefix + entitytype - if not entitytype.startswith(self.TOSCA): - entire_entitytype = prefix + entitytype - if entire_entitytype in list(self.TOSCA_DEF.keys()): - self.defs = self.TOSCA_DEF[entire_entitytype] - entitytype = entire_entitytype - elif custom_def and entitytype in list(custom_def.keys()): - self.defs = 
custom_def[entitytype] - else: - self.defs = None - ValidationIssueCollector.appendException( - InvalidTypeError(what=entitytype)) - self.type = entitytype - - def get_properties_def_objects(self): - '''Return a list of property definition objects.''' - properties = [] - props = self.get_definition(self.PROPERTIES) - if props: - for prop, schema in props.items(): - properties.append(PropertyDef(prop, None, schema)) - return properties - - def get_properties_def(self): - '''Return a dictionary of property definition name-object pairs.''' - return {prop.name: prop - for prop in self.get_properties_def_objects()} - - def get_property_def_value(self, name): - '''Return the property definition associated with a given name.''' - props_def = self.get_properties_def() - if props_def and name in props_def.keys(): - return props_def[name].value - - def get_attributes_def_objects(self): - '''Return a list of attribute definition objects.''' - attrs = self.get_value(self.ATTRIBUTES, parent=True) - if attrs: - return [AttributeDef(attr, None, schema) - for attr, schema in attrs.items()] - return [] - - def get_attributes_def(self): - '''Return a dictionary of attribute definition name-object pairs.''' - return {attr.name: attr - for attr in self.get_attributes_def_objects()} - - def get_attribute_def_value(self, name): - '''Return the attribute definition associated with a given name.''' - attrs_def = self.get_attributes_def() - if attrs_def and name in attrs_def.keys(): - return attrs_def[name].value -*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/TypeValidation.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/TypeValidation.java deleted file mode 100644 index 18dd5ca..0000000 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/TypeValidation.java +++ /dev/null @@ -1,173 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * SDC - * 
================================================================================ - * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package org.onap.sdc.toscaparser.api.elements; - -import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; -import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; - -import java.util.ArrayList; -import java.util.LinkedHashMap; - -import org.onap.sdc.toscaparser.api.extensions.ExtTools; - -public class TypeValidation { - - private static final String DEFINITION_VERSION = "tosca_definitions_version"; - private static final String DESCRIPTION = "description"; - private static final String IMPORTS = "imports"; - private static final String DSL_DEFINITIONS = "dsl_definitions"; - private static final String NODE_TYPES = "node_types"; - private static final String REPOSITORIES = "repositories"; - private static final String DATA_TYPES = "data_types"; - private static final String ARTIFACT_TYPES = "artifact_types"; - private static final String GROUP_TYPES = "group_types"; - private static final String RELATIONSHIP_TYPES = "relationship_types"; - private static final String CAPABILITY_TYPES = "capability_types"; - private static final String INTERFACE_TYPES = "interface_types"; - 
private static final String POLICY_TYPES = "policy_types"; - private static final String TOPOLOGY_TEMPLATE = "topology_template"; - //Pavel - private static final String METADATA = "metadata"; - - private String ALLOWED_TYPE_SECTIONS[] = { - DEFINITION_VERSION, DESCRIPTION, IMPORTS, - DSL_DEFINITIONS, NODE_TYPES, REPOSITORIES, - DATA_TYPES, ARTIFACT_TYPES, GROUP_TYPES, - RELATIONSHIP_TYPES, CAPABILITY_TYPES, - INTERFACE_TYPES, POLICY_TYPES, - TOPOLOGY_TEMPLATE, METADATA - }; - - private static ArrayList VALID_TEMPLATE_VERSIONS = _getVTV(); - - private static ArrayList _getVTV() { - ArrayList vtv = new ArrayList<>(); - vtv.add("tosca_simple_yaml_1_0"); - vtv.add("tosca_simple_yaml_1_1"); - ExtTools exttools = new ExtTools(); - vtv.addAll(exttools.getVersions()); - return vtv; - } - - //private LinkedHashMap customTypes; - private Object importDef; - //private String version; - - public TypeValidation(LinkedHashMap _customTypes, - Object _importDef) { - importDef = _importDef; - _validateTypeKeys(_customTypes); - } - - private void _validateTypeKeys(LinkedHashMap customTypes) { - - String sVersion = (String) customTypes.get(DEFINITION_VERSION); - if (sVersion != null) { - _validateTypeVersion(sVersion); - //version = sVersion; - } - for (String name : customTypes.keySet()) { - boolean bFound = false; - for (String ats : ALLOWED_TYPE_SECTIONS) { - if (name.equals(ats)) { - bFound = true; - break; - } - } - if (!bFound) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE138", String.format( - "UnknownFieldError: Template \"%s\" contains unknown field \"%s\"", - importDef.toString(), name))); - } - } - } - - private void _validateTypeVersion(String sVersion) { - boolean bFound = false; - String allowed = ""; - for (String atv : VALID_TEMPLATE_VERSIONS) { - allowed += "\"" + atv + "\" "; - if (sVersion.equals(atv)) { - bFound = true; - break; - } - } - if (!bFound) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new 
JToscaValidationIssue("JE139", String.format( - "InvalidTemplateVersion: version \"%s\" in \"%s\" is not supported\n" + - "Allowed versions: [%s]", - sVersion, importDef.toString(), allowed))); - } - } -} - -/*python - -from toscaparser.common.exception import ValidationIssueCollector -from toscaparser.common.exception import InvalidTemplateVersion -from toscaparser.common.exception import UnknownFieldError -from toscaparser.extensions.exttools import ExtTools - - -class TypeValidation(object): - - ALLOWED_TYPE_SECTIONS = (DEFINITION_VERSION, DESCRIPTION, IMPORTS, - DSL_DEFINITIONS, NODE_TYPES, REPOSITORIES, - DATA_TYPES, ARTIFACT_TYPES, GROUP_TYPES, - RELATIONSHIP_TYPES, CAPABILITY_TYPES, - INTERFACE_TYPES, POLICY_TYPES, - TOPOLOGY_TEMPLATE) = \ - ('tosca_definitions_version', 'description', 'imports', - 'dsl_definitions', 'node_types', 'repositories', - 'data_types', 'artifact_types', 'group_types', - 'relationship_types', 'capability_types', - 'interface_types', 'policy_types', 'topology_template') - VALID_TEMPLATE_VERSIONS = ['tosca_simple_yaml_1_0'] - exttools = ExtTools() - VALID_TEMPLATE_VERSIONS.extend(exttools.get_versions()) - - def __init__(self, custom_types, import_def): - self.import_def = import_def - self._validate_type_keys(custom_types) - - def _validate_type_keys(self, custom_type): - version = custom_type[self.DEFINITION_VERSION] \ - if self.DEFINITION_VERSION in custom_type \ - else None - if version: - self._validate_type_version(version) - self.version = version - - for name in custom_type: - if name not in self.ALLOWED_TYPE_SECTIONS: - ValidationIssueCollector.appendException( -# UnknownFieldError(what='Template ' + (self.import_def), - UnknownFieldError(what= (self.import_def), - field=name)) - - def _validate_type_version(self, version): - if version not in self.VALID_TEMPLATE_VERSIONS: - ValidationIssueCollector.appendException( - InvalidTemplateVersion( -# what=version + ' in ' + self.import_def, - what=self.import_def, - 
valid_versions=', '. join(self.VALID_TEMPLATE_VERSIONS))) -*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Constraint.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Constraint.java deleted file mode 100644 index dd77659..0000000 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Constraint.java +++ /dev/null @@ -1,309 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * SDC - * ================================================================================ - * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package org.onap.sdc.toscaparser.api.elements.constraints; - -import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; -import org.onap.sdc.toscaparser.api.elements.ScalarUnit; -import org.onap.sdc.toscaparser.api.functions.Function; -import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; - -import java.util.ArrayList; -import java.util.LinkedHashMap; -import java.util.List; - -public abstract class Constraint { - - // Parent class for constraints for a Property or Input - - protected static final String EQUAL = "equal"; - protected static final String GREATER_THAN = "greater_than"; - protected static final String GREATER_OR_EQUAL = "greater_or_equal"; - protected static final String LESS_THAN = "less_than"; - protected static final String LESS_OR_EQUAL = "less_or_equal"; - protected static final String IN_RANGE = "in_range"; - protected static final String VALID_VALUES = "valid_values"; - protected static final String LENGTH = "length"; - protected static final String MIN_LENGTH = "min_length"; - protected static final String MAX_LENGTH = "max_length"; - protected static final String PATTERN = "pattern"; - - protected static final String[] CONSTRAINTS = { - EQUAL, GREATER_THAN, GREATER_OR_EQUAL, LESS_THAN, LESS_OR_EQUAL, - IN_RANGE, VALID_VALUES, LENGTH, MIN_LENGTH, MAX_LENGTH, PATTERN}; - - @SuppressWarnings("unchecked") - public static Constraint factory(String constraintClass, String propname, String proptype, Object constraint) { - - // a factory for the different Constraint classes - // replaces Python's __new__() usage - - if (!(constraint instanceof LinkedHashMap) - || ((LinkedHashMap) constraint).size() != 1) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE101", - "InvalidSchemaError: Invalid constraint schema " + constraint.toString())); - } - - switch (constraintClass) { - case EQUAL: - return new 
Equal(propname, proptype, constraint); - case GREATER_THAN: - return new GreaterThan(propname, proptype, constraint); - case GREATER_OR_EQUAL: - return new GreaterOrEqual(propname, proptype, constraint); - case LESS_THAN: - return new LessThan(propname, proptype, constraint); - case LESS_OR_EQUAL: - return new LessOrEqual(propname, proptype, constraint); - case IN_RANGE: - return new InRange(propname, proptype, constraint); - case VALID_VALUES: - return new ValidValues(propname, proptype, constraint); - case LENGTH: - return new Length(propname, proptype, constraint); - case MIN_LENGTH: - return new MinLength(propname, proptype, constraint); - case MAX_LENGTH: - return new MaxLength(propname, proptype, constraint); - case PATTERN: - return new Pattern(propname, proptype, constraint); - default: - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE102", String.format( - "InvalidSchemaError: Invalid property \"%s\"", constraintClass))); - return null; - } - } - - private String constraintKey = "TBD"; - protected ArrayList validTypes = new ArrayList<>(); - protected ArrayList validPropTypes = new ArrayList<>(); - - protected String propertyName; - private String propertyType; - protected Object constraintValue; - protected Object constraintValueMsg; - protected Object valueMsg; - - @SuppressWarnings("unchecked") - public Constraint(String propname, String proptype, Object constraint) { - - setValues(); - - propertyName = propname; - propertyType = proptype; - constraintValue = ((LinkedHashMap) constraint).get(constraintKey); - constraintValueMsg = constraintValue; - boolean bFound = false; - for (String s : ScalarUnit.SCALAR_UNIT_TYPES) { - if (s.equals(propertyType)) { - bFound = true; - break; - } - } - if (bFound) { - constraintValue = _getScalarUnitConstraintValue(); - } - // check if constraint is valid for property type - bFound = false; - for (String s : validPropTypes) { - if (s.equals(propertyType)) { - bFound = true; - 
break; - } - } - if (!bFound) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE103", String.format( - "InvalidSchemaError: Property \"%s\" is not valid for data type \"%s\"", - constraintKey, propertyType))); - } - } - - public ArrayList getValidTypes() { - return validTypes; - } - - public void addValidTypes(List validTypes) { - this.validTypes.addAll(validTypes); - } - - public ArrayList getValidPropTypes() { - return validPropTypes; - } - - public String getPropertyType() { - return propertyType; - } - - public Object getConstraintValue() { - return constraintValue; - } - - public Object getConstraintValueMsg() { - return constraintValueMsg; - } - - public Object getValueMsg() { - return valueMsg; - } - - public void setConstraintKey(String constraintKey) { - this.constraintKey = constraintKey; - } - - public void setValidTypes(ArrayList validTypes) { - this.validTypes = validTypes; - } - - public void setValidPropTypes(ArrayList validPropTypes) { - this.validPropTypes = validPropTypes; - } - - public void setPropertyType(String propertyType) { - this.propertyType = propertyType; - } - - public void setConstraintValue(Object constraintValue) { - this.constraintValue = constraintValue; - } - - public void setConstraintValueMsg(Object constraintValueMsg) { - this.constraintValueMsg = constraintValueMsg; - } - - public void setValueMsg(Object valueMsg) { - this.valueMsg = valueMsg; - } - - @SuppressWarnings("unchecked") - private Object _getScalarUnitConstraintValue() { - // code differs from Python because of class creation - if (constraintValue instanceof ArrayList) { - ArrayList ret = new ArrayList<>(); - for (Object v : (ArrayList) constraintValue) { - ScalarUnit su = ScalarUnit.getScalarunitClass(propertyType, v); - ret.add(su.getNumFromScalarUnit(null)); - } - return ret; - } else { - ScalarUnit su = ScalarUnit.getScalarunitClass(propertyType, constraintValue); - return su.getNumFromScalarUnit(null); - } - } - - public 
void validate(Object value) { - if (Function.isFunction(value)) { - //skipping constraints check for functions - return; - } - - valueMsg = value; - boolean bFound = false; - for (String s : ScalarUnit.SCALAR_UNIT_TYPES) { - if (s.equals(propertyType)) { - bFound = true; - break; - } - } - if (bFound) { - value = ScalarUnit.getScalarunitValue(propertyType, value, null); - } - if (!isValid(value)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE008", "ValidationError: " + errMsg(value))); - } - } - - protected abstract boolean isValid(Object value); - - protected abstract void setValues(); - - protected abstract String errMsg(Object value); - -} - -/*python - -class Constraint(object): - '''Parent class for constraints for a Property or Input.''' - - CONSTRAINTS = (EQUAL, GREATER_THAN, - GREATER_OR_EQUAL, LESS_THAN, LESS_OR_EQUAL, IN_RANGE, - VALID_VALUES, LENGTH, MIN_LENGTH, MAX_LENGTH, PATTERN) = \ - ('equal', 'greater_than', 'greater_or_equal', 'less_than', - 'less_or_equal', 'in_range', 'valid_values', 'length', - 'min_length', 'max_length', 'pattern') - - def __new__(cls, property_name, property_type, constraint): - if cls is not Constraint: - return super(Constraint, cls).__new__(cls) - - if(not isinstance(constraint, collections.Mapping) or - len(constraint) != 1): - ValidationIssueCollector.appendException( - InvalidSchemaError(message=_('Invalid constraint schema.'))) - - for type in constraint.keys(): - ConstraintClass = get_constraint_class(type) - if not ConstraintClass: - msg = _('Invalid property "%s".') % type - ValidationIssueCollector.appendException( - InvalidSchemaError(message=msg)) - - return ConstraintClass(property_name, property_type, constraint) - - def __init__(self, property_name, property_type, constraint): - self.property_name = property_name - self.property_type = property_type - self.constraint_value = constraint[self.constraint_key] - self.constraint_value_msg = self.constraint_value - if 
self.property_type in scalarunit.ScalarUnit.SCALAR_UNIT_TYPES: - self.constraint_value = self._get_scalarunit_constraint_value() - # check if constraint is valid for property type - if property_type not in self.valid_prop_types: - msg = _('Property "%(ctype)s" is not valid for data type ' - '"%(dtype)s".') % dict( - ctype=self.constraint_key, - dtype=property_type) - ValidationIssueCollector.appendException(InvalidSchemaError(message=msg)) - - def _get_scalarunit_constraint_value(self): - if self.property_type in scalarunit.ScalarUnit.SCALAR_UNIT_TYPES: - ScalarUnit_Class = (scalarunit. - get_scalarunit_class(self.property_type)) - if isinstance(self.constraint_value, list): - return [ScalarUnit_Class(v).get_num_from_scalar_unit() - for v in self.constraint_value] - else: - return (ScalarUnit_Class(self.constraint_value). - get_num_from_scalar_unit()) - - def _err_msg(self, value): - return _('Property "%s" could not be validated.') % self.property_name - - def validate(self, value): - self.value_msg = value - if self.property_type in scalarunit.ScalarUnit.SCALAR_UNIT_TYPES: - value = scalarunit.get_scalarunit_value(self.property_type, value) - if not self._is_valid(value): - err_msg = self._err_msg(value) - ValidationIssueCollector.appendException( - ValidationError(message=err_msg)) - - -*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Equal.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Equal.java deleted file mode 100644 index f480099..0000000 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Equal.java +++ /dev/null @@ -1,77 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * SDC - * ================================================================================ - * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. 
- * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package org.onap.sdc.toscaparser.api.elements.constraints; - -import java.util.Arrays; - -public class Equal extends Constraint { - - protected void setValues() { - - setConstraintKey(EQUAL); - validPropTypes.addAll(Arrays.asList(Schema.PROPERTY_TYPES)); - - } - - public Equal(String name, String type, Object c) { - super(name, type, c); - - } - - protected boolean isValid(Object val) { - // equality of objects is tricky so we're comparing - // the toString() representation - return val.toString().equals(constraintValue.toString()); - } - - protected String errMsg(Object value) { - return String.format("The value \"%s\" of property \"%s\" is not equal to \"%s\"", - valueMsg, propertyName, constraintValueMsg); - } - -} - -/*python - -class Equal(Constraint): -"""Constraint class for "equal" - -Constrains a property or parameter to a value equal to ('=') -the value declared. 
-""" - -constraint_key = Constraint.EQUAL - -valid_prop_types = Schema.PROPERTY_TYPES - -def _is_valid(self, value): - if value == self.constraint_value: - return True - - return False - -def _err_msg(self, value): - return (_('The value "%(pvalue)s" of property "%(pname)s" is not ' - 'equal to "%(cvalue)s".') % - dict(pname=self.property_name, - pvalue=self.value_msg, - cvalue=self.constraint_value_msg)) -*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/GreaterOrEqual.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/GreaterOrEqual.java deleted file mode 100644 index 0cb8f36..0000000 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/GreaterOrEqual.java +++ /dev/null @@ -1,130 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * SDC - * ================================================================================ - * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package org.onap.sdc.toscaparser.api.elements.constraints; - -import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; -import org.onap.sdc.toscaparser.api.functions.Function; -import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; - -import java.util.Arrays; -import java.util.Date; - -public class GreaterOrEqual extends Constraint { - // Constraint class for "greater_or_equal" - - // Constrains a property or parameter to a value greater than or equal - // to ('>=') the value declared. - - protected void setValues() { - - setConstraintKey(GREATER_OR_EQUAL); - - // timestamps are loaded as Date objects - addValidTypes(Arrays.asList("Integer", "Double", "Float", "Date")); - //validTypes.add("datetime.date"); - //validTypes.add("datetime.time"); - //validTypes.add("datetime.datetime"); - - validPropTypes.add(Schema.INTEGER); - validPropTypes.add(Schema.FLOAT); - validPropTypes.add(Schema.TIMESTAMP); - validPropTypes.add(Schema.SCALAR_UNIT_SIZE); - validPropTypes.add(Schema.SCALAR_UNIT_FREQUENCY); - validPropTypes.add(Schema.SCALAR_UNIT_TIME); - - } - - public GreaterOrEqual(String name, String type, Object c) { - super(name, type, c); - - if (!validTypes.contains(constraintValue.getClass().getSimpleName())) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE104", "InvalidSchemaError: The property \"greater_or_equal\" expects comparable values")); - } - } - - - @Override - protected boolean isValid(Object value) { - if (Function.isFunction(value)) { - return true; - } - - // timestamps - if (value instanceof Date) { - if (constraintValue instanceof Date) { - return !((Date) value).before((Date) constraintValue); - } - return false; - } - // all others - Double n1 = new Double(value.toString()); - Double n2 = new Double(constraintValue.toString()); - return n1 >= n2; - } - - protected String errMsg(Object value) { - return 
String.format("The value \"%s\" of property \"%s\" must be greater or equal to \"%s\"", - valueMsg, propertyName, constraintValueMsg); - } -} - -/*python - -class GreaterOrEqual(Constraint): -"""Constraint class for "greater_or_equal" - -Constrains a property or parameter to a value greater than or equal -to ('>=') the value declared. -""" - -constraint_key = Constraint.GREATER_OR_EQUAL - -valid_types = (int, float, datetime.date, - datetime.time, datetime.datetime) - -valid_prop_types = (Schema.INTEGER, Schema.FLOAT, Schema.TIMESTAMP, - Schema.SCALAR_UNIT_SIZE, Schema.SCALAR_UNIT_FREQUENCY, - Schema.SCALAR_UNIT_TIME) - -def __init__(self, property_name, property_type, constraint): - super(GreaterOrEqual, self).__init__(property_name, property_type, - constraint) - if not isinstance(self.constraint_value, self.valid_types): - ThreadLocalsHolder.getCollector().appendException( - InvalidSchemaError(message=_('The property ' - '"greater_or_equal" expects ' - 'comparable values.'))) - -def _is_valid(self, value): - if toscaparser.functions.is_function(value) or \ - value >= self.constraint_value: - return True - return False - -def _err_msg(self, value): - return (_('The value "%(pvalue)s" of property "%(pname)s" must be ' - 'greater than or equal to "%(cvalue)s".') % - dict(pname=self.property_name, - pvalue=self.value_msg, - cvalue=self.constraint_value_msg)) - - -*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/GreaterThan.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/GreaterThan.java deleted file mode 100644 index b501907..0000000 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/GreaterThan.java +++ /dev/null @@ -1,120 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * SDC - * ================================================================================ - * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. 
- * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package org.onap.sdc.toscaparser.api.elements.constraints; - -import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; -import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; - -import java.util.Arrays; -import java.util.Date; - -public class GreaterThan extends Constraint { - - @Override - protected void setValues() { - - setConstraintKey(GREATER_THAN); - - // timestamps are loaded as Date objects - addValidTypes(Arrays.asList("Integer", "Double", "Float", "Date")); - //validTypes.add("datetime.date"); - //validTypes.add("datetime.time"); - //validTypes.add("datetime.datetime"); - - - validPropTypes.add(Schema.INTEGER); - validPropTypes.add(Schema.FLOAT); - validPropTypes.add(Schema.TIMESTAMP); - validPropTypes.add(Schema.SCALAR_UNIT_SIZE); - validPropTypes.add(Schema.SCALAR_UNIT_FREQUENCY); - validPropTypes.add(Schema.SCALAR_UNIT_TIME); - - } - - public GreaterThan(String name, String type, Object c) { - super(name, type, c); - - if (!validTypes.contains(constraintValue.getClass().getSimpleName())) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE105", "InvalidSchemaError: The property \"greater_than\" expects comparable values")); - } - } - - @Override - protected boolean 
isValid(Object value) { - - // timestamps - if (value instanceof Date) { - if (constraintValue instanceof Date) { - return ((Date) value).after((Date) constraintValue); - } - return false; - } - - Double n1 = new Double(value.toString()); - Double n2 = new Double(constraintValue.toString()); - return n1 > n2; - } - - protected String errMsg(Object value) { - return String.format("The value \"%s\" of property \"%s\" must be greater than \"%s\"", - valueMsg, propertyName, constraintValueMsg); - } - -} - -/* -class GreaterThan(Constraint): - """Constraint class for "greater_than" - - Constrains a property or parameter to a value greater than ('>') - the value declared. - """ - - constraint_key = Constraint.GREATER_THAN - - valid_types = (int, float, datetime.date, - datetime.time, datetime.datetime) - - valid_prop_types = (Schema.INTEGER, Schema.FLOAT, Schema.TIMESTAMP, - Schema.SCALAR_UNIT_SIZE, Schema.SCALAR_UNIT_FREQUENCY, - Schema.SCALAR_UNIT_TIME) - - def __init__(self, property_name, property_type, constraint): - super(GreaterThan, self).__init__(property_name, property_type, - constraint) - if not isinstance(constraint[self.GREATER_THAN], self.valid_types): - ValidationIsshueCollector.appendException( - InvalidSchemaError(message=_('The property "greater_than" ' - 'expects comparable values.'))) - - def _is_valid(self, value): - if value > self.constraint_value: - return True - - return False - - def _err_msg(self, value): - return (_('The value "%(pvalue)s" of property "%(pname)s" must be ' - 'greater than "%(cvalue)s".') % - dict(pname=self.property_name, - pvalue=self.value_msg, - cvalue=self.constraint_value_msg)) -*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/InRange.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/InRange.java deleted file mode 100644 index 4edf021..0000000 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/InRange.java +++ /dev/null @@ -1,186 +0,0 @@ -/*- - * 
============LICENSE_START======================================================= - * SDC - * ================================================================================ - * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package org.onap.sdc.toscaparser.api.elements.constraints; - -import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; -import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; - -import java.util.Arrays; -import java.util.Date; - -import java.util.ArrayList; - -public class InRange extends Constraint { - // Constraint class for "in_range" - - //Constrains a property or parameter to a value in range of (inclusive) - //the two values declared. 
- - private static final String UNBOUNDED = "UNBOUNDED"; - - private Object min, max; - - protected void setValues() { - - setConstraintKey(IN_RANGE); - - // timestamps are loaded as Date objects - addValidTypes(Arrays.asList("Integer", "Double", "Float", "String", "Date")); - //validTypes.add("datetime.date"); - //validTypes.add("datetime.time"); - //validTypes.add("datetime.datetime"); - - validPropTypes.add(Schema.INTEGER); - validPropTypes.add(Schema.FLOAT); - validPropTypes.add(Schema.TIMESTAMP); - validPropTypes.add(Schema.SCALAR_UNIT_SIZE); - validPropTypes.add(Schema.SCALAR_UNIT_FREQUENCY); - validPropTypes.add(Schema.SCALAR_UNIT_TIME); - validPropTypes.add(Schema.RANGE); - - } - - @SuppressWarnings("unchecked") - public InRange(String name, String type, Object c) { - super(name, type, c); - - if (!(constraintValue instanceof ArrayList) || ((ArrayList) constraintValue).size() != 2) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE106", "InvalidSchemaError: The property \"in_range\" expects a list")); - - } - - ArrayList alcv = (ArrayList) constraintValue; - String msg = "The property \"in_range\" expects comparable values"; - for (Object vo : alcv) { - if (!validTypes.contains(vo.getClass().getSimpleName())) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE107", "InvalidSchemaError: " + msg)); - } - // The only string we allow for range is the special value 'UNBOUNDED' - if ((vo instanceof String) && !((String) vo).equals(UNBOUNDED)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE108", "InvalidSchemaError: " + msg)); - } - } - min = alcv.get(0); - max = alcv.get(1); - - } - - @Override - protected boolean isValid(Object value) { - - // timestamps - if (value instanceof Date) { - if (min instanceof Date && max instanceof Date) { - return !((Date) value).before((Date) min) - && !((Date) value).after((Date) max); - } - return false; - } - - 
Double dvalue = new Double(value.toString()); - if (!(min instanceof String)) { - if (dvalue < new Double(min.toString())) { - return false; - } - } else if (!((String) min).equals(UNBOUNDED)) { - return false; - } - if (!(max instanceof String)) { - if (dvalue > new Double(max.toString())) { - return false; - } - } else if (!((String) max).equals(UNBOUNDED)) { - return false; - } - return true; - } - - @Override - protected String errMsg(Object value) { - return String.format("The value \"%s\" of property \"%s\" is out of range \"(min:%s, max:%s)\"", - valueMsg, propertyName, min.toString(), max.toString()); - } - -} - -/*python - -class InRange(Constraint): - """Constraint class for "in_range" - - Constrains a property or parameter to a value in range of (inclusive) - the two values declared. - """ - UNBOUNDED = 'UNBOUNDED' - - constraint_key = Constraint.IN_RANGE - - valid_types = (int, float, datetime.date, - datetime.time, datetime.datetime, str) - - valid_prop_types = (Schema.INTEGER, Schema.FLOAT, Schema.TIMESTAMP, - Schema.SCALAR_UNIT_SIZE, Schema.SCALAR_UNIT_FREQUENCY, - Schema.SCALAR_UNIT_TIME, Schema.RANGE) - - def __init__(self, property_name, property_type, constraint): - super(InRange, self).__init__(property_name, property_type, constraint) - if(not isinstance(self.constraint_value, collections.Sequence) or - (len(constraint[self.IN_RANGE]) != 2)): - ValidationIssueCollector.appendException( - InvalidSchemaError(message=_('The property "in_range" ' - 'expects a list.'))) - - msg = _('The property "in_range" expects comparable values.') - for value in self.constraint_value: - if not isinstance(value, self.valid_types): - ValidationIssueCollector.appendException( - InvalidSchemaError(message=msg)) - # The only string we allow for range is the special value - # 'UNBOUNDED' - if(isinstance(value, str) and value != self.UNBOUNDED): - ValidationIssueCollector.appendException( - InvalidSchemaError(message=msg)) - - self.min = self.constraint_value[0] - 
self.max = self.constraint_value[1] - - def _is_valid(self, value): - if not isinstance(self.min, str): - if value < self.min: - return False - elif self.min != self.UNBOUNDED: - return False - if not isinstance(self.max, str): - if value > self.max: - return False - elif self.max != self.UNBOUNDED: - return False - return True - - def _err_msg(self, value): - return (_('The value "%(pvalue)s" of property "%(pname)s" is out of ' - 'range "(min:%(vmin)s, max:%(vmax)s)".') % - dict(pname=self.property_name, - pvalue=self.value_msg, - vmin=self.constraint_value_msg[0], - vmax=self.constraint_value_msg[1])) - -*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Length.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Length.java deleted file mode 100644 index 7988cb8..0000000 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Length.java +++ /dev/null @@ -1,100 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * SDC - * ================================================================================ - * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package org.onap.sdc.toscaparser.api.elements.constraints; - -import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; -import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; - -import java.util.Collections; - -public class Length extends Constraint { - // Constraint class for "length" - - // Constrains the property or parameter to a value of a given length. - - @Override - protected void setValues() { - - setConstraintKey(LENGTH); - addValidTypes(Collections.singletonList("Integer")); - - validPropTypes.add(Schema.STRING); - - } - - public Length(String name, String type, Object c) { - super(name, type, c); - - if (!validTypes.contains(constraintValue.getClass().getSimpleName())) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE109", "InvalidSchemaError: The property \"length\" expects an integer")); - } - } - - @Override - protected boolean isValid(Object value) { - if (value instanceof String && constraintValue instanceof Integer && - ((String) value).length() == (Integer) constraintValue) { - return true; - } - return false; - } - - @Override - protected String errMsg(Object value) { - return String.format("Length of value \"%s\" of property \"%s\" must be equal to \"%s\"", - value.toString(), propertyName, constraintValue.toString()); - } - -} - -/*python - class Length(Constraint): - """Constraint class for "length" - - Constrains the property or parameter to a value of a given length. 
- """ - - constraint_key = Constraint.LENGTH - - valid_types = (int, ) - - valid_prop_types = (Schema.STRING, ) - - def __init__(self, property_name, property_type, constraint): - super(Length, self).__init__(property_name, property_type, constraint) - if not isinstance(self.constraint_value, self.valid_types): - ValidationIsshueCollector.appendException( - InvalidSchemaError(message=_('The property "length" expects ' - 'an integer.'))) - - def _is_valid(self, value): - if isinstance(value, str) and len(value) == self.constraint_value: - return True - - return False - - def _err_msg(self, value): - return (_('Length of value "%(pvalue)s" of property "%(pname)s" ' - 'must be equal to "%(cvalue)s".') % - dict(pname=self.property_name, - pvalue=value, - cvalue=self.constraint_value)) -*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/LessOrEqual.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/LessOrEqual.java deleted file mode 100644 index 37a4afc..0000000 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/LessOrEqual.java +++ /dev/null @@ -1,124 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * SDC - * ================================================================================ - * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package org.onap.sdc.toscaparser.api.elements.constraints; - -import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; -import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; - -import java.util.Arrays; -import java.util.Date; - -public class LessOrEqual extends Constraint { - // Constraint class for "less_or_equal" - - // Constrains a property or parameter to a value less than or equal - // to ('<=') the value declared. - - protected void setValues() { - - setConstraintKey(LESS_OR_EQUAL); - - // timestamps are loaded as Date objects - addValidTypes(Arrays.asList("Integer", "Double", "Float", "Date")); - //validTypes.add("datetime.date"); - //validTypes.add("datetime.time"); - //validTypes.add("datetime.datetime"); - - validPropTypes.add(Schema.INTEGER); - validPropTypes.add(Schema.FLOAT); - validPropTypes.add(Schema.TIMESTAMP); - validPropTypes.add(Schema.SCALAR_UNIT_SIZE); - validPropTypes.add(Schema.SCALAR_UNIT_FREQUENCY); - validPropTypes.add(Schema.SCALAR_UNIT_TIME); - - } - - public LessOrEqual(String name, String type, Object c) { - super(name, type, c); - - if (!validTypes.contains(constraintValue.getClass().getSimpleName())) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE110", "InvalidSchemaError: The property \"less_or_equal\" expects comparable values")); - } - } - - @Override - protected boolean isValid(Object value) { - - // timestamps - if (value instanceof Date) { - if (constraintValue instanceof Date) { - return !((Date) value).after((Date) constraintValue); - } - return false; - } - - Double n1 = new Double(value.toString()); - Double n2 = new Double(constraintValue.toString()); - return n1 <= n2; - } - - @Override - protected String errMsg(Object value) { - return String.format("The value 
\"%s\" of property \"%s\" must be less or equal to \"%s\"", - valueMsg, propertyName, constraintValueMsg); - } - -} - -/*python - -class LessOrEqual(Constraint): - """Constraint class for "less_or_equal" - - Constrains a property or parameter to a value less than or equal - to ('<=') the value declared. - """ - - constraint_key = Constraint.LESS_OR_EQUAL - - valid_types = (int, float, datetime.date, - datetime.time, datetime.datetime) - - valid_prop_types = (Schema.INTEGER, Schema.FLOAT, Schema.TIMESTAMP, - Schema.SCALAR_UNIT_SIZE, Schema.SCALAR_UNIT_FREQUENCY, - Schema.SCALAR_UNIT_TIME) - - def __init__(self, property_name, property_type, constraint): - super(LessOrEqual, self).__init__(property_name, property_type, - constraint) - if not isinstance(self.constraint_value, self.valid_types): - ValidationIsshueCollector.appendException( - InvalidSchemaError(message=_('The property "less_or_equal" ' - 'expects comparable values.'))) - - def _is_valid(self, value): - if value <= self.constraint_value: - return True - - return False - - def _err_msg(self, value): - return (_('The value "%(pvalue)s" of property "%(pname)s" must be ' - 'less than or equal to "%(cvalue)s".') % - dict(pname=self.property_name, - pvalue=self.value_msg, - cvalue=self.constraint_value_msg)) -*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/LessThan.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/LessThan.java deleted file mode 100644 index 952861d..0000000 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/LessThan.java +++ /dev/null @@ -1,121 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * SDC - * ================================================================================ - * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. 
- * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package org.onap.sdc.toscaparser.api.elements.constraints; - -import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; -import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; - -import java.util.Arrays; -import java.util.Date; - -public class LessThan extends Constraint { - - @Override - protected void setValues() { - - setConstraintKey(LESS_THAN); - // timestamps are loaded as Date objects - addValidTypes(Arrays.asList("Integer", "Double", "Float", "Date")); - //validTypes.add("datetime.date"); - //validTypes.add("datetime.time"); - //validTypes.add("datetime.datetime"); - - - validPropTypes.add(Schema.INTEGER); - validPropTypes.add(Schema.FLOAT); - validPropTypes.add(Schema.TIMESTAMP); - validPropTypes.add(Schema.SCALAR_UNIT_SIZE); - validPropTypes.add(Schema.SCALAR_UNIT_FREQUENCY); - validPropTypes.add(Schema.SCALAR_UNIT_TIME); - - } - - public LessThan(String name, String type, Object c) { - super(name, type, c); - - if (!validTypes.contains(constraintValue.getClass().getSimpleName())) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE111", "InvalidSchemaError: The property \"less_than\" expects comparable values")); - } - } - - @Override - protected boolean isValid(Object value) { 
- - // timestamps - if (value instanceof Date) { - if (constraintValue instanceof Date) { - return ((Date) value).before((Date) constraintValue); - } - return false; - } - - Double n1 = new Double(value.toString()); - Double n2 = new Double(constraintValue.toString()); - return n1 < n2; - } - - @Override - protected String errMsg(Object value) { - return String.format("The value \"%s\" of property \"%s\" must be less than \"%s\"", - valueMsg, propertyName, constraintValueMsg); - } - -} - -/*python - -class LessThan(Constraint): -"""Constraint class for "less_than" - -Constrains a property or parameter to a value less than ('<') -the value declared. -""" - -constraint_key = Constraint.LESS_THAN - -valid_types = (int, float, datetime.date, - datetime.time, datetime.datetime) - -valid_prop_types = (Schema.INTEGER, Schema.FLOAT, Schema.TIMESTAMP, - Schema.SCALAR_UNIT_SIZE, Schema.SCALAR_UNIT_FREQUENCY, - Schema.SCALAR_UNIT_TIME) - -def __init__(self, property_name, property_type, constraint): - super(LessThan, self).__init__(property_name, property_type, - constraint) - if not isinstance(self.constraint_value, self.valid_types): - ValidationIsshueCollector.appendException( - InvalidSchemaError(message=_('The property "less_than" ' - 'expects comparable values.'))) - -def _is_valid(self, value): - if value < self.constraint_value: - return True - - return False - -def _err_msg(self, value): - return (_('The value "%(pvalue)s" of property "%(pname)s" must be ' - 'less than "%(cvalue)s".') % - dict(pname=self.property_name, - pvalue=self.value_msg, - cvalue=self.constraint_value_msg)) -*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/MaxLength.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/MaxLength.java deleted file mode 100644 index 9068b65..0000000 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/MaxLength.java +++ /dev/null @@ -1,110 +0,0 @@ -/*- - * 
============LICENSE_START======================================================= - * SDC - * ================================================================================ - * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package org.onap.sdc.toscaparser.api.elements.constraints; - -import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; -import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; - -import java.util.Collections; -import java.util.LinkedHashMap; - -public class MaxLength extends Constraint { - // Constraint class for "min_length" - - // Constrains the property or parameter to a value of a maximum length. 
- - @Override - protected void setValues() { - - setConstraintKey(MAX_LENGTH); - - addValidTypes(Collections.singletonList("Integer")); - - - validPropTypes.add(Schema.STRING); - validPropTypes.add(Schema.MAP); - - } - - public MaxLength(String name, String type, Object c) { - super(name, type, c); - - if (!validTypes.contains(constraintValue.getClass().getSimpleName())) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE112", "InvalidSchemaError: The property \"max_length\" expects an integer")); - } - } - - @SuppressWarnings("unchecked") - @Override - protected boolean isValid(Object value) { - if (value instanceof String && constraintValue instanceof Integer - && ((String) value).length() <= (Integer) constraintValue) { - return true; - } else { - return value instanceof LinkedHashMap && constraintValue instanceof Integer - && ((LinkedHashMap) value).size() <= (Integer) constraintValue; - } - } - - @Override - protected String errMsg(Object value) { - return String.format("Length of value \"%s\" of property \"%s\" must be no greater than \"%s\"", - value.toString(), propertyName, constraintValue.toString()); - } - -} - -/*python - -class MaxLength(Constraint): - """Constraint class for "max_length" - - Constrains the property or parameter to a value to a maximum length. 
- """ - - constraint_key = Constraint.MAX_LENGTH - - valid_types = (int, ) - - valid_prop_types = (Schema.STRING, Schema.MAP) - - def __init__(self, property_name, property_type, constraint): - super(MaxLength, self).__init__(property_name, property_type, - constraint) - if not isinstance(self.constraint_value, self.valid_types): - ValidationIsshueCollector.appendException( - InvalidSchemaError(message=_('The property "max_length" ' - 'expects an integer.'))) - - def _is_valid(self, value): - if ((isinstance(value, str) or isinstance(value, dict)) and - len(value) <= self.constraint_value): - return True - - return False - - def _err_msg(self, value): - return (_('Length of value "%(pvalue)s" of property "%(pname)s" ' - 'must be no greater than "%(cvalue)s".') % - dict(pname=self.property_name, - pvalue=value, - cvalue=self.constraint_value)) -*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/MinLength.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/MinLength.java deleted file mode 100644 index eb1d870..0000000 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/MinLength.java +++ /dev/null @@ -1,109 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * SDC - * ================================================================================ - * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package org.onap.sdc.toscaparser.api.elements.constraints; - -import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; -import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; - -import java.util.Collections; -import java.util.LinkedHashMap; - -public class MinLength extends Constraint { - // Constraint class for "min_length" - - // Constrains the property or parameter to a value of a minimum length. - - @Override - protected void setValues() { - - setConstraintKey(MIN_LENGTH); - - addValidTypes(Collections.singletonList("Integer")); - - validPropTypes.add(Schema.STRING); - validPropTypes.add(Schema.MAP); - - } - - public MinLength(String name, String type, Object c) { - super(name, type, c); - - if (!validTypes.contains(constraintValue.getClass().getSimpleName())) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE113", "InvalidSchemaError: The property \"min_length\" expects an integer")); - } - } - - @SuppressWarnings("unchecked") - @Override - protected boolean isValid(Object value) { - if (value instanceof String && constraintValue instanceof Integer - && ((String) value).length() >= (Integer) constraintValue) { - return true; - } else { - return value instanceof LinkedHashMap && constraintValue instanceof Integer - && ((LinkedHashMap) value).size() >= (Integer) constraintValue; - } - } - - @Override - protected String errMsg(Object value) { - return String.format("Length of value \"%s\" of property \"%s\" must be at least \"%s\"", - value.toString(), propertyName, constraintValue.toString()); - } - -} - -/*python - -class MinLength(Constraint): - """Constraint class for "min_length" - - Constrains the property or parameter to a value to a minimum length. 
- """ - - constraint_key = Constraint.MIN_LENGTH - - valid_types = (int, ) - - valid_prop_types = (Schema.STRING, Schema.MAP) - - def __init__(self, property_name, property_type, constraint): - super(MinLength, self).__init__(property_name, property_type, - constraint) - if not isinstance(self.constraint_value, self.valid_types): - ValidationIsshueCollector.appendException( - InvalidSchemaError(message=_('The property "min_length" ' - 'expects an integer.'))) - - def _is_valid(self, value): - if ((isinstance(value, str) or isinstance(value, dict)) and - len(value) >= self.constraint_value): - return True - - return False - - def _err_msg(self, value): - return (_('Length of value "%(pvalue)s" of property "%(pname)s" ' - 'must be at least "%(cvalue)s".') % - dict(pname=self.property_name, - pvalue=value, - cvalue=self.constraint_value)) -*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Pattern.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Pattern.java deleted file mode 100644 index 913e922..0000000 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Pattern.java +++ /dev/null @@ -1,116 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * SDC - * ================================================================================ - * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package org.onap.sdc.toscaparser.api.elements.constraints; - -import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; -import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; - -import java.util.Collections; -import java.util.regex.Matcher; -import java.util.regex.PatternSyntaxException; - -public class Pattern extends Constraint { - - @Override - protected void setValues() { - - setConstraintKey(PATTERN); - - addValidTypes(Collections.singletonList("String")); - - validPropTypes.add(Schema.STRING); - - } - - - public Pattern(String name, String type, Object c) { - super(name, type, c); - - if (!validTypes.contains(constraintValue.getClass().getSimpleName())) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE114", "InvalidSchemaError: The property \"pattern\" expects a string")); - } - } - - @Override - protected boolean isValid(Object value) { - try { - if (!(value instanceof String)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE115", String.format("ValueError: Input value \"%s\" to \"pattern\" property \"%s\" must be a string", - value.toString(), propertyName))); - return false; - } - String strp = constraintValue.toString(); - String strm = value.toString(); - java.util.regex.Pattern pattern = java.util.regex.Pattern.compile(strp); - Matcher matcher = pattern.matcher(strm); - if (matcher.find() && matcher.end() == strm.length()) { - return true; - } - return false; - } catch (PatternSyntaxException pse) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE116", String.format("ValueError: Invalid regex \"%s\" in \"pattern\" property \"%s\"", - constraintValue.toString(), propertyName))); - return false; - } - } - - @Override - 
protected String errMsg(Object value) { - return String.format("The value \"%s\" of property \"%s\" does not match the pattern \"%s\"", - value.toString(), propertyName, constraintValue.toString()); - } - -} - -/*python - -class Pattern(Constraint): - """Constraint class for "pattern" - - Constrains the property or parameter to a value that is allowed by - the provided regular expression. - """ - - constraint_key = Constraint.PATTERN - - valid_types = (str, ) - - valid_prop_types = (Schema.STRING, ) - - def __init__(self, property_name, property_type, constraint): - super(Pattern, self).__init__(property_name, property_type, constraint) - if not isinstance(self.constraint_value, self.valid_types): - ValidationIsshueCollector.appendException( - InvalidSchemaError(message=_('The property "pattern" ' - 'expects a string.'))) - self.match = re.compile(self.constraint_value).match - - def _is_valid(self, value): - match = self.match(value) - return match is not None and match.end() == len(value) - - def _err_msg(self, value): - return (_('The value "%(pvalue)s" of property "%(pname)s" does not ' - 'match pattern "%(cvalue)s".') % - dict(pname=self.property_name, - pvalue=value, - cvalue=self.constraint_value)) -*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Schema.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Schema.java deleted file mode 100644 index 15ec597..0000000 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Schema.java +++ /dev/null @@ -1,309 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * SDC - * ================================================================================ - * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. 
- * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package org.onap.sdc.toscaparser.api.elements.constraints; - -import com.google.common.collect.ImmutableMap; -import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; -import org.onap.sdc.toscaparser.api.elements.enums.FileSize; -import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.LinkedHashMap; -import java.util.Map; - - -public class Schema { - - private static final String TYPE = "type"; - private static final String REQUIRED = "required"; - private static final String DESCRIPTION = "description"; - private static final String DEFAULT = "default"; - private static final String CONSTRAINTS = "constraints"; - private static final String STATUS = "status"; - private static final String ENTRYSCHEMA = "entry_schema"; - private static final String[] KEYS = { - TYPE, REQUIRED, DESCRIPTION, DEFAULT, CONSTRAINTS, ENTRYSCHEMA, STATUS}; - - public static final String INTEGER = "integer"; - public static final String STRING = "string"; - public static final String BOOLEAN = "boolean"; - public static final String FLOAT = "float"; - public static final String RANGE = "range"; - public static final String NUMBER = "number"; - public static final String 
TIMESTAMP = "timestamp"; - public static final String LIST = "list"; - public static final String MAP = "map"; - public static final String SCALAR_UNIT_SIZE = "scalar-unit.size"; - public static final String SCALAR_UNIT_FREQUENCY = "scalar-unit.frequency"; - public static final String SCALAR_UNIT_TIME = "scalar-unit.time"; - public static final String VERSION = "version"; - public static final String PORTDEF = "PortDef"; - public static final String PORTSPEC = "PortSpec"; //??? PortSpec.SHORTNAME - public static final String JSON = "json"; - - public static final String[] PROPERTY_TYPES = { - INTEGER, STRING, BOOLEAN, FLOAT, RANGE, NUMBER, TIMESTAMP, LIST, MAP, - SCALAR_UNIT_SIZE, SCALAR_UNIT_FREQUENCY, SCALAR_UNIT_TIME, - VERSION, PORTDEF, PORTSPEC, JSON}; - - public static final String[] SIMPLE_PROPERTY_TYPES = { - INTEGER, STRING, BOOLEAN, FLOAT, RANGE, NUMBER, TIMESTAMP, - SCALAR_UNIT_SIZE, SCALAR_UNIT_FREQUENCY, SCALAR_UNIT_TIME, - VERSION}; - - @SuppressWarnings("unused") - private static final String SCALAR_UNIT_SIZE_DEFAULT = "B"; - - private static Map scalarUnitSizeDict = ImmutableMap.builder() - .put("B", FileSize.B) - .put("KB", FileSize.KB) - .put("MB", FileSize.MB) - .put("GB", FileSize.GB) - .put("TB", FileSize.TB) - .put("KIB", FileSize.KIB) - .put("MIB", FileSize.MIB) - .put("GIB", FileSize.GIB) - .put("TIB", FileSize.TIB) - .build(); - - - private String name; - private LinkedHashMap schema; - private int len; - private ArrayList constraintsList; - - - public Schema(String name, LinkedHashMap schemaDict) { - this.name = name; - - if (!(schemaDict instanceof LinkedHashMap)) { - //msg = (_('Schema definition of "%(pname)s" must be a dict.') - // % dict(pname=name)) - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE117", String.format( - "InvalidSchemaError: Schema definition of \"%s\" must be a dict", this.name))); - } - - if (schemaDict.get("type") == null) { - //msg = (_('Schema definition of "%(pname)s" must 
have a "type" ' - // 'attribute.') % dict(pname=name)) - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE118", String.format( - "InvalidSchemaError: Schema definition of \"%s\" must have a \"type\" attribute", this.name))); - } - - schema = schemaDict; - len = 0; //??? None - constraintsList = new ArrayList<>(); - } - - public String getType() { - return (String) schema.get(TYPE); - } - - public boolean isRequired() { - return (boolean) schema.getOrDefault(REQUIRED, true); - } - - public String getDescription() { - return (String) schema.getOrDefault(DESCRIPTION, ""); - } - - public Object getDefault() { - return schema.get(DEFAULT); - } - - public String getStatus() { - return (String) schema.getOrDefault(STATUS, ""); - } - - public static boolean isRequestedTypeSimple(String type) { - return Arrays.asList(SIMPLE_PROPERTY_TYPES).contains(type); - } - - @SuppressWarnings("unchecked") - public ArrayList getConstraints() { - if (constraintsList.size() == 0) { - Object cob = schema.get(CONSTRAINTS); - if (cob instanceof ArrayList) { - ArrayList constraintSchemata = (ArrayList) cob; - for (Object ob : constraintSchemata) { - if (ob instanceof LinkedHashMap) { - for (String cClass : ((LinkedHashMap) ob).keySet()) { - Constraint c = Constraint.factory(cClass, name, getType(), ob); - if (c != null) { - constraintsList.add(c); - } else { - // error - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE119", String.format( - "UnknownFieldError: Constraint type \"%s\" for property \"%s\" is not supported", - cClass, name))); - } - break; - } - } - } - } - } - return constraintsList; - } - - @SuppressWarnings("unchecked") - public LinkedHashMap getEntrySchema() { - return (LinkedHashMap) schema.get(ENTRYSCHEMA); - } - - // Python intrinsic methods... 
- - // substitute for __getitem__ (aka self[key]) - public Object getItem(String key) { - return schema.get(key); - } - - /* - def __iter__(self): - for k in self.KEYS: - try: - self.schema[k] - except KeyError: - pass - else: - yield k - */ - - // substitute for __len__ (aka self.len()) - public int getLen() { - int len = 0; - for (String k : KEYS) { - if (schema.get(k) != null) { - len++; - } - this.len = len; - } - return this.len; - } - - // getter - public LinkedHashMap getSchema() { - return schema; - } - -} - -/*python - -class Schema(collections.Mapping): - -KEYS = ( - TYPE, REQUIRED, DESCRIPTION, - DEFAULT, CONSTRAINTS, ENTRYSCHEMA, STATUS -) = ( - 'type', 'required', 'description', - 'default', 'constraints', 'entry_schema', 'status' -) - -PROPERTY_TYPES = ( - INTEGER, STRING, BOOLEAN, FLOAT, RANGE, - NUMBER, TIMESTAMP, LIST, MAP, - SCALAR_UNIT_SIZE, SCALAR_UNIT_FREQUENCY, SCALAR_UNIT_TIME, - VERSION, PORTDEF, PORTSPEC -) = ( - 'integer', 'string', 'boolean', 'float', 'range', - 'number', 'timestamp', 'list', 'map', - 'scalar-unit.size', 'scalar-unit.frequency', 'scalar-unit.time', - 'version', 'PortDef', PortSpec.SHORTNAME -) - -SCALAR_UNIT_SIZE_DEFAULT = 'B' -scalarUnitSizeDict = {'B': 1, 'KB': 1000, 'KIB': 1024, 'MB': 1000000, - 'MIB': 1048576, 'GB': 1000000000, - 'GIB': 1073741824, 'TB': 1000000000000, - 'TIB': 1099511627776} - -def __init__(self, name, schema_dict): - self.name = name - if not isinstance(schema_dict, collections.Mapping): - msg = (_('Schema definition of "%(pname)s" must be a dict.') - % dict(pname=name)) - ValidationIssueCollector.appendException(InvalidSchemaError(message=msg)) - - try: - schema_dict['type'] - except KeyError: - msg = (_('Schema definition of "%(pname)s" must have a "type" ' - 'attribute.') % dict(pname=name)) - ValidationIssueCollector.appendException(InvalidSchemaError(message=msg)) - - self.schema = schema_dict - self.len = None - self.constraints_list = [] - -@property -def type(self): - return 
self.schema[self.TYPE] - -@property -def required(self): - return self.schema.get(self.REQUIRED, True) - -@property -def description(self): - return self.schema.get(self.DESCRIPTION, '') - -@property -def default(self): - return self.schema.get(self.DEFAULT) - -@property -def status(self): - return self.schema.get(self.STATUS, '') - -@property -def constraints(self): - if not self.constraints_list: - constraint_schemata = self.schema.get(self.CONSTRAINTS) - if constraint_schemata: - self.constraints_list = [Constraint(self.name, - self.type, - cschema) - for cschema in constraint_schemata] - return self.constraints_list - -@property -def entry_schema(self): - return self.schema.get(self.ENTRYSCHEMA) - -def __getitem__(self, key): - return self.schema[key] - -def __iter__(self): - for k in self.KEYS: - try: - self.schema[k] - except KeyError: - pass - else: - yield k - -def __len__(self): - if self.len is None: - self.len = len(list(iter(self))) - return self.len -*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/ValidValues.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/ValidValues.java deleted file mode 100644 index c3a192d..0000000 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/ValidValues.java +++ /dev/null @@ -1,99 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * SDC - * ================================================================================ - * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package org.onap.sdc.toscaparser.api.elements.constraints; - -import java.util.ArrayList; -import java.util.Collections; - -public class ValidValues extends Constraint { - - - protected void setValues() { - setConstraintKey(VALID_VALUES); - Collections.addAll(validPropTypes, Schema.PROPERTY_TYPES); - } - - - public ValidValues(String name, String type, Object c) { - super(name, type, c); - } - - @SuppressWarnings("unchecked") - protected boolean isValid(Object val) { - if (!(constraintValue instanceof ArrayList)) { - return false; - } - if (val instanceof ArrayList) { - boolean bAll = true; - for (Object v : (ArrayList) val) { - if (!((ArrayList) constraintValue).contains(v)) { - bAll = false; - break; - } - } - return bAll; - } - return ((ArrayList) constraintValue).contains(val); - } - - protected String errMsg(Object value) { - return String.format("The value \"%s\" of property \"%s\" is not valid. Expected a value from \"%s\"", - value.toString(), propertyName, constraintValue.toString()); - } - -} - -/*python - -class ValidValues(Constraint): -"""Constraint class for "valid_values" - -Constrains a property or parameter to a value that is in the list of -declared values. 
-""" -constraint_key = Constraint.VALID_VALUES - -valid_prop_types = Schema.PROPERTY_TYPES - -def __init__(self, property_name, property_type, constraint): - super(ValidValues, self).__init__(property_name, property_type, - constraint) - if not isinstance(self.constraint_value, collections.Sequence): - ValidationIsshueCollector.appendException( - InvalidSchemaError(message=_('The property "valid_values" ' - 'expects a list.'))) - -def _is_valid(self, value): - print '*** payton parser validating ',value,' in ',self.constraint_value#GGG - if isinstance(value, list): - return all(v in self.constraint_value for v in value) - return value in self.constraint_value - -def _err_msg(self, value): - allowed = '[%s]' % ', '.join(str(a) for a in self.constraint_value) - return (_('The value "%(pvalue)s" of property "%(pname)s" is not ' - 'valid. Expected a value from "%(cvalue)s".') % - dict(pname=self.property_name, - pvalue=value, - cvalue=allowed)) - - -*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/enums/FileSize.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/enums/FileSize.java deleted file mode 100644 index b07f7fa..0000000 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/enums/FileSize.java +++ /dev/null @@ -1,32 +0,0 @@ -/* -============LICENSE_START======================================================= - SDC - ================================================================================ - Copyright (C) 2019 Nokia. All rights reserved. - ================================================================================ - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. 
- You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. - ============LICENSE_END========================================================= -*/ -package org.onap.sdc.toscaparser.api.elements.enums; - -public class FileSize { - public static final long B = 1L; - public static final long KB = 1000L; - public static final long MB = 1000000L; - public static final long GB = 1000000000L; - public static final long TB = 1000000000000L; - public static final long KIB = 1000L; - public static final long MIB = 1048576L; - public static final long GIB = 1073741824L; - public static final long TIB = 1099511627776L; -} diff --git a/src/main/java/org/onap/sdc/toscaparser/api/elements/enums/ToscaElementNames.java b/src/main/java/org/onap/sdc/toscaparser/api/elements/enums/ToscaElementNames.java deleted file mode 100644 index ac0d837..0000000 --- a/src/main/java/org/onap/sdc/toscaparser/api/elements/enums/ToscaElementNames.java +++ /dev/null @@ -1,40 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * SDC - * ================================================================================ - * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package org.onap.sdc.toscaparser.api.elements.enums; - -public enum ToscaElementNames { - - TYPE("type"), - PROPERTIES("properties"), - ANNOTATIONS("annotations"), - SOURCE_TYPE("source_type"); - - private String name; - - ToscaElementNames(String name) { - this.name = name; - } - - public String getName() { - return name; - } - -} diff --git a/src/main/java/org/onap/sdc/toscaparser/api/extensions/ExtTools.java b/src/main/java/org/onap/sdc/toscaparser/api/extensions/ExtTools.java deleted file mode 100644 index 5fbfca0..0000000 --- a/src/main/java/org/onap/sdc/toscaparser/api/extensions/ExtTools.java +++ /dev/null @@ -1,204 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * SDC - * ================================================================================ - * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package org.onap.sdc.toscaparser.api.extensions; - -import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; -import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; -import org.reflections.Reflections; -import org.reflections.scanners.ResourcesScanner; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.BufferedReader; -import java.io.InputStream; -import java.io.InputStreamReader; -import java.nio.charset.Charset; -import java.util.ArrayList; -import java.util.LinkedHashMap; -import java.util.Set; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -public class ExtTools { - - private static Logger log = LoggerFactory.getLogger(ExtTools.class.getName()); - - private static LinkedHashMap extensionInfo = new LinkedHashMap<>(); - - public ExtTools() { - extensionInfo = loadExtensions(); - } - - private LinkedHashMap loadExtensions() { - - LinkedHashMap extensions = new LinkedHashMap<>(); - - Reflections reflections = new Reflections("extensions", new ResourcesScanner()); - Set resourcePaths = reflections.getResources(Pattern.compile(".*\\.py$")); - - for (String resourcePath : resourcePaths) { - try (InputStream is = ExtTools.class.getClassLoader().getResourceAsStream(resourcePath); - InputStreamReader isr = new InputStreamReader(is, Charset.forName("UTF-8")); - BufferedReader br = new BufferedReader(isr);) { - String version = null; - ArrayList sections = null; - String defsFile = null; - String line; - - Pattern pattern = Pattern.compile("^([^#]\\S+)\\s*=\\s*(\\S.*)$"); - while ((line = br.readLine()) != null) { - line = line.replace("'", "\""); - Matcher matcher = pattern.matcher(line); - if (matcher.find()) { - if (matcher.group(1).equals("VERSION")) { - version = matcher.group(2); - if (version.startsWith("'") 
|| version.startsWith("\"")) { - version = version.substring(1, version.length() - 1); - } - } else if (matcher.group(1).equals("DEFS_FILE")) { - String fn = matcher.group(2); - if (fn.startsWith("'") || fn.startsWith("\"")) { - fn = fn.substring(1, fn.length() - 1); - } - defsFile = resourcePath.replaceFirst("\\w*.py$", fn); - } else if (matcher.group(1).equals("SECTIONS")) { - sections = new ArrayList<>(); - Pattern secpat = Pattern.compile("\"([^\"]+)\""); - Matcher secmat = secpat.matcher(matcher.group(2)); - while (secmat.find()) { - sections.add(secmat.group(1)); - } - } - } - } - - if (version != null && defsFile != null) { - LinkedHashMap ext = new LinkedHashMap<>(); - ext.put("defs_file", defsFile); - if (sections != null) { - ext.put("sections", sections); - } - extensions.put(version, ext); - } - } catch (Exception e) { - log.error("ExtTools - loadExtensions - {}", e); - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue( - "JE281", "Failed to load extensions" + e.getMessage())); - } - } - return extensions; - } - - public ArrayList getVersions() { - return new ArrayList(extensionInfo.keySet()); - } - - public LinkedHashMap> getSections() { - LinkedHashMap> sections = new LinkedHashMap<>(); - for (String version : extensionInfo.keySet()) { - LinkedHashMap eiv = (LinkedHashMap) extensionInfo.get(version); - sections.put(version, (ArrayList) eiv.get("sections")); - } - return sections; - } - - public String getDefsFile(String version) { - LinkedHashMap eiv = (LinkedHashMap) extensionInfo.get(version); - return (String) eiv.get("defs_file"); - } - -} - -/*python - -from toscaparser.common.exception import ToscaExtAttributeError -from toscaparser.common.exception import ToscaExtImportError - -log = logging.getLogger("tosca.model") - -REQUIRED_ATTRIBUTES = ['VERSION', 'DEFS_FILE'] - - -class ExtTools(object): - def __init__(self): - self.extensionInfo = self._load_extensions() - - def _load_extensions(self): - '''Dynamically 
load all the extensions .''' - extensions = {} - - # Use the absolute path of the class path - abs_path = os.path.dirname(os.path.abspath(__file__)) - - extdirs = [e for e in os.listdir(abs_path) if - not e.startswith('tests') and - os.path.isdir(os.path.join(abs_path, e))] - - for e in extdirs: - log.info(e) - extpath = abs_path + '/' + e - # Grab all the extension files in the given path - ext_files = [f for f in os.listdir(extpath) if f.endswith('.py') - and not f.startswith('__init__')] - - # For each module, pick out the target translation class - for f in ext_files: - log.info(f) - ext_name = 'toscaparser/extensions/' + e + '/' + f.strip('.py') - ext_name = ext_name.replace('/', '.') - try: - extinfo = importlib.import_module(ext_name) - version = getattr(extinfo, 'VERSION') - defs_file = extpath + '/' + getattr(extinfo, 'DEFS_FILE') - - # Sections is an optional attribute - sections = getattr(extinfo, 'SECTIONS', ()) - - extensions[version] = {'sections': sections, - 'defs_file': defs_file} - except ImportError: - raise ToscaExtImportError(ext_name=ext_name) - except AttributeError: - attrs = ', '.join(REQUIRED_ATTRIBUTES) - raise ToscaExtAttributeError(ext_name=ext_name, - attrs=attrs) - - print 'Extensions ',extensions#GGG - return extensions - - def get_versions(self): - return self.extensionInfo.keys() - - def get_sections(self): - sections = {} - for version in self.extensionInfo.keys(): - sections[version] = self.extensionInfo[version]['sections'] - - return sections - - def get_defs_file(self, version): - versiondata = self.extensionInfo.get(version) - - if versiondata: - return versiondata.get('defs_file') - else: - return None -*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/functions/Concat.java b/src/main/java/org/onap/sdc/toscaparser/api/functions/Concat.java deleted file mode 100644 index 4ebeba9..0000000 --- a/src/main/java/org/onap/sdc/toscaparser/api/functions/Concat.java +++ /dev/null @@ -1,97 +0,0 @@ -/*- - * 
============LICENSE_START======================================================= - * SDC - * ================================================================================ - * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package org.onap.sdc.toscaparser.api.functions; - -import org.onap.sdc.toscaparser.api.TopologyTemplate; -import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; -import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; - -import java.util.ArrayList; - -public class Concat extends Function { - // Validate the function and provide an instance of the function - - // Concatenation of values are supposed to be produced at runtime and - // therefore its the responsibility of the TOSCA engine to implement the - // evaluation of Concat functions. 
- - // Arguments: - - // * List of strings that needs to be concatenated - - // Example: - - // [ 'http://', - // get_attribute: [ server, public_address ], - // ':' , - // get_attribute: [ server, port ] ] - - - public Concat(TopologyTemplate ttpl, Object context, String name, ArrayList args) { - super(ttpl, context, name, args); - } - - @Override - public Object result() { - return this; - } - - @Override - void validate() { - if (args.size() < 1) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE145", - "ValueError: Invalid arguments for function \"concat\". " + - "Expected at least one argument")); - } - } - -} - -/*python - -class Concat(Function): -"""Validate the function and provide an instance of the function - -Concatenation of values are supposed to be produced at runtime and -therefore its the responsibility of the TOSCA engine to implement the -evaluation of Concat functions. - -Arguments: - -* List of strings that needs to be concatenated - -Example: - - [ 'http://', - get_attribute: [ server, public_address ], - ':' , - get_attribute: [ server, port ] ] -""" - -def validate(self): - if len(self.args) < 1: - ValidationIsshueCollector.appendException( - ValueError(_('Invalid arguments for function "{0}". Expected ' - 'at least one arguments.').format(CONCAT))) - -def result(self): - return self -*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/functions/Function.java b/src/main/java/org/onap/sdc/toscaparser/api/functions/Function.java deleted file mode 100644 index 711a7ca..0000000 --- a/src/main/java/org/onap/sdc/toscaparser/api/functions/Function.java +++ /dev/null @@ -1,259 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * SDC - * ================================================================================ - * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. 
- * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package org.onap.sdc.toscaparser.api.functions; - - -import org.onap.sdc.toscaparser.api.TopologyTemplate; - -import java.util.ArrayList; -import java.util.HashMap; -import java.util.LinkedHashMap; -import java.util.Map; - -public abstract class Function { - - protected static final String GET_PROPERTY = "get_property"; - protected static final String GET_ATTRIBUTE = "get_attribute"; - protected static final String GET_INPUT = "get_input"; - protected static final String GET_OPERATION_OUTPUT = "get_operation_output"; - protected static final String CONCAT = "concat"; - protected static final String TOKEN = "token"; - - protected static final String SELF = "SELF"; - protected static final String HOST = "HOST"; - protected static final String TARGET = "TARGET"; - protected static final String SOURCE = "SOURCE"; - - protected static final String HOSTED_ON = "tosca.relationships.HostedOn"; - - protected static HashMap functionMappings = _getFunctionMappings(); - - private static HashMap _getFunctionMappings() { - HashMap map = new HashMap<>(); - map.put(GET_PROPERTY, "GetProperty"); - map.put(GET_INPUT, "GetInput"); - map.put(GET_ATTRIBUTE, "GetAttribute"); - map.put(GET_OPERATION_OUTPUT, "GetOperationOutput"); - map.put(CONCAT, "Concat"); - 
map.put(TOKEN, "Token"); - return map; - } - - protected TopologyTemplate toscaTpl; - protected Object context; - protected String name; - protected ArrayList args; - - - public Function(TopologyTemplate _toscaTpl, Object _context, String _name, ArrayList _args) { - toscaTpl = _toscaTpl; - context = _context; - name = _name; - args = _args; - validate(); - - } - - abstract Object result(); - - abstract void validate(); - - @SuppressWarnings("unchecked") - public static boolean isFunction(Object funcObj) { - // Returns True if the provided function is a Tosca intrinsic function. - // - //Examples: - // - //* "{ get_property: { SELF, port } }" - //* "{ get_input: db_name }" - //* Function instance - - //:param function: Function as string or a Function instance. - //:return: True if function is a Tosca intrinsic function, otherwise False. - // - - if (funcObj instanceof LinkedHashMap) { - LinkedHashMap function = (LinkedHashMap) funcObj; - if (function.size() == 1) { - String funcName = (new ArrayList(function.keySet())).get(0); - return functionMappings.keySet().contains(funcName); - } - } - return (funcObj instanceof Function); - } - - @SuppressWarnings("unchecked") - public static Object getFunction(TopologyTemplate ttpl, Object context, Object rawFunctionObj, boolean resolveGetInput) { - // Gets a Function instance representing the provided template function. - - // If the format provided raw_function format is not relevant for template - // functions or if the function name doesn't exist in function mapping the - // method returns the provided raw_function. - // - // :param tosca_tpl: The tosca template. - // :param node_template: The node template the function is specified for. - // :param raw_function: The raw function as dict. - // :return: Template function as Function instance or the raw_function if - // parsing was unsuccessful. 
- - - // iterate over leaves of the properties's tree and convert function leaves to function object, - // support List and Map nested, - // assuming that leaf value of function is always map type contains 1 item (e.g. my_leaf: {get_input: xxx}). - - if (rawFunctionObj instanceof LinkedHashMap) { // In map type case - LinkedHashMap rawFunction = ((LinkedHashMap) rawFunctionObj); - if (rawFunction.size() == 1 && - !(rawFunction.values().iterator().next() instanceof LinkedHashMap)) { // End point - return getFunctionForObjectItem(ttpl, context, rawFunction, resolveGetInput); - } else { - return getFunctionForMap(ttpl, context, rawFunction, resolveGetInput); - } - } else if (rawFunctionObj instanceof ArrayList) { // In list type case - return getFunctionForList(ttpl, context, (ArrayList) rawFunctionObj, resolveGetInput); - } - - return rawFunctionObj; - } - - private static Object getFunctionForList(TopologyTemplate ttpl, Object context, ArrayList rawFunctionObj, boolean resolveGetInput) { - // iterate over list properties in recursion, convert leaves to function, - // and collect them in the same hierarchy as the original list. - ArrayList rawFunctionObjList = new ArrayList<>(); - for (Object rawFunctionObjItem : rawFunctionObj) { - rawFunctionObjList.add(getFunction(ttpl, context, rawFunctionObjItem, resolveGetInput)); - } - return rawFunctionObjList; - } - - private static Object getFunctionForMap(TopologyTemplate ttpl, Object context, LinkedHashMap rawFunction, boolean resolveGetInput) { - // iterate over map nested properties in recursion, convert leaves to function, - // and collect them in the same hierarchy as the original map. 
- LinkedHashMap rawFunctionObjMap = new LinkedHashMap(); - for (Object rawFunctionObjItem : rawFunction.entrySet()) { - Object itemValue = getFunction(ttpl, context, ((Map.Entry) rawFunctionObjItem).getValue(), resolveGetInput); - rawFunctionObjMap.put(((Map.Entry) rawFunctionObjItem).getKey(), itemValue); - } - return rawFunctionObjMap; - } - - private static Object getFunctionForObjectItem(TopologyTemplate ttpl, Object context, Object rawFunctionObjItem, boolean resolveGetInput) { - if (isFunction(rawFunctionObjItem)) { - LinkedHashMap rawFunction = (LinkedHashMap) rawFunctionObjItem; - String funcName = (new ArrayList(rawFunction.keySet())).get(0); - if (functionMappings.keySet().contains(funcName)) { - String funcType = functionMappings.get(funcName); - Object oargs = (new ArrayList(rawFunction.values())).get(0); - ArrayList funcArgs; - if (oargs instanceof ArrayList) { - funcArgs = (ArrayList) oargs; - } else { - funcArgs = new ArrayList<>(); - funcArgs.add(oargs); - } - - switch (funcType) { - case "GetInput": - if (resolveGetInput) { - GetInput input = new GetInput(ttpl, context, funcName, funcArgs); - return input.result(); - } - return new GetInput(ttpl, context, funcName, funcArgs); - case "GetAttribute": - return new GetAttribute(ttpl, context, funcName, funcArgs); - case "GetProperty": - return new GetProperty(ttpl, context, funcName, funcArgs); - case "GetOperationOutput": - return new GetOperationOutput(ttpl, context, funcName, funcArgs); - case "Concat": - return new Concat(ttpl, context, funcName, funcArgs); - case "Token": - return new Token(ttpl, context, funcName, funcArgs); - } - } - } - - return rawFunctionObjItem; - } - - @Override - public String toString() { - String argsStr = args.size() > 1 ? 
args.toString() : args.get(0).toString(); - return name + ":" + argsStr; - } -} - -/*python - -from toscaparser.common.exception import ValidationIsshueCollector -from toscaparser.common.exception import UnknownInputError -from toscaparser.dataentity import DataEntity -from toscaparser.elements.constraints import Schema -from toscaparser.elements.datatype import DataType -from toscaparser.elements.entity_type import EntityType -from toscaparser.elements.relationshiptype import RelationshipType -from toscaparser.elements.statefulentitytype import StatefulEntityType -from toscaparser.utils.gettextutils import _ - - -GET_PROPERTY = 'get_property' -GET_ATTRIBUTE = 'get_attribute' -GET_INPUT = 'get_input' -GET_OPERATION_OUTPUT = 'get_operation_output' -CONCAT = 'concat' -TOKEN = 'token' - -SELF = 'SELF' -HOST = 'HOST' -TARGET = 'TARGET' -SOURCE = 'SOURCE' - -HOSTED_ON = 'tosca.relationships.HostedOn' - - -@six.add_metaclass(abc.ABCMeta) -class Function(object): - """An abstract type for representing a Tosca template function.""" - - def __init__(self, tosca_tpl, context, name, args): - self.tosca_tpl = tosca_tpl - self.context = context - self.name = name - self.args = args - self.validate() - - @abc.abstractmethod - def result(self): - """Invokes the function and returns its result - - Some methods invocation may only be relevant on runtime (for example, - getting runtime properties) and therefore its the responsibility of - the orchestrator/translator to take care of such functions invocation. - - :return: Function invocation result. 
- """ - return {self.name: self.args} - - @abc.abstractmethod - def validate(self): - """Validates function arguments.""" - pass -*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/functions/GetAttribute.java b/src/main/java/org/onap/sdc/toscaparser/api/functions/GetAttribute.java deleted file mode 100644 index 564d410..0000000 --- a/src/main/java/org/onap/sdc/toscaparser/api/functions/GetAttribute.java +++ /dev/null @@ -1,544 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * SDC - * ================================================================================ - * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package org.onap.sdc.toscaparser.api.functions; - -import org.onap.sdc.toscaparser.api.*; -import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; -import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; - -import java.util.ArrayList; -import java.util.LinkedHashMap; - -import org.onap.sdc.toscaparser.api.*; -import org.onap.sdc.toscaparser.api.elements.AttributeDef; -import org.onap.sdc.toscaparser.api.elements.CapabilityTypeDef; -import org.onap.sdc.toscaparser.api.elements.DataType; -import org.onap.sdc.toscaparser.api.elements.EntityType; -import org.onap.sdc.toscaparser.api.elements.NodeType; -import org.onap.sdc.toscaparser.api.elements.PropertyDef; -import org.onap.sdc.toscaparser.api.elements.RelationshipType; -import org.onap.sdc.toscaparser.api.elements.StatefulEntityType; -import org.onap.sdc.toscaparser.api.elements.constraints.Schema; - -public class GetAttribute extends Function { - // Get an attribute value of an entity defined in the service template - - // Node template attributes values are set in runtime and therefore its the - // responsibility of the Tosca engine to implement the evaluation of - // get_attribute functions. - - // Arguments: - - // * Node template name | HOST. - // * Attribute name. - - // If the HOST keyword is passed as the node template name argument the - // function will search each node template along the HostedOn relationship - // chain until a node which contains the attribute is found. 
- - // Examples: - - // * { get_attribute: [ server, private_address ] } - // * { get_attribute: [ HOST, private_address ] } - // * { get_attribute: [ HOST, private_address, 0 ] } - // * { get_attribute: [ HOST, private_address, 0, some_prop] } - - public GetAttribute(TopologyTemplate ttpl, Object context, String name, ArrayList args) { - super(ttpl, context, name, args); - } - - @Override - void validate() { - if (args.size() < 2) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE146", - "ValueError: Illegal arguments for function \"get_attribute\". Expected arguments: \"node-template-name\", \"req-or-cap\" (optional), \"property name.\"")); - return; - } else if (args.size() == 2) { - _findNodeTemplateContainingAttribute(); - } else { - NodeTemplate nodeTpl = _findNodeTemplate((String) args.get(0)); - if (nodeTpl == null) { - return; - } - int index = 2; - AttributeDef attr = nodeTpl.getTypeDefinition().getAttributeDefValue((String) args.get(1)); - if (attr != null) { - // found - } else { - index = 3; - // then check the req or caps - if (!(args.get(1) instanceof String) || !(args.get(2) instanceof String)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE146", "ValueError: Illegal arguments for function \"get_attribute\". Expected a String argument")); - } - - attr = _findReqOrCapAttribute(args.get(1).toString(), args.get(2).toString()); - if (attr == null) { - return; - } - } - - - String valueType = (String) attr.getSchema().get("type"); - if (args.size() > index) { - for (Object elem : args.subList(index, args.size())) { - if (valueType.equals("list")) { - if (!(elem instanceof Integer)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE147", String.format( - "ValueError: Illegal arguments for function \"get_attribute\" \"%s\". 
Expected positive integer argument", - elem.toString()))); - } - Object ob = attr.getSchema().get("entry_schema"); - valueType = (String) - ((LinkedHashMap) ob).get("type"); - } else if (valueType.equals("map")) { - Object ob = attr.getSchema().get("entry_schema"); - valueType = (String) - ((LinkedHashMap) ob).get("type"); - } else { - boolean bFound = false; - for (String p : Schema.PROPERTY_TYPES) { - if (p.equals(valueType)) { - bFound = true; - break; - } - } - if (bFound) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE148", String.format( - "ValueError: 'Illegal arguments for function \"get_attribute\". Unexpected attribute/index value \"%s\"", - elem))); - return; - } else { // It is a complex type - DataType dataType = new DataType(valueType, null); - LinkedHashMap props = - dataType.getAllProperties(); - PropertyDef prop = props.get((String) elem); - if (prop != null) { - valueType = (String) prop.getSchema().get("type"); - } else { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE149", String.format( - "KeyError: Illegal arguments for function \"get_attribute\". Attribute name \"%s\" not found in \"%\"", - elem, valueType))); - } - } - } - } - } - } - } - - @Override - public Object result() { - return this; - } - - private NodeTemplate getReferencedNodeTemplate() { - // Gets the NodeTemplate instance the get_attribute function refers to - - // If HOST keyword was used as the node template argument, the node - // template which contains the attribute along the HostedOn relationship - // chain will be returned. 
- - return _findNodeTemplateContainingAttribute(); - - } - - // Attributes can be explicitly created as part of the type definition - // or a property name can be implicitly used as an attribute name - private NodeTemplate _findNodeTemplateContainingAttribute() { - NodeTemplate nodeTpl = _findNodeTemplate((String) args.get(0)); - if (nodeTpl != null && - !_attributeExistsInType(nodeTpl.getTypeDefinition()) && - !nodeTpl.getProperties().keySet().contains(getAttributeName())) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE150", String.format( - "KeyError: Attribute \"%s\" was not found in node template \"%s\"", - getAttributeName(), nodeTpl.getName()))); - } - return nodeTpl; - } - - private boolean _attributeExistsInType(StatefulEntityType typeDefinition) { - LinkedHashMap attrsDef = typeDefinition.getAttributesDef(); - return attrsDef.get(getAttributeName()) != null; - } - - private NodeTemplate _findHostContainingAttribute(String nodeTemplateName) { - NodeTemplate nodeTemplate = _findNodeTemplate(nodeTemplateName); - if (nodeTemplate != null) { - LinkedHashMap hostedOnRel = - (LinkedHashMap) EntityType.TOSCA_DEF.get(HOSTED_ON); - for (RequirementAssignment r : nodeTemplate.getRequirements().getAll()) { - String targetName = r.getNodeTemplateName(); - NodeTemplate targetNode = _findNodeTemplate(targetName); - NodeType targetType = (NodeType) targetNode.getTypeDefinition(); - for (CapabilityTypeDef capability : targetType.getCapabilitiesObjects()) { -// if(((ArrayList)hostedOnRel.get("valid_target_types")).contains(capability.getType())) { - if (capability.inheritsFrom((ArrayList) hostedOnRel.get("valid_target_types"))) { - if (_attributeExistsInType(targetType)) { - return targetNode; - } - return _findHostContainingAttribute(targetName); - } - } - } - } - return null; - } - - - private NodeTemplate _findNodeTemplate(String nodeTemplateName) { - if (nodeTemplateName.equals(HOST)) { - // Currently this is the only way to 
tell whether the function - // is used within the outputs section of the TOSCA template. - if (context instanceof ArrayList) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE151", - "ValueError: \"get_attribute: [ HOST, ... ]\" is not allowed in \"outputs\" section of the TOSCA template")); - return null; - } - NodeTemplate nodeTpl = _findHostContainingAttribute(SELF); - if (nodeTpl == null) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE152", String.format( - "ValueError: \"get_attribute: [ HOST, ... ]\" was used in " + - "node template \"%s\" but \"%s\" was not found in " + - "the relationship chain", ((NodeTemplate) context).getName(), HOSTED_ON))); - return null; - } - return nodeTpl; - } - if (nodeTemplateName.equals(TARGET)) { - if (!(((EntityTemplate) context).getTypeDefinition() instanceof RelationshipType)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE153", - "KeyError: \"TARGET\" keyword can only be used in context " + - " to \"Relationships\" target node")); - return null; - } - return ((RelationshipTemplate) context).getTarget(); - } - if (nodeTemplateName.equals(SOURCE)) { - if (!(((EntityTemplate) context).getTypeDefinition() instanceof RelationshipType)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE154", - "KeyError: \"SOURCE\" keyword can only be used in context " + - " to \"Relationships\" source node")); - return null; - } - return ((RelationshipTemplate) context).getTarget(); - } - String name; - if (nodeTemplateName.equals(SELF) && !(context instanceof ArrayList)) { - name = ((NodeTemplate) context).getName(); - } else { - name = nodeTemplateName; - } - for (NodeTemplate nt : toscaTpl.getNodeTemplates()) { - if (nt.getName().equals(name)) { - return nt; - } - } - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE155", String.format( - 
"KeyError: Node template \"%s\" was not found", nodeTemplateName))); - return null; - } - - public AttributeDef _findReqOrCapAttribute(String reqOrCap, String attrName) { - - NodeTemplate nodeTpl = _findNodeTemplate((String) args.get(0)); - // Find attribute in node template's requirements - for (RequirementAssignment r : nodeTpl.getRequirements().getAll()) { - String nodeName = r.getNodeTemplateName(); - if (r.getName().equals(reqOrCap)) { - NodeTemplate nodeTemplate = _findNodeTemplate(nodeName); - return _getCapabilityAttribute(nodeTemplate, r.getName(), attrName); - } - } - // If requirement was not found, look in node template's capabilities - return _getCapabilityAttribute(nodeTpl, reqOrCap, attrName); - } - - private AttributeDef _getCapabilityAttribute(NodeTemplate nodeTemplate, - String capabilityName, - String attrName) { - // Gets a node template capability attribute - CapabilityAssignment cap = nodeTemplate.getCapabilities().getCapabilityByName(capabilityName); - - if (cap != null) { - AttributeDef attribute = null; - LinkedHashMap attrs = - cap.getDefinition().getAttributesDef(); - if (attrs != null && attrs.keySet().contains(attrName)) { - attribute = attrs.get(attrName); - } - if (attribute == null) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE156", String.format( - "KeyError: Attribute \"%s\" was not found in capability \"%s\" of node template \"%s\" referenced from node template \"%s\"", - attrName, capabilityName, nodeTemplate.getName(), ((NodeTemplate) context).getName()))); - } - return attribute; - } - String msg = String.format( - "Requirement/CapabilityAssignment \"%s\" referenced from node template \"%s\" was not found in node template \"%s\"", - capabilityName, ((NodeTemplate) context).getName(), nodeTemplate.getName()); - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE157", "KeyError: " + msg)); - return null; - } - - String getNodeTemplateName() { - return 
(String) args.get(0); - } - - String getAttributeName() { - return (String) args.get(1); - } - -} - -/*python - -class GetAttribute(Function): -"""Get an attribute value of an entity defined in the service template - -Node template attributes values are set in runtime and therefore its the -responsibility of the Tosca engine to implement the evaluation of -get_attribute functions. - -Arguments: - -* Node template name | HOST. -* Attribute name. - -If the HOST keyword is passed as the node template name argument the -function will search each node template along the HostedOn relationship -chain until a node which contains the attribute is found. - -Examples: - -* { get_attribute: [ server, private_address ] } -* { get_attribute: [ HOST, private_address ] } -* { get_attribute: [ HOST, private_address, 0 ] } -* { get_attribute: [ HOST, private_address, 0, some_prop] } -""" - -def validate(self): - if len(self.args) < 2: - ValidationIssueCollector.appendException( - ValueError(_('Illegal arguments for function "{0}". Expected ' - 'arguments: "node-template-name", "req-or-cap"' - '(optional), "property name"' - ).format(GET_ATTRIBUTE))) - return - elif len(self.args) == 2: - self._find_node_template_containing_attribute() - else: - node_tpl = self._find_node_template(self.args[0]) - if node_tpl is None: - return - index = 2 - attrs = node_tpl.type_definition.get_attributes_def() - found = [attrs[self.args[1]]] if self.args[1] in attrs else [] - if found: - attr = found[0] - else: - index = 3 - # then check the req or caps - attr = self._find_req_or_cap_attribute(self.args[1], - self.args[2]) - - value_type = attr.schema['type'] - if len(self.args) > index: - for elem in self.args[index:]: - if value_type == "list": - if not isinstance(elem, int): - ValidationIssueCollector.appendException( - ValueError(_('Illegal arguments for function' - ' "{0}". 
"{1}" Expected positive' - ' integer argument' - ).format(GET_ATTRIBUTE, elem))) - value_type = attr.schema['entry_schema']['type'] - elif value_type == "map": - value_type = attr.schema['entry_schema']['type'] - elif value_type in Schema.PROPERTY_TYPES: - ValidationIssueCollector.appendException( - ValueError(_('Illegal arguments for function' - ' "{0}". Unexpected attribute/' - 'index value "{1}"' - ).format(GET_ATTRIBUTE, elem))) - return - else: # It is a complex type - data_type = DataType(value_type) - props = data_type.get_all_properties() - found = [props[elem]] if elem in props else [] - if found: - prop = found[0] - value_type = prop.schema['type'] - else: - ValidationIssueCollector.appendException( - KeyError(_('Illegal arguments for function' - ' "{0}". Attribute name "{1}" not' - ' found in "{2}"' - ).format(GET_ATTRIBUTE, - elem, - value_type))) - -def result(self): - return self - -def get_referenced_node_template(self): - """Gets the NodeTemplate instance the get_attribute function refers to. - - If HOST keyword was used as the node template argument, the node - template which contains the attribute along the HostedOn relationship - chain will be returned. 
- """ - return self._find_node_template_containing_attribute() - -# Attributes can be explicitly created as part of the type definition -# or a property name can be implicitly used as an attribute name -def _find_node_template_containing_attribute(self): - node_tpl = self._find_node_template(self.args[0]) - if node_tpl and \ - not self._attribute_exists_in_type(node_tpl.type_definition) \ - and self.attribute_name not in node_tpl.get_properties(): - ValidationIssueCollector.appendException( - KeyError(_('Attribute "%(att)s" was not found in node ' - 'template "%(ntpl)s".') % - {'att': self.attribute_name, - 'ntpl': node_tpl.name})) - return node_tpl - -def _attribute_exists_in_type(self, type_definition): - attrs_def = type_definition.get_attributes_def() - found = [attrs_def[self.attribute_name]] \ - if self.attribute_name in attrs_def else [] - return len(found) == 1 - -def _find_host_containing_attribute(self, node_template_name=SELF): - node_template = self._find_node_template(node_template_name) - if node_template: - hosted_on_rel = EntityType.TOSCA_DEF[HOSTED_ON] - for r in node_template.requirements: - for requirement, target_name in r.items(): - target_node = self._find_node_template(target_name) - target_type = target_node.type_definition - for capability in target_type.get_capabilities_objects(): - if capability.type in \ - hosted_on_rel['valid_target_types']: - if self._attribute_exists_in_type(target_type): - return target_node - return self._find_host_containing_attribute( - target_name) - -def _find_node_template(self, node_template_name): - if node_template_name == HOST: - # Currently this is the only way to tell whether the function - # is used within the outputs section of the TOSCA template. - if isinstance(self.context, list): - ValidationIssueCollector.appendException( - ValueError(_( - '"get_attribute: [ HOST, ... 
]" is not allowed in ' - '"outputs" section of the TOSCA template.'))) - return - node_tpl = self._find_host_containing_attribute() - if not node_tpl: - ValidationIssueCollector.appendException( - ValueError(_( - '"get_attribute: [ HOST, ... ]" was used in node ' - 'template "{0}" but "{1}" was not found in ' - 'the relationship chain.').format(self.context.name, - HOSTED_ON))) - return - return node_tpl - if node_template_name == TARGET: - if not isinstance(self.context.type_definition, RelationshipType): - ValidationIssueCollector.appendException( - KeyError(_('"TARGET" keyword can only be used in context' - ' to "Relationships" target node'))) - return - return self.context.target - if node_template_name == SOURCE: - if not isinstance(self.context.type_definition, RelationshipType): - ValidationIssueCollector.appendException( - KeyError(_('"SOURCE" keyword can only be used in context' - ' to "Relationships" source node'))) - return - return self.context.source - name = self.context.name \ - if node_template_name == SELF and \ - not isinstance(self.context, list) \ - else node_template_name - for node_template in self.tosca_tpl.nodetemplates: - if node_template.name == name: - return node_template - ValidationIssueCollector.appendException( - KeyError(_( - 'Node template "{0}" was not found.' 
- ).format(node_template_name))) - -def _find_req_or_cap_attribute(self, req_or_cap, attr_name): - node_tpl = self._find_node_template(self.args[0]) - # Find attribute in node template's requirements - for r in node_tpl.requirements: - for req, node_name in r.items(): - if req == req_or_cap: - node_template = self._find_node_template(node_name) - return self._get_capability_attribute( - node_template, - req, - attr_name) - # If requirement was not found, look in node template's capabilities - return self._get_capability_attribute(node_tpl, - req_or_cap, - attr_name) - -def _get_capability_attribute(self, - node_template, - capability_name, - attr_name): - """Gets a node template capability attribute.""" - caps = node_template.get_capabilities() - if caps and capability_name in caps.keys(): - cap = caps[capability_name] - attribute = None - attrs = cap.definition.get_attributes_def() - if attrs and attr_name in attrs.keys(): - attribute = attrs[attr_name] - if not attribute: - ValidationIssueCollector.appendException( - KeyError(_('Attribute "%(attr)s" was not found in ' - 'capability "%(cap)s" of node template ' - '"%(ntpl1)s" referenced from node template ' - '"%(ntpl2)s".') % {'attr': attr_name, - 'cap': capability_name, - 'ntpl1': node_template.name, - 'ntpl2': self.context.name})) - return attribute - msg = _('Requirement/CapabilityAssignment "{0}" referenced from node template ' - '"{1}" was not found in node template "{2}".').format( - capability_name, - self.context.name, - node_template.name) - ValidationIssueCollector.appendException(KeyError(msg)) - -@property -def node_template_name(self): - return self.args[0] - -@property -def attribute_name(self): - return self.args[1] -*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/functions/GetInput.java b/src/main/java/org/onap/sdc/toscaparser/api/functions/GetInput.java deleted file mode 100644 index ee5be17..0000000 --- a/src/main/java/org/onap/sdc/toscaparser/api/functions/GetInput.java +++ /dev/null 
@@ -1,203 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * Copyright (c) 2017 AT&T Intellectual Property. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * Modifications copyright (c) 2019 Fujitsu Limited. - * ================================================================================ - */ -package org.onap.sdc.toscaparser.api.functions; - -import org.onap.sdc.toscaparser.api.DataEntity; -import org.onap.sdc.toscaparser.api.TopologyTemplate; -import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; -import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; -import org.onap.sdc.toscaparser.api.parameters.Input; - -import java.util.ArrayList; -import java.util.LinkedHashMap; - -public class GetInput extends Function { - - public static final String INDEX = "INDEX"; - public static final String INPUTS = "inputs"; - public static final String TYPE = "type"; - public static final String PROPERTIES = "properties"; - public static final String ENTRY_SCHEMA = "entry_schema"; - - public GetInput(TopologyTemplate toscaTpl, Object context, String name, ArrayList _args) { - super(toscaTpl, context, name, _args); - - } - - @Override - void validate() { - -// if(args.size() != 1) { -// //PA - changed to WARNING from CRITICAL after 
talking to Renana, 22/05/2017 -// ThreadLocalsHolder.getCollector().appendWarning(String.format( -// "ValueError: Expected one argument for function \"get_input\" but received \"%s\"", -// args.toString())); -// } - boolean bFound = false; - for (Input inp : toscaTpl.getInputs()) { - if (inp.getName().equals(args.get(0))) { - bFound = true; - break; - } - } - if (!bFound) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE158", String.format( - "UnknownInputError: Unknown input \"%s\"", args.get(0)))); - } else if (args.size() > 2) { - LinkedHashMap inputs = (LinkedHashMap) toscaTpl.getTpl().get(INPUTS); - LinkedHashMap data = (LinkedHashMap) inputs.get(getInputName()); - String type; - - for (int argumentNumber = 1; argumentNumber < args.size(); argumentNumber++) { - String dataTypeName = ""; - bFound = false; - if (INDEX.equals(args.get(argumentNumber).toString()) || (args.get(argumentNumber) instanceof Integer)) { - bFound = true; - } else { - type = (String) data.get(TYPE); - //get type name - if (type.equals("list") || type.equals("map")) { - LinkedHashMap schema = (LinkedHashMap) data.get(ENTRY_SCHEMA); - dataTypeName = (String) schema.get(TYPE); - } else { - dataTypeName = type; - } - //check property name - LinkedHashMap dataType = (LinkedHashMap) toscaTpl.getCustomDefs().get(dataTypeName); - if (dataType != null) { - LinkedHashMap props = (LinkedHashMap) dataType.get(PROPERTIES); - data = (LinkedHashMap) props.get(args.get(argumentNumber).toString()); - if (data != null) { - bFound = true; - } - } - } - if (!bFound) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE282", String.format( - "UnknownDataType: Unknown data type \"%s\"", args.get(argumentNumber)))); - } - } - } - } - - public Object result() { - if (toscaTpl.getParsedParams() != null && - toscaTpl.getParsedParams().get(getInputName()) != null) { - LinkedHashMap ttinp = (LinkedHashMap) toscaTpl.getTpl().get(INPUTS); - 
LinkedHashMap ttinpinp = (LinkedHashMap) ttinp.get(getInputName()); - String type = (String) ttinpinp.get("type"); - - Object value = DataEntity.validateDatatype( - type, toscaTpl.getParsedParams().get(getInputName()), null, toscaTpl.getCustomDefs(), null); - //SDC resolving Get Input - if (value instanceof ArrayList) { - if (args.size() == 2 && args.get(1) instanceof Integer && ((ArrayList) value).size() > (Integer) args.get(1)) { - return ((ArrayList) value).get((Integer) args.get(1)); - } - /* commented out for network cloud (SDNC) - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE273",String.format( - "GetInputError: cannot resolve input name \"%s\", the expected structure is an argument with a name of input type list and a second argument with an index in the list", args.get(0)))); - return null; -*/ - } - return value; - } - - Input inputDef = null; - for (Input inpDef : toscaTpl.getInputs()) { - if (getInputName().equals(inpDef.getName())) { - inputDef = inpDef; - break; - } - } - if (inputDef != null) { - if (args.size() == 2 && inputDef.getDefault() != null && inputDef.getDefault() instanceof ArrayList) { - if (args.get(1) instanceof Integer - && ((ArrayList) inputDef.getDefault()).size() > ((Integer) args.get(1)).intValue()) { - return ((ArrayList) inputDef.getDefault()).get(((Integer) args.get(1)).intValue()); - } -/* - commented out for network cloud (SDNC) - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE274",(String.format( - "GetInputError: cannot resolve input Def name \"%s\", the expected structure is an argument with a name of input type list and a second argument with an index in the list", args.get(0))))); - return null; -*/ - } - return inputDef.getDefault(); - } - return null; - } - - public String getInputName() { - return (String) args.get(0); - } - - public LinkedHashMap getEntrySchema() { - LinkedHashMap inputs = (LinkedHashMap) toscaTpl.getTpl().get(INPUTS); - 
LinkedHashMap inputValue = (LinkedHashMap) inputs.get(getInputName()); - return (LinkedHashMap) inputValue.get(ENTRY_SCHEMA); - } - - public ArrayList getArguments() { - return args; - } -} - -/*python - -class GetInput(Function): -"""Get a property value declared within the input of the service template. - -Arguments: - -* Input name. - -Example: - -* get_input: port -""" - -def validate(self): - if len(self.args) != 1: - ValidationIssueCollector.appendException( - ValueError(_( - 'Expected one argument for function "get_input" but ' - 'received "%s".') % self.args)) - inputs = [input.name for input in self.tosca_tpl.inputs] - if self.args[0] not in inputs: - ValidationIssueCollector.appendException( - UnknownInputError(input_name=self.args[0])) - -def result(self): - if self.tosca_tpl.parsed_params and \ - self.input_name in self.tosca_tpl.parsed_params: - return DataEntity.validate_datatype( - self.tosca_tpl.tpl['inputs'][self.input_name]['type'], - self.tosca_tpl.parsed_params[self.input_name]) - - input = [input_def for input_def in self.tosca_tpl.inputs - if self.input_name == input_def.name][0] - return input.default - -@property -def input_name(self): - return self.args[0] - -*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/functions/GetOperationOutput.java b/src/main/java/org/onap/sdc/toscaparser/api/functions/GetOperationOutput.java deleted file mode 100644 index 06a28d6..0000000 --- a/src/main/java/org/onap/sdc/toscaparser/api/functions/GetOperationOutput.java +++ /dev/null @@ -1,243 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * SDC - * ================================================================================ - * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. 
- * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package org.onap.sdc.toscaparser.api.functions; - -import org.onap.sdc.toscaparser.api.EntityTemplate; -import org.onap.sdc.toscaparser.api.NodeTemplate; -import org.onap.sdc.toscaparser.api.RelationshipTemplate; -import org.onap.sdc.toscaparser.api.TopologyTemplate; -import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; -import org.onap.sdc.toscaparser.api.elements.InterfacesDef; -import org.onap.sdc.toscaparser.api.elements.RelationshipType; -import org.onap.sdc.toscaparser.api.elements.StatefulEntityType; -import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; - -import java.util.ArrayList; - - -public class GetOperationOutput extends Function { - - public GetOperationOutput(TopologyTemplate ttpl, Object context, String name, ArrayList args) { - super(ttpl, context, name, args); - } - - @Override - public void validate() { - if (args.size() == 4) { - _findNodeTemplate((String) args.get(0)); - String interfaceName = _findInterfaceName((String) args.get(1)); - _findOperationName(interfaceName, (String) args.get(2)); - } else { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE159", - "ValueError: Illegal arguments for function \"get_operation_output\". 
" + - "Expected arguments: \"template_name\",\"interface_name\"," + - "\"operation_name\",\"output_variable_name\"")); - } - } - - private String _findInterfaceName(String _interfaceName) { - boolean bFound = false; - for (String sect : InterfacesDef.SECTIONS) { - if (sect.equals(_interfaceName)) { - bFound = true; - break; - } - } - if (bFound) { - return _interfaceName; - } else { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE160", String.format( - "ValueError: invalid interface name \"%s\" in \"get_operation_output\"", - _interfaceName))); - return null; - } - } - - private String _findOperationName(String interfaceName, String operationName) { - - if (interfaceName.equals("Configure") || - interfaceName.equals("tosca.interfaces.node.relationship.Configure")) { - boolean bFound = false; - for (String sect : StatefulEntityType.INTERFACE_RELATIONSHIP_CONFIGURE_OPERATIONS) { - if (sect.equals(operationName)) { - bFound = true; - break; - } - } - if (bFound) { - return operationName; - } else { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE161", String.format( - "ValueError: Invalid operation of Configure interface \"%s\" in \"get_operation_output\"", - operationName))); - return null; - } - } - if (interfaceName.equals("Standard") || - interfaceName.equals("tosca.interfaces.node.lifecycle.Standard")) { - boolean bFound = false; - for (String sect : StatefulEntityType.INTERFACE_NODE_LIFECYCLE_OPERATIONS) { - if (sect.equals(operationName)) { - bFound = true; - break; - } - } - if (bFound) { - return operationName; - } else { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE162", String.format( - "ValueError: Invalid operation of Configure interface \"%s\" in \"get_operation_output\"", - operationName))); - return null; - } - } else { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE163", String.format( - 
"ValueError: Invalid interface name \"%s\" in \"get_operation_output\"", - interfaceName))); - return null; - } - } - - private NodeTemplate _findNodeTemplate(String nodeTemplateName) { - if (nodeTemplateName.equals(TARGET)) { - if (!(((EntityTemplate) context).getTypeDefinition() instanceof RelationshipType)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE164", - "KeyError: \"TARGET\" keyword can only be used in context " + - " to \"Relationships\" target node")); - return null; - } - return ((RelationshipTemplate) context).getTarget(); - } - if (nodeTemplateName.equals(SOURCE)) { - if (!(((EntityTemplate) context).getTypeDefinition() instanceof RelationshipType)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE165", - "KeyError: \"SOURCE\" keyword can only be used in context " + - " to \"Relationships\" source node")); - return null; - } - return ((RelationshipTemplate) context).getTarget(); - } - String name; - if (nodeTemplateName.equals(SELF) && !(context instanceof ArrayList)) { - name = ((NodeTemplate) context).getName(); - } else { - name = nodeTemplateName; - } - for (NodeTemplate nt : toscaTpl.getNodeTemplates()) { - if (nodeTemplateName.equals(name)) { - return nt; - } - } - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE166", String.format( - "KeyError: Node template \"%s\" was not found", nodeTemplateName))); - return null; - } - - @Override - public Object result() { - return this; - } - -} - -/*python - -class GetOperationOutput(Function): -def validate(self): - if len(self.args) == 4: - self._find_node_template(self.args[0]) - interface_name = self._find_interface_name(self.args[1]) - self._find_operation_name(interface_name, self.args[2]) - else: - ValidationIssueCollector.appendException( - ValueError(_('Illegal arguments for function "{0}". 
Expected ' - 'arguments: "template_name","interface_name",' - '"operation_name","output_variable_name"' - ).format(GET_OPERATION_OUTPUT))) - return - -def _find_interface_name(self, interface_name): - if interface_name in toscaparser.elements.interfaces.SECTIONS: - return interface_name - else: - ValidationIssueCollector.appendException( - ValueError(_('Enter a valid interface name' - ).format(GET_OPERATION_OUTPUT))) - return - -def _find_operation_name(self, interface_name, operation_name): - if(interface_name == 'Configure' or - interface_name == 'tosca.interfaces.node.relationship.Configure'): - if(operation_name in - StatefulEntityType. - interfaces_relationship_configure_operations): - return operation_name - else: - ValidationIssueCollector.appendException( - ValueError(_('Enter an operation of Configure interface' - ).format(GET_OPERATION_OUTPUT))) - return - elif(interface_name == 'Standard' or - interface_name == 'tosca.interfaces.node.lifecycle.Standard'): - if(operation_name in - StatefulEntityType.interfaces_node_lifecycle_operations): - return operation_name - else: - ValidationIssueCollector.appendException( - ValueError(_('Enter an operation of Standard interface' - ).format(GET_OPERATION_OUTPUT))) - return - else: - ValidationIssueCollector.appendException( - ValueError(_('Enter a valid operation name' - ).format(GET_OPERATION_OUTPUT))) - return - -def _find_node_template(self, node_template_name): - if node_template_name == TARGET: - if not isinstance(self.context.type_definition, RelationshipType): - ValidationIssueCollector.appendException( - KeyError(_('"TARGET" keyword can only be used in context' - ' to "Relationships" target node'))) - return - return self.context.target - if node_template_name == SOURCE: - if not isinstance(self.context.type_definition, RelationshipType): - ValidationIssueCollector.appendException( - KeyError(_('"SOURCE" keyword can only be used in context' - ' to "Relationships" source node'))) - return - return 
self.context.source - name = self.context.name \ - if node_template_name == SELF and \ - not isinstance(self.context, list) \ - else node_template_name - for node_template in self.tosca_tpl.nodetemplates: - if node_template.name == name: - return node_template - ValidationIssueCollector.appendException( - KeyError(_( - 'Node template "{0}" was not found.' - ).format(node_template_name))) - -def result(self): - return self -*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/functions/GetProperty.java b/src/main/java/org/onap/sdc/toscaparser/api/functions/GetProperty.java deleted file mode 100644 index 90e0a8e..0000000 --- a/src/main/java/org/onap/sdc/toscaparser/api/functions/GetProperty.java +++ /dev/null @@ -1,639 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * SDC - * ================================================================================ - * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package org.onap.sdc.toscaparser.api.functions; - -import org.onap.sdc.toscaparser.api.CapabilityAssignment; -import org.onap.sdc.toscaparser.api.NodeTemplate; -import org.onap.sdc.toscaparser.api.Property; -import org.onap.sdc.toscaparser.api.RelationshipTemplate; -import org.onap.sdc.toscaparser.api.RequirementAssignment; -import org.onap.sdc.toscaparser.api.TopologyTemplate; -import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; -import org.onap.sdc.toscaparser.api.elements.CapabilityTypeDef; -import org.onap.sdc.toscaparser.api.elements.EntityType; -import org.onap.sdc.toscaparser.api.elements.NodeType; -import org.onap.sdc.toscaparser.api.elements.PropertyDef; -import org.onap.sdc.toscaparser.api.elements.RelationshipType; -import org.onap.sdc.toscaparser.api.elements.StatefulEntityType; -import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; - -import java.util.ArrayList; -import java.util.LinkedHashMap; - -public class GetProperty extends Function { - // Get a property value of an entity defined in the same service template - - // Arguments: - - // * Node template name | SELF | HOST | SOURCE | TARGET. - // * Requirement or capability name (optional). - // * Property name. - - // If requirement or capability name is specified, the behavior is as follows: - // The req or cap name is first looked up in the specified node template's - // requirements. - // If found, it would search for a matching capability - // of an other node template and get its property as specified in function - // arguments. - // Otherwise, the req or cap name would be looked up in the specified - // node template's capabilities and if found, it would return the property of - // the capability as specified in function arguments. 
- - // Examples: - - // * { get_property: [ mysql_server, port ] } - // * { get_property: [ SELF, db_port ] } - // * { get_property: [ SELF, database_endpoint, port ] } - // * { get_property: [ SELF, database_endpoint, port, 1 ] } - - - public GetProperty(TopologyTemplate ttpl, Object context, String name, ArrayList args) { - super(ttpl, context, name, args); - } - - @Override - void validate() { - if (args.size() < 2) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE167", - "ValueError: Illegal arguments for function \"get_property\". Expected arguments: \"node-template-name\", \"req-or-cap\" (optional), \"property name.\"")); - return; - } - if (args.size() == 2) { - Property foundProp = _findProperty((String) args.get(1)); - if (foundProp == null) { - return; - } - Object prop = foundProp.getValue(); - if (prop instanceof Function) { - getFunction(toscaTpl, context, prop, toscaTpl.getResolveGetInput()); - } - } else if (args.size() >= 3) { - // do not use _find_property to avoid raise KeyError - // if the prop is not found - // First check if there is property with this name - NodeTemplate nodeTpl = _findNodeTemplate((String) args.get(0)); - LinkedHashMap props; - if (nodeTpl != null) { - props = nodeTpl.getProperties(); - } else { - props = new LinkedHashMap<>(); - } - int index = 2; - Object propertyValue; - if (props.get(args.get(1)) != null) { - propertyValue = ((Property) props.get(args.get(1))).getValue(); - } else { - index = 3; - // then check the req or caps - propertyValue = _findReqOrCapProperty((String) args.get(1), (String) args.get(2)); - } - - if (args.size() > index) { - for (Object elem : args.subList(index, args.size() - 1)) { - if (propertyValue instanceof ArrayList) { - int intElem = (int) elem; - propertyValue = _getIndexValue(propertyValue, intElem); - } else { - propertyValue = _getAttributeValue(propertyValue, (String) elem); - } - } - } - } - } - - @SuppressWarnings("unchecked") - private Object 
_findReqOrCapProperty(String reqOrCap, String propertyName) { - NodeTemplate nodeTpl = _findNodeTemplate((String) args.get(0)); - if (nodeTpl == null) { - return null; - } - // look for property in node template's requirements - for (RequirementAssignment req : nodeTpl.getRequirements().getAll()) { - String nodeName = req.getNodeTemplateName(); - if (req.getName().equals(reqOrCap)) { - NodeTemplate nodeTemplate = _findNodeTemplate(nodeName); - return _getCapabilityProperty(nodeTemplate, req.getName(), propertyName, true); - } - } - // If requirement was not found, look in node template's capabilities - return _getCapabilityProperty(nodeTpl, reqOrCap, propertyName, true); - } - - private Object _getCapabilityProperty(NodeTemplate nodeTemplate, - String capabilityName, - String propertyName, - boolean throwErrors) { - - // Gets a node template capability property - Object property = null; - CapabilityAssignment cap = nodeTemplate.getCapabilities().getCapabilityByName(capabilityName); - if (cap != null) { - LinkedHashMap props = cap.getProperties(); - if (props != null && props.get(propertyName) != null) { - property = ((Property) props.get(propertyName)).getValue(); - } - if (property == null && throwErrors) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE168", String.format( - "KeyError: Property \"%s\" was not found in capability \"%s\" of node template \"%s\" referenced from node template \"%s\"", - propertyName, capabilityName, nodeTemplate.getName(), ((NodeTemplate) context).getName()))); - } - return property; - } - if (throwErrors) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE169", String.format( - "KeyError: Requirement/CapabilityAssignment \"%s\" referenced from node template \"%s\" was not found in node template \"%s\"", - capabilityName, ((NodeTemplate) context).getName(), nodeTemplate.getName()))); - } - - return null; - } - - private Property _findProperty(String 
propertyName) { - NodeTemplate nodeTpl = _findNodeTemplate((String) args.get(0)); - if (nodeTpl == null) { - return null; - } - LinkedHashMap props = nodeTpl.getProperties(); - Property found = props.get(propertyName); - if (found == null) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE170", String.format( - "KeyError: Property \"%s\" was not found in node template \"%s\"", - propertyName, nodeTpl.getName()))); - } - return found; - } - - private NodeTemplate _findNodeTemplate(String nodeTemplateName) { - if (nodeTemplateName.equals(SELF)) { - return (NodeTemplate) context; - } - // enable the HOST value in the function - if (nodeTemplateName.equals(HOST)) { - NodeTemplate node = _findHostContainingProperty(null); - if (node == null) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE171", String.format( - "KeyError: Property \"%s\" was not found in capability \"%s\" of node template \"%s\" referenced from node template \"%s\"", - (String) args.get(2), (String) args.get(1), ((NodeTemplate) context).getName()))); - return null; - } - return node; - } - if (nodeTemplateName.equals(TARGET)) { - if (!(((RelationshipTemplate) context).getTypeDefinition() instanceof RelationshipType)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE172", - "KeyError: \"TARGET\" keyword can only be used in context to \"Relationships\" target node")); - return null; - } - return ((RelationshipTemplate) context).getTarget(); - } - if (nodeTemplateName.equals(SOURCE)) { - if (!(((RelationshipTemplate) context).getTypeDefinition() instanceof RelationshipType)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE173", - "KeyError: \"SOURCE\" keyword can only be used in context to \"Relationships\" target node")); - return null; - } - return ((RelationshipTemplate) context).getSource(); - } - if (toscaTpl.getNodeTemplates() == null) { - 
return null; - } - for (NodeTemplate nodeTemplate : toscaTpl.getNodeTemplates()) { - if (nodeTemplate.getName().equals(nodeTemplateName)) { - return nodeTemplate; - } - } - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE174", String.format( - "KeyError: Node template \"%s\" was not found. Referenced from Node Template \"%s\"", - nodeTemplateName, ((NodeTemplate) context).getName()))); - - return null; - } - - @SuppressWarnings("rawtypes") - private Object _getIndexValue(Object value, int index) { - if (value instanceof ArrayList) { - if (index < ((ArrayList) value).size()) { - return ((ArrayList) value).get(index); - } else { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE175", String.format( - "KeyError: Property \"%s\" found in capability \"%s\" referenced from node template \"%s\" must have an element with index %d", - args.get(2), args.get(1), ((NodeTemplate) context).getName(), index))); - - } - } else { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE176", String.format( - "KeyError: Property \"%s\" found in capability \"%s\" referenced from node template \"%s\" must be a list", - args.get(2), args.get(1), ((NodeTemplate) context).getName()))); - } - return null; - } - - @SuppressWarnings("unchecked") - private Object _getAttributeValue(Object value, String attribute) { - if (value instanceof LinkedHashMap) { - Object ov = ((LinkedHashMap) value).get(attribute); - if (ov != null) { - return ov; - } else { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE177", String.format( - "KeyError: Property \"%s\" found in capability \"%s\" referenced from node template \"%s\" must have an attribute named \"%s\"", - args.get(2), args.get(1), ((NodeTemplate) context).getName(), attribute))); - } - } else { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE178", String.format( - 
"KeyError: Property \"%s\" found in capability \"%s\" referenced from node template \"%s\" must be a dict", - args.get(2), args.get(1), ((NodeTemplate) context).getName()))); - } - return null; - } - - // Add this functions similar to get_attribute case - private NodeTemplate _findHostContainingProperty(String nodeTemplateName) { - if (nodeTemplateName == null) { - nodeTemplateName = SELF; - } - NodeTemplate nodeTemplate = _findNodeTemplate(nodeTemplateName); - LinkedHashMap hostedOnRel = (LinkedHashMap) - EntityType.TOSCA_DEF.get(HOSTED_ON); - for (RequirementAssignment requirement : nodeTemplate.getRequirements().getAll()) { - String targetName = requirement.getNodeTemplateName(); - NodeTemplate targetNode = _findNodeTemplate(targetName); - NodeType targetType = (NodeType) targetNode.getTypeDefinition(); - for (CapabilityTypeDef capDef : targetType.getCapabilitiesObjects()) { - if (capDef.inheritsFrom((ArrayList) hostedOnRel.get("valid_target_types"))) { - if (_propertyExistsInType(targetType)) { - return targetNode; - } - // If requirement was not found, look in node - // template's capabilities - if (args.size() > 2 && - _getCapabilityProperty(targetNode, (String) args.get(1), (String) args.get(2), false) != null) { - return targetNode; - } - - return _findHostContainingProperty(targetName); - } - } - - } - return null; - } - - private boolean _propertyExistsInType(StatefulEntityType typeDefinition) { - LinkedHashMap propsDef = typeDefinition.getPropertiesDef(); - return propsDef.keySet().contains((String) args.get(1)); - } - - @Override - public Object result() { - Object propertyValue; - if (args.size() >= 3) { - // First check if there is property with this name - NodeTemplate nodeTpl = _findNodeTemplate((String) args.get(0)); - LinkedHashMap props; - if (nodeTpl != null) { - props = nodeTpl.getProperties(); - } else { - props = new LinkedHashMap<>(); - } - int index = 2; - if (props.get(args.get(1)) != null) { - propertyValue = ((Property) 
props.get(args.get(1))).getValue(); - } else { - index = 3; - // then check the req or caps - propertyValue = _findReqOrCapProperty((String) args.get(1), (String) args.get(2)); - } - - if (args.size() > index) { - for (Object elem : args.subList(index, args.size() - 1)) { - if (propertyValue instanceof ArrayList) { - int intElem = (int) elem; - propertyValue = _getIndexValue(propertyValue, intElem); - } else { - propertyValue = _getAttributeValue(propertyValue, (String) elem); - } - } - } - } else { - propertyValue = _findProperty((String) args.get(1)).getValue(); - } - if (propertyValue instanceof Function) { - return ((Function) propertyValue).result(); - } - return getFunction(toscaTpl, context, propertyValue, toscaTpl.getResolveGetInput()); - } - - public String getNodeTemplateName() { - return (String) args.get(0); - } - - public String getPropertyName() { - if (args.size() > 2) { - return (String) args.get(2); - } - return (String) args.get(1); - } - - public String getReqorCap() { - if (args.size() > 2) { - return (String) args.get(1); - } - return null; - } - -} - -/*python - -class GetProperty(Function): -"""Get a property value of an entity defined in the same service template. - -Arguments: - -* Node template name | SELF | HOST | SOURCE | TARGET. -* Requirement or capability name (optional). -* Property name. - -If requirement or capability name is specified, the behavior is as follows: -The req or cap name is first looked up in the specified node template's -requirements. -If found, it would search for a matching capability -of an other node template and get its property as specified in function -arguments. -Otherwise, the req or cap name would be looked up in the specified -node template's capabilities and if found, it would return the property of -the capability as specified in function arguments. 
- -Examples: - -* { get_property: [ mysql_server, port ] } -* { get_property: [ SELF, db_port ] } -* { get_property: [ SELF, database_endpoint, port ] } -* { get_property: [ SELF, database_endpoint, port, 1 ] } -""" - -def validate(self): - if len(self.args) < 2: - ValidationIssueCollector.appendException( - ValueError(_( - 'Expected arguments: "node-template-name", "req-or-cap" ' - '(optional), "property name".'))) - return - if len(self.args) == 2: - found_prop = self._find_property(self.args[1]) - if not found_prop: - return - prop = found_prop.value - if not isinstance(prop, Function): - get_function(self.tosca_tpl, self.context, prop) - elif len(self.args) >= 3: - # do not use _find_property to avoid raise KeyError - # if the prop is not found - # First check if there is property with this name - node_tpl = self._find_node_template(self.args[0]) - props = node_tpl.get_properties() if node_tpl else [] - index = 2 - found = [props[self.args[1]]] if self.args[1] in props else [] - if found: - property_value = found[0].value - else: - index = 3 - # then check the req or caps - property_value = self._find_req_or_cap_property(self.args[1], - self.args[2]) - if len(self.args) > index: - for elem in self.args[index:]: - if isinstance(property_value, list): - int_elem = int(elem) - property_value = self._get_index_value(property_value, - int_elem) - else: - property_value = self._get_attribute_value( - property_value, - elem) - -def _find_req_or_cap_property(self, req_or_cap, property_name): - node_tpl = self._find_node_template(self.args[0]) - # Find property in node template's requirements - for r in node_tpl.requirements: - for req, node_name in r.items(): - if req == req_or_cap: - node_template = self._find_node_template(node_name) - return self._get_capability_property( - node_template, - req, - property_name) - # If requirement was not found, look in node template's capabilities - return self._get_capability_property(node_tpl, - req_or_cap, - property_name) - 
-def _get_capability_property(self, - node_template, - capability_name, - property_name): - """Gets a node template capability property.""" - caps = node_template.get_capabilities() - if caps and capability_name in caps.keys(): - cap = caps[capability_name] - property = None - props = cap.get_properties() - if props and property_name in props.keys(): - property = props[property_name].value - if not property: - ValidationIssueCollector.appendException( - KeyError(_('Property "%(prop)s" was not found in ' - 'capability "%(cap)s" of node template ' - '"%(ntpl1)s" referenced from node template ' - '"%(ntpl2)s".') % {'prop': property_name, - 'cap': capability_name, - 'ntpl1': node_template.name, - 'ntpl2': self.context.name})) - return property - msg = _('Requirement/CapabilityAssignment "{0}" referenced from node template ' - '"{1}" was not found in node template "{2}".').format( - capability_name, - self.context.name, - node_template.name) - ValidationIssueCollector.appendException(KeyError(msg)) - -def _find_property(self, property_name): - node_tpl = self._find_node_template(self.args[0]) - if not node_tpl: - return - props = node_tpl.get_properties() - found = [props[property_name]] if property_name in props else [] - if len(found) == 0: - ValidationIssueCollector.appendException( - KeyError(_('Property "%(prop)s" was not found in node ' - 'template "%(ntpl)s".') % - {'prop': property_name, - 'ntpl': node_tpl.name})) - return None - return found[0] - -def _find_node_template(self, node_template_name): - if node_template_name == SELF: - return self.context - # enable the HOST value in the function - if node_template_name == HOST: - return self._find_host_containing_property() - if node_template_name == TARGET: - if not isinstance(self.context.type_definition, RelationshipType): - ValidationIssueCollector.appendException( - KeyError(_('"TARGET" keyword can only be used in context' - ' to "Relationships" target node'))) - return - return self.context.target - if 
node_template_name == SOURCE: - if not isinstance(self.context.type_definition, RelationshipType): - ValidationIssueCollector.appendException( - KeyError(_('"SOURCE" keyword can only be used in context' - ' to "Relationships" source node'))) - return - return self.context.source - if not hasattr(self.tosca_tpl, 'nodetemplates'): - return - for node_template in self.tosca_tpl.nodetemplates: - if node_template.name == node_template_name: - return node_template - ValidationIssueCollector.appendException( - KeyError(_( - 'Node template "{0}" was not found.' - ).format(node_template_name))) - -def _get_index_value(self, value, index): - if isinstance(value, list): - if index < len(value): - return value[index] - else: - ValidationIssueCollector.appendException( - KeyError(_( - "Property '{0}' found in capability '{1}'" - " referenced from node template {2}" - " must have an element with index {3}."). - format(self.args[2], - self.args[1], - self.context.name, - index))) - else: - ValidationIssueCollector.appendException( - KeyError(_( - "Property '{0}' found in capability '{1}'" - " referenced from node template {2}" - " must be a list.").format(self.args[2], - self.args[1], - self.context.name))) - -def _get_attribute_value(self, value, attibute): - if isinstance(value, dict): - if attibute in value: - return value[attibute] - else: - ValidationIssueCollector.appendException( - KeyError(_( - "Property '{0}' found in capability '{1}'" - " referenced from node template {2}" - " must have an attribute named {3}."). 
- format(self.args[2], - self.args[1], - self.context.name, - attibute))) - else: - ValidationIssueCollector.appendException( - KeyError(_( - "Property '{0}' found in capability '{1}'" - " referenced from node template {2}" - " must be a dict.").format(self.args[2], - self.args[1], - self.context.name))) - -# Add this functions similar to get_attribute case -def _find_host_containing_property(self, node_template_name=SELF): - node_template = self._find_node_template(node_template_name) - hosted_on_rel = EntityType.TOSCA_DEF[HOSTED_ON] - for r in node_template.requirements: - for requirement, target_name in r.items(): - target_node = self._find_node_template(target_name) - target_type = target_node.type_definition - for capability in target_type.get_capabilities_objects(): - if capability.type in hosted_on_rel['valid_target_types']: - if self._property_exists_in_type(target_type): - return target_node - return self._find_host_containing_property( - target_name) - return None - -def _property_exists_in_type(self, type_definition): - props_def = type_definition.get_properties_def() - found = [props_def[self.args[1]]] \ - if self.args[1] in props_def else [] - return len(found) == 1 - -def result(self): - if len(self.args) >= 3: - # First check if there is property with this name - node_tpl = self._find_node_template(self.args[0]) - props = node_tpl.get_properties() if node_tpl else [] - index = 2 - found = [props[self.args[1]]] if self.args[1] in props else [] - if found: - property_value = found[0].value - else: - index = 3 - # then check the req or caps - property_value = self._find_req_or_cap_property(self.args[1], - self.args[2]) - if len(self.args) > index: - for elem in self.args[index:]: - if isinstance(property_value, list): - int_elem = int(elem) - property_value = self._get_index_value(property_value, - int_elem) - else: - property_value = self._get_attribute_value( - property_value, - elem) - else: - property_value = self._find_property(self.args[1]).value 
- if isinstance(property_value, Function): - return property_value.result() - return get_function(self.tosca_tpl, - self.context, - property_value) - -@property -def node_template_name(self): - return self.args[0] - -@property -def property_name(self): - if len(self.args) > 2: - return self.args[2] - return self.args[1] - -@property -def req_or_cap(self): - if len(self.args) > 2: - return self.args[1] - return None -*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/functions/Token.java b/src/main/java/org/onap/sdc/toscaparser/api/functions/Token.java deleted file mode 100644 index 240ce85..0000000 --- a/src/main/java/org/onap/sdc/toscaparser/api/functions/Token.java +++ /dev/null @@ -1,130 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * SDC - * ================================================================================ - * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package org.onap.sdc.toscaparser.api.functions; - -import org.onap.sdc.toscaparser.api.TopologyTemplate; -import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; - -import java.util.ArrayList; - -import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; - -public class Token extends Function { - // Validate the function and provide an instance of the function - - //The token function is used within a TOSCA service template on a string to - //parse out (tokenize) substrings separated by one or more token characters - //within a larger string. - - //Arguments: - - //* The composite string that contains one or more substrings separated by - // token characters. - //* The string that contains one or more token characters that separate - // substrings within the composite string. - //* The integer indicates the index of the substring to return from the - // composite string. Note that the first substring is denoted by using - // the '0' (zero) integer value. - - //Example: - - // [ get_attribute: [ my_server, data_endpoint, ip_address ], ':', 1 ] - - - public Token(TopologyTemplate ttpl, Object context, String name, ArrayList args) { - super(ttpl, context, name, args); - } - - @Override - public Object result() { - return this; - } - - @Override - void validate() { - if (args.size() < 3) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE180", - "ValueError: Invalid arguments for function \"token\". " + - "Expected at least three arguments")); - } else { - if (!(args.get(1) instanceof String) || - ((String) args.get(1)).length() != 1) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE181", - "ValueError: Invalid arguments for function \"token\". 
" + - "Expected single char value as second argument")); - } - if (!(args.get(2) instanceof Integer)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE182", - "ValueError: Invalid arguments for function \"token\"" + - "Expected integer value as third argument")); - } - } - } - -} - -/*python - -class Token(Function): -"""Validate the function and provide an instance of the function - -The token function is used within a TOSCA service template on a string to -parse out (tokenize) substrings separated by one or more token characters -within a larger string. - - -Arguments: - -* The composite string that contains one or more substrings separated by - token characters. -* The string that contains one or more token characters that separate - substrings within the composite string. -* The integer indicates the index of the substring to return from the - composite string. Note that the first substring is denoted by using - the '0' (zero) integer value. - -Example: - - [ get_attribute: [ my_server, data_endpoint, ip_address ], ':', 1 ] - -""" - -def validate(self): - if len(self.args) < 3: - ValidationIssueCollector.appendException( - ValueError(_('Invalid arguments for function "{0}". Expected ' - 'at least three arguments.').format(TOKEN))) - else: - if not isinstance(self.args[1], str) or len(self.args[1]) != 1: - ValidationIssueCollector.appendException( - ValueError(_('Invalid arguments for function "{0}". ' - 'Expected single char value as second ' - 'argument.').format(TOKEN))) - - if not isinstance(self.args[2], int): - ValidationIssueCollector.appendException( - ValueError(_('Invalid arguments for function "{0}". 
' - 'Expected integer value as third ' - 'argument.').format(TOKEN))) - -def result(self): - return self -*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/parameters/Annotation.java b/src/main/java/org/onap/sdc/toscaparser/api/parameters/Annotation.java deleted file mode 100644 index a34ebb5..0000000 --- a/src/main/java/org/onap/sdc/toscaparser/api/parameters/Annotation.java +++ /dev/null @@ -1,98 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * SDC - * ================================================================================ - * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package org.onap.sdc.toscaparser.api.parameters; - -import org.onap.sdc.toscaparser.api.Property; -import org.onap.sdc.toscaparser.api.elements.enums.ToscaElementNames; - -import java.util.ArrayList; -import java.util.Map; -import java.util.Optional; -import java.util.stream.Collectors; - -public class Annotation { - - private static final String HEAT = "HEAT"; - private String name; - private String type; - private ArrayList properties; - - - public Annotation() { - } - - @SuppressWarnings("unchecked") - public Annotation(Map.Entry annotationEntry) { - if (annotationEntry != null) { - name = annotationEntry.getKey(); - Map annValue = (Map) annotationEntry.getValue(); - type = (String) annValue.get(ToscaElementNames.TYPE.getName()); - properties = fetchProperties((Map) annValue.get(ToscaElementNames.PROPERTIES.getName())); - } - } - - public String getName() { - return name; - } - - public void setName(String name) { - this.name = name; - } - - public String getType() { - return type; - } - - public void setType(String type) { - this.type = type; - } - - public ArrayList getProperties() { - return properties; - } - - public void setProperties(ArrayList properties) { - this.properties = properties; - } - - private ArrayList fetchProperties(Map properties) { - if (properties != null) { - return (ArrayList) properties.entrySet().stream() - .map(Property::new) - .collect(Collectors.toList()); - } - return null; - } - - public boolean isHeatSourceType() { - if (properties == null) { - return false; - } - Optional sourceType = properties.stream() - .filter(p -> p.getName().equals(ToscaElementNames.SOURCE_TYPE.getName())) - .findFirst(); - if (!sourceType.isPresent()) { - return false; - } - return sourceType.get().getValue() != null && ((String) sourceType.get().getValue()).equals(HEAT); - } - -} diff --git a/src/main/java/org/onap/sdc/toscaparser/api/parameters/Input.java 
b/src/main/java/org/onap/sdc/toscaparser/api/parameters/Input.java deleted file mode 100644 index 5d3ecb4..0000000 --- a/src/main/java/org/onap/sdc/toscaparser/api/parameters/Input.java +++ /dev/null @@ -1,199 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * SDC - * ================================================================================ - * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package org.onap.sdc.toscaparser.api.parameters; - -import org.onap.sdc.toscaparser.api.DataEntity; -import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; -import org.onap.sdc.toscaparser.api.elements.EntityType; -import org.onap.sdc.toscaparser.api.elements.constraints.Constraint; -import org.onap.sdc.toscaparser.api.elements.constraints.Schema; -import org.onap.sdc.toscaparser.api.elements.enums.ToscaElementNames; -import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.LinkedHashMap; -import java.util.Map; -import java.util.stream.Collectors; - -public class Input { - - private static final String TYPE = "type"; - private static final String DESCRIPTION = "description"; - private static final String DEFAULT = "default"; - private static final String CONSTRAINTS = "constraints"; - private static final String REQUIRED = "required"; - private static final String STATUS = "status"; - private static final String ENTRY_SCHEMA = "entry_schema"; - - public static final String INTEGER = "integer"; - public static final String STRING = "string"; - public static final String BOOLEAN = "boolean"; - public static final String FLOAT = "float"; - public static final String LIST = "list"; - public static final String MAP = "map"; - public static final String JSON = "json"; - - private static String[] inputField = { - TYPE, DESCRIPTION, DEFAULT, CONSTRAINTS, REQUIRED, STATUS, ENTRY_SCHEMA - }; - - private static String[] primitiveTypes = { - INTEGER, STRING, BOOLEAN, FLOAT, LIST, MAP, JSON - }; - - private String name; - private Schema schema; - private LinkedHashMap customDefs; - private Map annotations; - - public Input() { - } - - public Input(String name, LinkedHashMap schema, LinkedHashMap customDefinitions) { - this.name = name; - this.schema = new Schema(name, schema); - customDefs = 
customDefinitions; - } - - @SuppressWarnings("unchecked") - public void parseAnnotations() { - if (schema.getSchema() != null) { - LinkedHashMap annotations = (LinkedHashMap) schema.getSchema().get(ToscaElementNames.ANNOTATIONS.getName()); - if (annotations != null) { - setAnnotations(annotations.entrySet().stream() - .map(Annotation::new) - .filter(Annotation::isHeatSourceType) - .collect(Collectors.toMap(Annotation::getName, a -> a))); - } - } - } - - public String getName() { - return name; - } - - public String getType() { - return schema.getType(); - } - - public String getDescription() { - return schema.getDescription(); - } - - public boolean isRequired() { - return schema.isRequired(); - } - - public Object getDefault() { - return schema.getDefault(); - } - - public ArrayList getConstraints() { - return schema.getConstraints(); - } - - public void validate(Object value) { - validateField(); - validateType(getType()); - if (value != null) { - validateValue(value); - } - } - - private void validateField() { - for (String key : schema.getSchema().keySet()) { - boolean bFound = false; - for (String ifld : inputField) { - if (key.equals(ifld)) { - bFound = true; - break; - } - } - if (!bFound) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE214", String.format( - "UnknownFieldError: Input \"%s\" contains unknown field \"%s\"", - name, key))); - } - } - } - - private void validateType(String inputType) { - boolean bFound = false; - for (String pt : Schema.PROPERTY_TYPES) { - if (pt.equals(inputType)) { - bFound = true; - break; - } - } - - if (!bFound) { - if (customDefs.get(inputType) != null) { - bFound = true; - } - } - - if (!bFound) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE215", String.format( - "ValueError: Invalid type \"%s\"", inputType))); - } - } - - @SuppressWarnings("unchecked") - private void validateValue(Object value) { - Object datatype; - if 
(EntityType.TOSCA_DEF.get(getType()) != null) { - datatype = EntityType.TOSCA_DEF.get(getType()); - } else if (EntityType.TOSCA_DEF.get(EntityType.DATATYPE_NETWORK_PREFIX + getType()) != null) { - datatype = EntityType.TOSCA_DEF.get(EntityType.DATATYPE_NETWORK_PREFIX + getType()); - } - - String type = getType(); - // if it's one of the basic types DON'T look in customDefs - if (Arrays.asList(primitiveTypes).contains(type)) { - DataEntity.validateDatatype(getType(), value, null, customDefs, null); - return; - } else if (customDefs.get(getType()) != null) { - datatype = customDefs.get(getType()); - DataEntity.validateDatatype(getType(), value, (LinkedHashMap) datatype, customDefs, null); - return; - } - - DataEntity.validateDatatype(getType(), value, null, customDefs, null); - } - - public Map getAnnotations() { - return annotations; - } - - private void setAnnotations(Map annotations) { - this.annotations = annotations; - } - - public void resetAnnotaions() { - annotations = null; - } - - public LinkedHashMap getEntrySchema() { - return schema.getEntrySchema(); - } - -} diff --git a/src/main/java/org/onap/sdc/toscaparser/api/parameters/Output.java b/src/main/java/org/onap/sdc/toscaparser/api/parameters/Output.java deleted file mode 100644 index 8ef82b3..0000000 --- a/src/main/java/org/onap/sdc/toscaparser/api/parameters/Output.java +++ /dev/null @@ -1,129 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * SDC - * ================================================================================ - * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package org.onap.sdc.toscaparser.api.parameters; - -import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; -import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; - -import java.util.LinkedHashMap; - -public class Output { - - private static final String DESCRIPTION = "description"; - public static final String VALUE = "value"; - private static final String[] OUTPUT_FIELD = {DESCRIPTION, VALUE}; - - private String name; - private LinkedHashMap attributes; - - public Output(String name, LinkedHashMap attributes) { - this.name = name; - this.attributes = attributes; - } - - public String getDescription() { - return (String) attributes.get(DESCRIPTION); - } - - public Object getValue() { - return attributes.get(VALUE); - } - - public void validate() { - validateField(); - } - - private void validateField() { - if (attributes == null) { - //TODO wrong error message... - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE216", String.format( - "ValidationError: Output \"%s\" has wrong type. 
Expecting a dict", - name))); - } - - if (getValue() == null) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE217", String.format( - "MissingRequiredFieldError: Output \"%s\" is missing required \"%s\"", - name, VALUE))); - } - for (String key : attributes.keySet()) { - boolean bFound = false; - for (String of : OUTPUT_FIELD) { - if (key.equals(of)) { - bFound = true; - break; - } - } - if (!bFound) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE218", String.format( - "UnknownFieldError: Output \"%s\" contains unknown field \"%s\"", - name, key))); - } - } - } - - // getter/setter - - public String getName() { - return name; - } - - public void setAttr(String name, Object value) { - attributes.put(name, value); - } -} - -/*python - -class Output(object): - - OUTPUT_FIELD = (DESCRIPTION, VALUE) = ('description', 'value') - - def __init__(self, name, attributes): - self.name = name - self.attributes = attributes - - @property - def description(self): - return self.attributes.get(self.DESCRIPTION) - - @property - def value(self): - return self.attributes.get(self.VALUE) - - def validate(self): - self._validate_field() - - def _validate_field(self): - if not isinstance(self.attributes, dict): - ValidationIssueCollector.appendException( - MissingRequiredFieldError(what='Output "%s"' % self.name, - required=self.VALUE)) - if self.value is None: - ValidationIssueCollector.appendException( - MissingRequiredFieldError(what='Output "%s"' % self.name, - required=self.VALUE)) - for name in self.attributes: - if name not in self.OUTPUT_FIELD: - ValidationIssueCollector.appendException( - UnknownFieldError(what='Output "%s"' % self.name, - field=name)) -*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/prereq/CSAR.java b/src/main/java/org/onap/sdc/toscaparser/api/prereq/CSAR.java deleted file mode 100644 index 4ada267..0000000 --- 
a/src/main/java/org/onap/sdc/toscaparser/api/prereq/CSAR.java +++ /dev/null @@ -1,790 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * SDC - * ================================================================================ - * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package org.onap.sdc.toscaparser.api.prereq; - -import org.onap.sdc.toscaparser.api.ImportsLoader; -import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; -import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; -import org.onap.sdc.toscaparser.api.utils.UrlUtils; - -import java.io.BufferedOutputStream; -import java.io.File; -import java.io.FileInputStream; -import java.io.FileOutputStream; -import java.io.IOException; -import java.io.InputStream; -import java.io.RandomAccessFile; -import java.net.URL; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.nio.file.StandardCopyOption; -import java.util.*; -import java.util.zip.ZipEntry; -import java.util.zip.ZipFile; -import java.util.zip.ZipInputStream; - -import org.onap.sdc.toscaparser.api.common.JToscaException; -import org.onap.sdc.toscaparser.api.utils.JToscaErrorCodes; -import 
org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.yaml.snakeyaml.Yaml; - -public class CSAR { - - private static Logger log = LoggerFactory.getLogger(CSAR.class.getName()); - private static final ArrayList META_PROPERTIES_FILES = new ArrayList<>(Arrays.asList("TOSCA-Metadata/TOSCA.meta", "csar.meta")); - - private String path; - private boolean isFile; - private boolean isValidated; - private boolean errorCaught; - private String csar; - private String tempDir; - // private Metadata metaData; - private File tempFile; - private LinkedHashMap> metaProperties; - - public CSAR(String csarPath, boolean aFile) { - path = csarPath; - isFile = aFile; - isValidated = false; - errorCaught = false; - csar = null; - tempDir = null; - tempFile = null; - metaProperties = new LinkedHashMap<>(); - } - - public boolean validate() throws JToscaException { - isValidated = true; - - //validate that the file or URL exists - - if (isFile) { - File f = new File(path); - if (!f.isFile()) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE220", String.format("\"%s\" is not a file", path))); - return false; - } else { - this.csar = path; - } - } else { - if (!UrlUtils.validateUrl(path)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE221", String.format("ImportError: \"%s\" does not exist", path))); - return false; - } - // get it to a local file - try { - File tempFile = File.createTempFile("csartmp", ".csar"); - Path ptf = Paths.get(tempFile.getPath()); - URL webfile = new URL(path); - InputStream in = webfile.openStream(); - Files.copy(in, ptf, StandardCopyOption.REPLACE_EXISTING); - } catch (Exception e) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE222", "ImportError: failed to load CSAR from " + path)); - return false; - } - - log.debug("CSAR - validate - currently only files are supported"); - return false; - } - - _parseAndValidateMetaProperties(); - 
- if (errorCaught) { - return false; - } - - // validate that external references in the main template actually exist and are accessible - _validateExternalReferences(); - - return !errorCaught; - - } - - private void _parseAndValidateMetaProperties() throws JToscaException { - - ZipFile zf = null; - - try { - - // validate that it is a valid zip file - RandomAccessFile raf = new RandomAccessFile(csar, "r"); - long n = raf.readInt(); - raf.close(); - // check if Zip's magic number - if (n != 0x504B0304) { - String errorString = String.format("\"%s\" is not a valid zip file", csar); - log.error(errorString); - throw new JToscaException(errorString, JToscaErrorCodes.INVALID_CSAR_FORMAT.getValue()); - } - - // validate that it contains the metadata file in the correct location - zf = new ZipFile(csar); - ZipEntry ze = zf.getEntry("TOSCA-Metadata/TOSCA.meta"); - if (ze == null) { - - String errorString = String.format( - "\"%s\" is not a valid CSAR as it does not contain the " + - "required file \"TOSCA.meta\" in the folder \"TOSCA-Metadata\"", csar); - log.error(errorString); - throw new JToscaException(errorString, JToscaErrorCodes.MISSING_META_FILE.getValue()); - } - - //Going over expected metadata files and parsing them - for (String metaFile : META_PROPERTIES_FILES) { - - byte ba[] = new byte[4096]; - ze = zf.getEntry(metaFile); - if (ze != null) { - InputStream inputStream = zf.getInputStream(ze); - n = inputStream.read(ba, 0, 4096); - String md = new String(ba); - md = md.substring(0, (int) n); - - String errorString = String.format( - "The file \"%s\" in the" + - " CSAR \"%s\" does not contain valid YAML content", ze.getName(), csar); - - try { - Yaml yaml = new Yaml(); - Object mdo = yaml.load(md); - if (!(mdo instanceof LinkedHashMap)) { - log.error(errorString); - throw new JToscaException(errorString, JToscaErrorCodes.INVALID_META_YAML_CONTENT.getValue()); - } - - String[] split = ze.getName().split("/"); - String fileName = split[split.length - 1]; - - if 
(!metaProperties.containsKey(fileName)) { - metaProperties.put(fileName, (LinkedHashMap) mdo); - } - } catch (Exception e) { - log.error(errorString); - throw new JToscaException(errorString, JToscaErrorCodes.INVALID_META_YAML_CONTENT.getValue()); - } - } - } - - // verify it has "Entry-Definition" - String edf = _getMetadata("Entry-Definitions"); - if (edf == null) { - String errorString = String.format( - "The CSAR \"%s\" is missing the required metadata " + - "\"Entry-Definitions\" in \"TOSCA-Metadata/TOSCA.meta\"", csar); - log.error(errorString); - throw new JToscaException(errorString, JToscaErrorCodes.ENTRY_DEFINITION_NOT_DEFINED.getValue()); - } - - //validate that "Entry-Definitions' metadata value points to an existing file in the CSAR - boolean foundEDF = false; - Enumeration entries = zf.entries(); - while (entries.hasMoreElements()) { - ze = entries.nextElement(); - if (ze.getName().equals(edf)) { - foundEDF = true; - break; - } - } - if (!foundEDF) { - String errorString = String.format( - "The \"Entry-Definitions\" file defined in the CSAR \"%s\" does not exist", csar); - log.error(errorString); - throw new JToscaException(errorString, JToscaErrorCodes.MISSING_ENTRY_DEFINITION_FILE.getValue()); - } - } catch (JToscaException e) { - //ThreadLocalsHolder.getCollector().appendCriticalException(e.getMessage()); - throw e; - } catch (Exception e) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE223", "ValidationError: " + e.getMessage())); - errorCaught = true; - } - - try { - if (zf != null) { - zf.close(); - } - } catch (IOException e) { - } - } - - public void cleanup() { - try { - if (tempFile != null) { - tempFile.delete(); - } - } catch (Exception e) { - } - } - - private String _getMetadata(String key) throws JToscaException { - if (!isValidated) { - validate(); - } - Object value = _getMetaProperty("TOSCA.meta").get(key); - return value != null ? 
value.toString() : null; - } - - public String getAuthor() throws JToscaException { - return _getMetadata("Created-By"); - } - - public String getVersion() throws JToscaException { - return _getMetadata("CSAR-Version"); - } - - public LinkedHashMap> getMetaProperties() { - return metaProperties; - } - - private LinkedHashMap _getMetaProperty(String propertiesFile) { - return metaProperties.get(propertiesFile); - } - - public String getMainTemplate() throws JToscaException { - String entryDef = _getMetadata("Entry-Definitions"); - ZipFile zf; - boolean ok = false; - try { - zf = new ZipFile(path); - ok = (zf.getEntry(entryDef) != null); - zf.close(); - } catch (IOException e) { - if (!ok) { - log.error("CSAR - getMainTemplate - failed to open {}", path); - } - } - if (ok) { - return entryDef; - } else { - return null; - } - } - - @SuppressWarnings("unchecked") - public LinkedHashMap getMainTemplateYaml() throws JToscaException { - String mainTemplate = tempDir + File.separator + getMainTemplate(); - if (mainTemplate != null) { - try (InputStream input = new FileInputStream(new File(mainTemplate));) { - Yaml yaml = new Yaml(); - Object data = yaml.load(input); - if (!(data instanceof LinkedHashMap)) { - throw new IOException(); - } - return (LinkedHashMap) data; - } catch (Exception e) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE224", String.format( - "The file \"%s\" in the CSAR \"%s\" does not " + - "contain valid TOSCA YAML content", - mainTemplate, csar))); - } - } - return null; - } - - public String getDescription() throws JToscaException { - String desc = _getMetadata("Description"); - if (desc != null) { - return desc; - } - - Map metaData = metaProperties.get("TOSCA.meta"); - metaData.put("Description", getMainTemplateYaml().get("description")); - return _getMetadata("Description"); - } - - public String getTempDir() { - return tempDir; - } - - public void decompress() throws IOException, JToscaException { - if 
(!isValidated) { - validate(); - } - - if (tempDir == null || tempDir.isEmpty()) { - tempDir = Files.createTempDirectory("JTP").toString(); - unzip(path, tempDir); - } - } - - private void _validateExternalReferences() throws JToscaException { - // Extracts files referenced in the main template - // These references are currently supported: - // * imports - // * interface implementations - // * artifacts - try { - decompress(); - String mainTplFile = getMainTemplate(); - if (mainTplFile == null) { - return; - } - - LinkedHashMap mainTpl = getMainTemplateYaml(); - if (mainTpl.get("imports") != null) { - // this loads the imports - ImportsLoader il = new ImportsLoader((ArrayList) mainTpl.get("imports"), - tempDir + File.separator + mainTplFile, - (Object) null, - (LinkedHashMap) null); - } - - if (mainTpl.get("topology_template") != null) { - LinkedHashMap topologyTemplate = - (LinkedHashMap) mainTpl.get("topology_template"); - - if (topologyTemplate.get("node_templates") != null) { - LinkedHashMap nodeTemplates = - (LinkedHashMap) topologyTemplate.get("node_templates"); - for (String nodeTemplateKey : nodeTemplates.keySet()) { - LinkedHashMap nodeTemplate = - (LinkedHashMap) nodeTemplates.get(nodeTemplateKey); - if (nodeTemplate.get("artifacts") != null) { - LinkedHashMap artifacts = - (LinkedHashMap) nodeTemplate.get("artifacts"); - for (String artifactKey : artifacts.keySet()) { - Object artifact = artifacts.get(artifactKey); - if (artifact instanceof String) { - _validateExternalReference(mainTplFile, (String) artifact, true); - } else if (artifact instanceof LinkedHashMap) { - String file = (String) ((LinkedHashMap) artifact).get("file"); - if (file != null) { - _validateExternalReference(mainTplFile, file, true); - } - } else { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE225", String.format( - "ValueError: Unexpected artifact definition for \"%s\"", - artifactKey))); - errorCaught = true; - } - } - } - if 
(nodeTemplate.get("interfaces") != null) { - LinkedHashMap interfaces = - (LinkedHashMap) nodeTemplate.get("interfaces"); - for (String interfaceKey : interfaces.keySet()) { - LinkedHashMap _interface = - (LinkedHashMap) interfaces.get(interfaceKey); - for (String operationKey : _interface.keySet()) { - Object operation = _interface.get(operationKey); - if (operation instanceof String) { - _validateExternalReference(mainTplFile, (String) operation, false); - } else if (operation instanceof LinkedHashMap) { - String imp = (String) ((LinkedHashMap) operation).get("implementation"); - if (imp != null) { - _validateExternalReference(mainTplFile, imp, true); - } - } - } - } - } - } - } - } - } catch (IOException e) { - errorCaught = true; - } finally { - // delete tempDir (only here?!?) - File fdir = new File(tempDir); - deleteDir(fdir); - tempDir = null; - } - } - - public static void deleteDir(File fdir) { - try { - if (fdir.isDirectory()) { - for (File c : fdir.listFiles()) - deleteDir(c); - } - fdir.delete(); - } catch (Exception e) { - } - } - - private void _validateExternalReference(String tplFile, String resourceFile, boolean raiseExc) { - // Verify that the external resource exists - - // If resource_file is a URL verify that the URL is valid. - // If resource_file is a relative path verify that the path is valid - // considering base folder (self.temp_dir) and tpl_file. - // Note that in a CSAR resource_file cannot be an absolute path. 
- if (UrlUtils.validateUrl(resourceFile)) { - String msg = String.format("URLException: The resource at \"%s\" cannot be accessed", resourceFile); - try { - if (UrlUtils.isUrlAccessible(resourceFile)) { - return; - } else { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE226", msg)); - errorCaught = true; - } - } catch (Exception e) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE227", msg)); - } - } - - String dirPath = Paths.get(tplFile).getParent().toString(); - String filePath = tempDir + File.separator + dirPath + File.separator + resourceFile; - File f = new File(filePath); - if (f.isFile()) { - return; - } - - if (raiseExc) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE228", String.format( - "ValueError: The resource \"%s\" does not exist", resourceFile))); - } - errorCaught = true; - } - - private void unzip(String zipFilePath, String destDirectory) throws IOException { - File destDir = new File(destDirectory); - if (!destDir.exists()) { - destDir.mkdir(); - } - - try (ZipInputStream zipIn = new ZipInputStream(new FileInputStream(zipFilePath));) { - ZipEntry entry = zipIn.getNextEntry(); - // iterates over entries in the zip file - while (entry != null) { - // create all directories needed for nested items - String[] parts = entry.getName().split("/"); - String s = destDirectory + File.separator; - for (int i = 0; i < parts.length - 1; i++) { - s += parts[i]; - File idir = new File(s); - if (!idir.exists()) { - idir.mkdir(); - } - s += File.separator; - } - String filePath = destDirectory + File.separator + entry.getName(); - if (!entry.isDirectory()) { - // if the entry is a file, extracts it - extractFile(zipIn, filePath); - } else { - // if the entry is a directory, make the directory - File dir = new File(filePath); - dir.mkdir(); - } - zipIn.closeEntry(); - entry = zipIn.getNextEntry(); - } - } - } - - /** - * Extracts a zip entry 
(file entry) - * - * @param zipIn - * @param filePath - * @throws IOException - */ - private static final int BUFFER_SIZE = 4096; - - private void extractFile(ZipInputStream zipIn, String filePath) throws IOException { - //BufferedOutputStream bos = new BufferedOutputStream(new FileOutputStream(filePath)); - try (FileOutputStream fos = new FileOutputStream(filePath); - BufferedOutputStream bos = new BufferedOutputStream(fos);) { - byte[] bytesIn = new byte[BUFFER_SIZE]; - int read = 0; - while ((read = zipIn.read(bytesIn)) != -1) { - bos.write(bytesIn, 0, read); - } - } - } - -} - -/*python - -from toscaparser.common.exception import ValidationIssueCollector -from toscaparser.common.exception import URLException -from toscaparser.common.exception import ValidationError -from toscaparser.imports import ImportsLoader -from toscaparser.utils.gettextutils import _ -from toscaparser.utils.urlutils import UrlUtils - -try: # Python 2.x - from BytesIO import BytesIO -except ImportError: # Python 3.x - from io import BytesIO - - -class CSAR(object): - - def __init__(self, csar_file, a_file=True): - self.path = csar_file - self.a_file = a_file - self.is_validated = False - self.error_caught = False - self.csar = None - self.temp_dir = None - - def validate(self): - """Validate the provided CSAR file.""" - - self.is_validated = True - - # validate that the file or URL exists - missing_err_msg = (_('"%s" does not exist.') % self.path) - if self.a_file: - if not os.path.isfile(self.path): - ValidationIssueCollector.appendException( - ValidationError(message=missing_err_msg)) - return False - else: - self.csar = self.path - else: # a URL - if not UrlUtils.validate_url(self.path): - ValidationIssueCollector.appendException( - ValidationError(message=missing_err_msg)) - return False - else: - response = requests.get(self.path) - self.csar = BytesIO(response.content) - - # validate that it is a valid zip file - if not zipfile.is_zipfile(self.csar): - err_msg = (_('"%s" is not a 
valid zip file.') % self.path) - ValidationIssueCollector.appendException( - ValidationError(message=err_msg)) - return False - - # validate that it contains the metadata file in the correct location - self.zfile = zipfile.ZipFile(self.csar, 'r') - filelist = self.zfile.namelist() - if 'TOSCA-Metadata/TOSCA.meta' not in filelist: - err_msg = (_('"%s" is not a valid CSAR as it does not contain the ' - 'required file "TOSCA.meta" in the folder ' - '"TOSCA-Metadata".') % self.path) - ValidationIssueCollector.appendException( - ValidationError(message=err_msg)) - return False - - # validate that 'Entry-Definitions' property exists in TOSCA.meta - data = self.zfile.read('TOSCA-Metadata/TOSCA.meta') - invalid_yaml_err_msg = (_('The file "TOSCA-Metadata/TOSCA.meta" in ' - 'the CSAR "%s" does not contain valid YAML ' - 'content.') % self.path) - try: - meta = yaml.load(data) - if type(meta) is dict: - self.metadata = meta - else: - ValidationIssueCollector.appendException( - ValidationError(message=invalid_yaml_err_msg)) - return False - except yaml.YAMLError: - ValidationIssueCollector.appendException( - ValidationError(message=invalid_yaml_err_msg)) - return False - - if 'Entry-Definitions' not in self.metadata: - err_msg = (_('The CSAR "%s" is missing the required metadata ' - '"Entry-Definitions" in ' - '"TOSCA-Metadata/TOSCA.meta".') - % self.path) - ValidationIssueCollector.appendException( - ValidationError(message=err_msg)) - return False - - # validate that 'Entry-Definitions' metadata value points to an - # existing file in the CSAR - entry = self.metadata.get('Entry-Definitions') - if entry and entry not in filelist: - err_msg = (_('The "Entry-Definitions" file defined in the ' - 'CSAR "%s" does not exist.') % self.path) - ValidationIssueCollector.appendException( - ValidationError(message=err_msg)) - return False - - # validate that external references in the main template actually - # exist and are accessible - self._validate_external_references() - return not 
self.error_caught - - def get_metadata(self): - """Return the metadata dictionary.""" - - # validate the csar if not already validated - if not self.is_validated: - self.validate() - - # return a copy to avoid changes overwrite the original - return dict(self.metadata) if self.metadata else None - - def _get_metadata(self, key): - if not self.is_validated: - self.validate() - return self.metadata.get(key) - - def get_author(self): - return self._get_metadata('Created-By') - - def get_version(self): - return self._get_metadata('CSAR-Version') - - def get_main_template(self): - entry_def = self._get_metadata('Entry-Definitions') - if entry_def in self.zfile.namelist(): - return entry_def - - def get_main_template_yaml(self): - main_template = self.get_main_template() - if main_template: - data = self.zfile.read(main_template) - invalid_tosca_yaml_err_msg = ( - _('The file "%(template)s" in the CSAR "%(csar)s" does not ' - 'contain valid TOSCA YAML content.') % - {'template': main_template, 'csar': self.path}) - try: - tosca_yaml = yaml.load(data) - if type(tosca_yaml) is not dict: - ValidationIssueCollector.appendException( - ValidationError(message=invalid_tosca_yaml_err_msg)) - return tosca_yaml - except Exception: - ValidationIssueCollector.appendException( - ValidationError(message=invalid_tosca_yaml_err_msg)) - - def get_description(self): - desc = self._get_metadata('Description') - if desc is not None: - return desc - - self.metadata['Description'] = \ - self.get_main_template_yaml().get('description') - return self.metadata['Description'] - - def decompress(self): - if not self.is_validated: - self.validate() - self.temp_dir = tempfile.NamedTemporaryFile().name - with zipfile.ZipFile(self.csar, "r") as zf: - zf.extractall(self.temp_dir) - - def _validate_external_references(self): - """Extracts files referenced in the main template - - These references are currently supported: - * imports - * interface implementations - * artifacts - """ - try: - 
self.decompress() - main_tpl_file = self.get_main_template() - if not main_tpl_file: - return - main_tpl = self.get_main_template_yaml() - - if 'imports' in main_tpl: - ImportsLoader(main_tpl['imports'], - os.path.join(self.temp_dir, main_tpl_file)) - - if 'topology_template' in main_tpl: - topology_template = main_tpl['topology_template'] - - if 'node_templates' in topology_template: - node_templates = topology_template['node_templates'] - - for node_template_key in node_templates: - node_template = node_templates[node_template_key] - if 'artifacts' in node_template: - artifacts = node_template['artifacts'] - for artifact_key in artifacts: - artifact = artifacts[artifact_key] - if isinstance(artifact, six.string_types): - self._validate_external_reference( - main_tpl_file, - artifact) - elif isinstance(artifact, dict): - if 'file' in artifact: - self._validate_external_reference( - main_tpl_file, - artifact['file']) - else: - ValidationIssueCollector.appendException( - ValueError(_('Unexpected artifact ' - 'definition for "%s".') - % artifact_key)) - self.error_caught = True - if 'interfaces' in node_template: - interfaces = node_template['interfaces'] - for interface_key in interfaces: - interface = interfaces[interface_key] - for opertation_key in interface: - operation = interface[opertation_key] - if isinstance(operation, six.string_types): - self._validate_external_reference( - main_tpl_file, - operation, - False) - elif isinstance(operation, dict): - if 'implementation' in operation: - self._validate_external_reference( - main_tpl_file, - operation['implementation']) - finally: - if self.temp_dir: - shutil.rmtree(self.temp_dir) - - def _validate_external_reference(self, tpl_file, resource_file, - raise_exc=True): - """Verify that the external resource exists - - If resource_file is a URL verify that the URL is valid. - If resource_file is a relative path verify that the path is valid - considering base folder (self.temp_dir) and tpl_file. 
- Note that in a CSAR resource_file cannot be an absolute path. - """ - if UrlUtils.validate_url(resource_file): - msg = (_('The resource at "%s" cannot be accessed.') % - resource_file) - try: - if UrlUtils.url_accessible(resource_file): - return - else: - ValidationIssueCollector.appendException( - URLException(what=msg)) - self.error_caught = True - except Exception: - ValidationIssueCollector.appendException( - URLException(what=msg)) - self.error_caught = True - - if os.path.isfile(os.path.join(self.temp_dir, - os.path.dirname(tpl_file), - resource_file)): - return - - if raise_exc: - ValidationIssueCollector.appendException( - ValueError(_('The resource "%s" does not exist.') - % resource_file)) - self.error_caught = True -*/ - - diff --git a/src/main/java/org/onap/sdc/toscaparser/api/utils/CopyUtils.java b/src/main/java/org/onap/sdc/toscaparser/api/utils/CopyUtils.java deleted file mode 100644 index 237b738..0000000 --- a/src/main/java/org/onap/sdc/toscaparser/api/utils/CopyUtils.java +++ /dev/null @@ -1,50 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * SDC - * ================================================================================ - * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package org.onap.sdc.toscaparser.api.utils; - -import java.util.ArrayList; -import java.util.LinkedHashMap; -import java.util.Map; - -public class CopyUtils { - - private CopyUtils() { - } - - @SuppressWarnings("unchecked") - public static Object copyLhmOrAl(Object src) { - if (src instanceof LinkedHashMap) { - LinkedHashMap dst = new LinkedHashMap(); - for (Map.Entry me : ((LinkedHashMap) src).entrySet()) { - dst.put(me.getKey(), me.getValue()); - } - return dst; - } else if (src instanceof ArrayList) { - ArrayList dst = new ArrayList(); - for (Object o : (ArrayList) src) { - dst.add(o); - } - return dst; - } else { - return null; - } - } -} diff --git a/src/main/java/org/onap/sdc/toscaparser/api/utils/DumpUtils.java b/src/main/java/org/onap/sdc/toscaparser/api/utils/DumpUtils.java deleted file mode 100644 index 158a3e1..0000000 --- a/src/main/java/org/onap/sdc/toscaparser/api/utils/DumpUtils.java +++ /dev/null @@ -1,68 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * SDC - * ================================================================================ - * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package org.onap.sdc.toscaparser.api.utils; - -import java.util.ArrayList; -import java.util.LinkedHashMap; -import java.util.Map; - -public class DumpUtils { - - @SuppressWarnings("unchecked") - private static void dumpYaml(Object yo, int level) { - final String indent = " "; - try { - if (yo == null) { - System.out.println(""); - return; - } - String cname = yo.getClass().getSimpleName(); - System.out.print(cname); - if (cname.equals("LinkedHashMap")) { - LinkedHashMap lhm = (LinkedHashMap) yo; - System.out.println(); - for (Map.Entry me : lhm.entrySet()) { - System.out.print(indent.substring(0, level) + me.getKey() + ": "); - dumpYaml(me.getValue(), level + 2); - } - } else if (cname.equals("ArrayList")) { - ArrayList al = (ArrayList) yo; - System.out.println(); - for (int i = 0; i < al.size(); i++) { - System.out.format("%s[%d] ", indent.substring(0, level), i); - dumpYaml(al.get(i), level + 2); - } - } else if (cname.equals("String")) { - System.out.println(" ==> \"" + (String) yo + "\""); - } else if (cname.equals("Integer")) { - System.out.println(" ==> " + (int) yo); - } else if (cname.equals("Boolean")) { - System.out.println(" ==> " + (boolean) yo); - } else if (cname.equals("Double")) { - System.out.println(" ==> " + (double) yo); - } else { - System.out.println(" !! unexpected type"); - } - } catch (Exception e) { - System.out.println("Exception!! 
" + e.getMessage()); - } - } -} diff --git a/src/main/java/org/onap/sdc/toscaparser/api/utils/JToscaErrorCodes.java b/src/main/java/org/onap/sdc/toscaparser/api/utils/JToscaErrorCodes.java deleted file mode 100644 index 3849ce0..0000000 --- a/src/main/java/org/onap/sdc/toscaparser/api/utils/JToscaErrorCodes.java +++ /dev/null @@ -1,52 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * SDC - * ================================================================================ - * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package org.onap.sdc.toscaparser.api.utils; - - -public enum JToscaErrorCodes { - MISSING_META_FILE("JE1001"), - INVALID_META_YAML_CONTENT("JE1002"), - ENTRY_DEFINITION_NOT_DEFINED("JE1003"), - MISSING_ENTRY_DEFINITION_FILE("JE1004"), - GENERAL_ERROR("JE1005"), - PATH_NOT_VALID("JE1006"), - CSAR_TOSCA_VALIDATION_ERROR("JE1007"), - INVALID_CSAR_FORMAT("JE1008"); - - private String value; - - JToscaErrorCodes(String value) { - this.value = value; - } - - public String getValue() { - return value; - } - - public static JToscaErrorCodes getByCode(String code) { - for (JToscaErrorCodes v : values()) { - if (v.getValue().equals(code)) { - return v; - } - } - return null; - } -} diff --git a/src/main/java/org/onap/sdc/toscaparser/api/utils/TOSCAVersionProperty.java b/src/main/java/org/onap/sdc/toscaparser/api/utils/TOSCAVersionProperty.java deleted file mode 100644 index a753d62..0000000 --- a/src/main/java/org/onap/sdc/toscaparser/api/utils/TOSCAVersionProperty.java +++ /dev/null @@ -1,209 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * SDC - * ================================================================================ - * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package org.onap.sdc.toscaparser.api.utils; - -import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; - -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -// test with functions/test_concat.yaml -public class TOSCAVersionProperty { - - private String version; - - private static final String VERSION_RE = - "^(?([0-9][0-9]*))" - + "(\\.(?([0-9][0-9]*)))?" - + "(\\.(?([0-9][0-9]*)))?" - + "(\\.(?([0-9A-Za-z]+)))?" - + "(\\-(?[0-9])*)?$"; - - private String minorVersion = null; - private String majorVersion = null; - private String fixVersion = null; - private String qualifier = null; - private String buildVersion = null; - - - public TOSCAVersionProperty(String version) { - - if (version.equals("0") || version.equals("0.0") || version.equals("0.0.0")) { - return; - } - - Pattern pattern = Pattern.compile(VERSION_RE); - Matcher matcher = pattern.matcher(version); - if (!matcher.find()) { - ThreadLocalsHolder.getCollector().appendValidationIssue( - new JToscaValidationIssue( - "JE252", - "InvalidTOSCAVersionPropertyException: " - + "Value of TOSCA version property \"" + version + "\" is invalid" - )); - return; - } - minorVersion = matcher.group("gMinorVersion"); - majorVersion = matcher.group("gMajorVersion"); - fixVersion = matcher.group("gFixVersion"); - qualifier = validateQualifier(matcher.group("gQualifier")); - buildVersion = validateBuild(matcher.group("gBuildVersion")); - validateMajorVersion(majorVersion); - - this.version = version; - - } - - private String validateMajorVersion(String value) { - // Validate major version - - // Checks if only major version is provided and assumes - // minor version as 0. 
- // Eg: If version = 18, then it returns version = '18.0' - - if (minorVersion == null && buildVersion == null && !value.equals("0")) { - //log.warning(_('Minor version assumed "0".')) - version = version + "0"; - } - return value; - } - - private String validateQualifier(String value) { - // Validate qualifier - - // TOSCA version is invalid if a qualifier is present without the - // fix version or with all of major, minor and fix version 0s. - - // For example, the following versions are invalid - // 18.0.abc - // 0.0.0.abc - - if ((fixVersion == null && value != null) || (minorVersion.equals("0") && majorVersion.equals("0") - && fixVersion.equals("0") && value != null)) { - ThreadLocalsHolder.getCollector().appendValidationIssue( - new JToscaValidationIssue( - "JE253", - "InvalidTOSCAVersionPropertyException: Value of TOSCA version property \"" - + version - + "\" is invalid" - )); - } - return value; - } - - private String validateBuild(String value) { - // Validate build version - - // TOSCA version is invalid if build version is present without the qualifier. - // Eg: version = 18.0.0-1 is invalid. - - if (qualifier == null && value != null) { - ThreadLocalsHolder.getCollector().appendValidationIssue( - new JToscaValidationIssue( - "JE254", - "InvalidTOSCAVersionPropertyException: " - + "Value of TOSCA version property \"" + version + "\" is invalid" - ) - ); - } - return value; - } - - public Object getVersion() { - return version; - } - -} - -/*python - -class TOSCAVersionProperty(object): - - VERSION_RE = re.compile('^(?P([0-9][0-9]*))' - '(\.(?P([0-9][0-9]*)))?' - '(\.(?P([0-9][0-9]*)))?' - '(\.(?P([0-9A-Za-z]+)))?' 
- '(\-(?P[0-9])*)?$') - - def __init__(self, version): - self.version = str(version) - match = self.VERSION_RE.match(self.version) - if not match: - ValidationIssueCollector.appendException( - InvalidTOSCAVersionPropertyException(what=(self.version))) - return - ver = match.groupdict() - if self.version in ['0', '0.0', '0.0.0']: - log.warning(_('Version assumed as not provided')) - self.version = None - self.minor_version = ver['minor_version'] - self.major_version = ver['major_version'] - self.fix_version = ver['fix_version'] - self.qualifier = self._validate_qualifier(ver['qualifier']) - self.build_version = self._validate_build(ver['build_version']) - self._validate_major_version(self.major_version) - - def _validate_major_version(self, value): - """Validate major version - - Checks if only major version is provided and assumes - minor version as 0. - Eg: If version = 18, then it returns version = '18.0' - """ - - if self.minor_version is None and self.build_version is None and \ - value != '0': - log.warning(_('Minor version assumed "0".')) - self.version = '.'.join([value, '0']) - return value - - def _validate_qualifier(self, value): - """Validate qualifier - - TOSCA version is invalid if a qualifier is present without the - fix version or with all of major, minor and fix version 0s. - - For example, the following versions are invalid - 18.0.abc - 0.0.0.abc - """ - if (self.fix_version is None and value) or \ - (self.minor_version == self.major_version == - self.fix_version == '0' and value): - ValidationIssueCollector.appendException( - InvalidTOSCAVersionPropertyException(what=(self.version))) - return value - - def _validate_build(self, value): - """Validate build version - - TOSCA version is invalid if build version is present without the - qualifier. - Eg: version = 18.0.0-1 is invalid. 
- """ - if not self.qualifier and value: - ValidationIssueCollector.appendException( - InvalidTOSCAVersionPropertyException(what=(self.version))) - return value - - def get_version(self): - return self.version -*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/utils/ThreadLocalsHolder.java b/src/main/java/org/onap/sdc/toscaparser/api/utils/ThreadLocalsHolder.java deleted file mode 100644 index 4c4581b..0000000 --- a/src/main/java/org/onap/sdc/toscaparser/api/utils/ThreadLocalsHolder.java +++ /dev/null @@ -1,45 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * SDC - * ================================================================================ - * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package org.onap.sdc.toscaparser.api.utils; - -import org.onap.sdc.toscaparser.api.common.ValidationIssueCollector; - -public class ThreadLocalsHolder { - - private static final ThreadLocal EXCEPTION_COLLECTOR_THREAD_LOCAL = new ThreadLocal<>(); - - private ThreadLocalsHolder() { - } - - public static ValidationIssueCollector getCollector() { - return EXCEPTION_COLLECTOR_THREAD_LOCAL.get(); - } - - public static void setCollector(ValidationIssueCollector validationIssueCollector) { - cleanup(); - EXCEPTION_COLLECTOR_THREAD_LOCAL.set(validationIssueCollector); - } - - public static void cleanup() { - EXCEPTION_COLLECTOR_THREAD_LOCAL.remove(); - } - -} diff --git a/src/main/java/org/onap/sdc/toscaparser/api/utils/UrlUtils.java b/src/main/java/org/onap/sdc/toscaparser/api/utils/UrlUtils.java deleted file mode 100644 index d081d91..0000000 --- a/src/main/java/org/onap/sdc/toscaparser/api/utils/UrlUtils.java +++ /dev/null @@ -1,145 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * SDC - * ================================================================================ - * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package org.onap.sdc.toscaparser.api.utils; - -import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; - -import java.io.IOException; -import java.net.HttpURLConnection; -import java.net.MalformedURLException; -import java.net.URL; - -public class UrlUtils { - - private static final int HTTP_STATUS_OK = 200; - - private UrlUtils() { - } - - public static boolean validateUrl(String sUrl) { - // Validates whether the given path is a URL or not - - // If the given path includes a scheme (http, https, ftp, ...) and a net - // location (a domain name such as www.github.com) it is validated as a URL - try { - URL url = new URL(sUrl); - if (url.getProtocol().equals("file")) { - return true; - } - return url.getAuthority() != null; - } catch (MalformedURLException e) { - return false; - } - } - - public static String joinUrl(String sUrl, String relativePath) { - // Builds a new URL from the given URL and the relative path - - // Example: - // url: http://www.githib.com/openstack/heat - // relative_path: heat-translator - // - joined: http://www.githib.com/openstack/heat-translator - if (!validateUrl(sUrl)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE255", String.format( - "ValueError: The URL \"%s\" is malformed", sUrl))); - } - try { - URL base = new URL(sUrl); - return (new URL(base, relativePath)).toString(); - } catch (MalformedURLException e) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE256", String.format( - "ValueError: Joining URL \"%s\" and relative path \"%s\" caused an exception", sUrl, relativePath))); - return sUrl; - } - } - - public static boolean isUrlAccessible(String sUrl) { - // Validates whether the given URL is accessible - - // Returns true if the get call returns a 200 response code. - // Otherwise, returns false. 
- try { - HttpURLConnection connection = (HttpURLConnection) new URL(sUrl).openConnection(); - connection.setRequestMethod("HEAD"); - int responseCode = connection.getResponseCode(); - return responseCode == HTTP_STATUS_OK; - } catch (IOException e) { - return false; - } - } - -} - -/*python - -from six.moves.urllib.parse import urljoin -from six.moves.urllib.parse import urlparse -from toscaparser.common.exception import ValidationIssueCollector -from toscaparser.utils.gettextutils import _ - -try: - # Python 3.x - import urllib.request as urllib2 -except ImportError: - # Python 2.x - import urllib2 - - -class UrlUtils(object): - - @staticmethod - def validate_url(path): - """Validates whether the given path is a URL or not. - - If the given path includes a scheme (http, https, ftp, ...) and a net - location (a domain name such as www.github.com) it is validated as a - URL. - """ - parsed = urlparse(path) - if parsed.scheme == 'file': - # If the url uses the file scheme netloc will be "" - return True - else: - return bool(parsed.scheme) and bool(parsed.netloc) - - @staticmethod - def join_url(url, relative_path): - """Builds a new URL from the given URL and the relative path. - - Example: - url: http://www.githib.com/openstack/heat - relative_path: heat-translator - - joined: http://www.githib.com/openstack/heat-translator - """ - if not UrlUtils.validate_url(url): - ValidationIssueCollector.appendException( - ValueError(_('"%s" is not a valid URL.') % url)) - return urljoin(url, relative_path) - - @staticmethod - def url_accessible(url): - """Validates whether the given URL is accessible. - - Returns true if the get call returns a 200 response code. - Otherwise, returns false. 
- """ - return urllib2.urlopen(url).getcode() == 200 -*/ diff --git a/src/main/java/org/onap/sdc/toscaparser/api/utils/ValidateUtils.java b/src/main/java/org/onap/sdc/toscaparser/api/utils/ValidateUtils.java deleted file mode 100644 index b90d882..0000000 --- a/src/main/java/org/onap/sdc/toscaparser/api/utils/ValidateUtils.java +++ /dev/null @@ -1,439 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * SDC - * ================================================================================ - * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ - -package org.onap.sdc.toscaparser.api.utils; - -import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; - -import java.util.ArrayList; -import java.util.Date; -import java.util.LinkedHashMap; - -public class ValidateUtils { - - private static final String RANGE_UNBOUNDED = "UNBOUNDED"; - - private ValidateUtils() { - } - - public static Object strToNum(Object value) { - // Convert a string representation of a number into a numeric type - // TODO(TBD) we should not allow numeric values in, input should be str - if (value instanceof Number) { - return value; - } - try { - return Integer.parseInt((String) value); - } catch (NumberFormatException e) { - } - try { - return Float.parseFloat((String) value); - } catch (Exception e) { - } - return null; - } - - public static Object validateNumeric(Object value) { - if (value != null) { - if (!(value instanceof Number)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE257", String.format( - "ValueError: \"%s\" is not a numeric", value.toString()))); - } - } - return value; - } - - public static Object validateInteger(Object value) { - if (value != null) { - if (!(value instanceof Integer)) { - // allow "true" and "false" - if (value instanceof Boolean) { - return (Boolean) value ? 
1 : 0; - } - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE258", String.format( - "ValueError: \"%s\" is not an integer", value.toString()))); - } - } - return value; - } - - public static Object validateFloat(Object value) { - if (value != null) { - if (!(value instanceof Float || value instanceof Double)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE259", String.format( - "ValueError: \"%s\" is not a float", value.toString()))); - } - } - return value; - } - - public static Object validateString(Object value) { - if (value != null) { - if (!(value instanceof String)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE260", String.format( - "ValueError: \'%s\' is not a string", value.toString()))); - } - } - return value; - } - - public static Object validateList(Object value) { - if (value != null) { - if (!(value instanceof ArrayList)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE261", String.format( - "ValueError: \"%s\" is not a list", value.toString()))); - } - } - return value; - } - - - @SuppressWarnings("unchecked") - public static Object validateRange(Object range) { - // list class check - validateList(range); - // validate range list has a min and max - if (range instanceof ArrayList && ((ArrayList) range).size() != 2) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE262", String.format( - "ValueError: \"%s\" is not a valid range", range.toString()))); - // too dangerous to continue... 
- return range; - } - // validate min and max are numerics or the keyword UNBOUNDED - boolean minTest = false; - boolean maxTest = false; - Object r0 = ((ArrayList) range).get(0); - Object r1 = ((ArrayList) range).get(1); - - if (!(r0 instanceof Integer) && !(r0 instanceof Float) - || !(r1 instanceof Integer) && !(r1 instanceof Float)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE263", String.format( - "ValueError: \"%s\" is not a valid range", range.toString()))); - // too dangerous to continue... - return range; - } - - Float min = 0.0F; - Float max = 0.0F; - if (r0 instanceof String && ((String) r0).equals(RANGE_UNBOUNDED)) { - minTest = true; - } else { - min = r0 instanceof Integer ? ((Integer) r0).floatValue() : (Float) r0; - } - if (r1 instanceof String && ((String) r1).equals(RANGE_UNBOUNDED)) { - maxTest = true; - } else { - max = r1 instanceof Integer ? ((Integer) r1).floatValue() : (Float) r1; - } - - // validate the max > min (account for UNBOUNDED) - if (!minTest && !maxTest) { - // Note: min == max is allowed - if (min > max) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE264", String.format( - "ValueError:\"%s\" is not a valid range", range.toString()))); - } - } - return range; - } - - @SuppressWarnings("unchecked") - public static Object validateValueInRange(Object value, Object range, String propName) { - // verify all 3 are numeric and convert to Floats - if (!(value instanceof Integer || value instanceof Float)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE265", String.format( - "ValueError: validateInRange: \"%s\" is not a number", range.toString()))); - return value; - } - Float fval = value instanceof Integer ? ((Integer) value).floatValue() : (Float) value; - - ////////////////////////// - //"validateRange(range);" - ////////////////////////// - // better safe than sorry... 
- // validate that range list has a min and max - if (range instanceof ArrayList && ((ArrayList) range).size() != 2) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE266", String.format( - "ValueError: \"%s\" is not a valid range", range.toString()))); - // too dangerous to continue... - return value; - } - // validate min and max are numerics or the keyword UNBOUNDED - boolean minTest = false; - boolean maxTest = false; - Object r0 = ((ArrayList) range).get(0); - Object r1 = ((ArrayList) range).get(1); - - if (!(r0 instanceof Integer) && !(r0 instanceof Float) - || !(r1 instanceof Integer) && !(r1 instanceof Float)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE267", String.format( - "ValueError: \"%s\" is not a valid range", range.toString()))); - // too dangerous to continue... - return value; - } - - Float min = 0.0F; - Float max = 0.0F; - if (r0 instanceof String && ((String) r0).equals(RANGE_UNBOUNDED)) { - minTest = true; - } else { - min = r0 instanceof Integer ? ((Integer) r0).floatValue() : (Float) r0; - } - if (r1 instanceof String && ((String) r1).equals(RANGE_UNBOUNDED)) { - maxTest = true; - } else { - max = r1 instanceof Integer ? ((Integer) r1).floatValue() : (Float) r1; - } - - // validate the max > min (account for UNBOUNDED) - if (!minTest && !maxTest) { - // Note: min == max is allowed - if (min > max) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE268", String.format( - "ValueError:\"%s\" is not a valid range", range.toString()))); - } - } - // finally... 
- boolean bError = false; - //Note: value is valid if equal to min - if (!minTest) { - if (fval < min) { - bError = true; - } - } - // Note: value is valid if equal to max - if (!maxTest) { - if (fval > max) { - bError = true; - } - } - if (bError) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE269", String.format( - "RangeValueError: Property \"%s\", \"%s\" not in range [\"%s\" - \"%s\"", - propName, value.toString(), r0.toString(), r1.toString()))); - } - return value; - } - - public static Object validateMap(Object ob) { - if (ob != null) { - if (!(ob instanceof LinkedHashMap)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE270", String.format( - "ValueError\"%s\" is not a map.", ob.toString()))); - } - } - return ob; - } - - public static Object validateBoolean(Object value) { - if (value != null) { - if (value instanceof Boolean) { - return value; - } - if (value instanceof String) { - String normalized = ((String) value).toLowerCase(); - if (normalized.equals("true") || normalized.equals("false")) { - return normalized.equals("true"); - } - } - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE271", String.format( - "ValueError: \"%s\" is not a boolean", value.toString()))); - } - return value; - } - - public static Object validateTimestamp(Object value) { - - /* - try: - # Note: we must return our own exception message - # as dateutil's parser returns different types / values on - # different systems. OSX, for example, returns a tuple - # containing a different error message than Linux - dateutil.parser.parse(value) - except Exception as e: - original_err_msg = str(e) - log.error(original_err_msg) - ValidationIssueCollector.appendException( - ValueError(_('"%(val)s" is not a valid timestamp. 
"%(msg)s"') % - {'val': value, 'msg': original_err_msg})) - */ - // timestamps are loaded as Date objects by the YAML parser - if (value != null) { - if (!(value instanceof Date)) { - ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE272", String.format( - "ValueError: \"%s\" is not a valid timestamp", - value.toString()))); - - } - } - return value; - } - -} - -/*python - -from toscaparser.elements import constraints -from toscaparser.common.exception import ValidationIssueCollector -from toscaparser.common.exception import InvalidTOSCAVersionPropertyException -from toscaparser.common.exception import RangeValueError -from toscaparser.utils.gettextutils import _ - -log = logging.getLogger('tosca') - -RANGE_UNBOUNDED = 'UNBOUNDED' - - -def str_to_num(value): - '''Convert a string representation of a number into a numeric type.''' - # tODO(TBD) we should not allow numeric values in, input should be str - if isinstance(value, numbers.Number): - return value - try: - return int(value) - except ValueError: - return float(value) - - -def validate_numeric(value): - if not isinstance(value, numbers.Number): - ValidationIssueCollector.appendException( - ValueError(_('"%s" is not a numeric.') % value)) - return value - - -def validate_integer(value): - if not isinstance(value, int): - try: - value = int(value) - except Exception: - ValidationIssueCollector.appendException( - ValueError(_('"%s" is not an integer.') % value)) - return value - - -def validate_float(value): - if not isinstance(value, float): - ValidationIssueCollector.appendException( - ValueError(_('"%s" is not a float.') % value)) - return value - - -def validate_string(value): - if not isinstance(value, six.string_types): - ValidationIssueCollector.appendException( - ValueError(_('"%s" is not a string.') % value)) - return value - - -def validate_list(value): - if not isinstance(value, list): - ValidationIssueCollector.appendException( - ValueError(_('"%s" is not a list.') 
% value)) - return value - - -def validate_range(range): - # list class check - validate_list(range) - # validate range list has a min and max - if len(range) != 2: - ValidationIssueCollector.appendException( - ValueError(_('"%s" is not a valid range.') % range)) - # validate min and max are numerics or the keyword UNBOUNDED - min_test = max_test = False - if not range[0] == RANGE_UNBOUNDED: - min = validate_numeric(range[0]) - else: - min_test = True - if not range[1] == RANGE_UNBOUNDED: - max = validate_numeric(range[1]) - else: - max_test = True - # validate the max > min (account for UNBOUNDED) - if not min_test and not max_test: - # Note: min == max is allowed - if min > max: - ValidationIssueCollector.appendException( - ValueError(_('"%s" is not a valid range.') % range)) - - return range - - -def validate_value_in_range(value, range, prop_name): - validate_numeric(value) - validate_range(range) - - # Note: value is valid if equal to min - if range[0] != RANGE_UNBOUNDED: - if value < range[0]: - ValidationIssueCollector.appendException( - RangeValueError(pname=prop_name, - pvalue=value, - vmin=range[0], - vmax=range[1])) - # Note: value is valid if equal to max - if range[1] != RANGE_UNBOUNDED: - if value > range[1]: - ValidationIssueCollector.appendException( - RangeValueError(pname=prop_name, - pvalue=value, - vmin=range[0], - vmax=range[1])) - return value - - -def validate_map(value): - if not isinstance(value, collections.Mapping): - ValidationIssueCollector.appendException( - ValueError(_('"%s" is not a map.') % value)) - return value - - -def validate_boolean(value): - if isinstance(value, bool): - return value - - if isinstance(value, str): - normalised = value.lower() - if normalised in ['true', 'false']: - return normalised == 'true' - - ValidationIssueCollector.appendException( - ValueError(_('"%s" is not a boolean.') % value)) - - -def validate_timestamp(value): - try: - # Note: we must return our own exception message - # as dateutil's parser 
returns different types / values on - # different systems. OSX, for example, returns a tuple - # containing a different error message than Linux - dateutil.parser.parse(value) - except Exception as e: - original_err_msg = str(e) - log.error(original_err_msg) - ValidationIssueCollector.appendException( - ValueError(_('"%(val)s" is not a valid timestamp. "%(msg)s"') % - {'val': value, 'msg': original_err_msg})) - return - -*/ diff --git a/src/main/resources/TOSCA_definition_1_0.yaml b/src/main/resources/TOSCA_definition_1_0.yaml deleted file mode 100644 index d80ed17..0000000 --- a/src/main/resources/TOSCA_definition_1_0.yaml +++ /dev/null @@ -1,971 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -########################################################################## -# The content of this file reflects TOSCA Simple Profile in YAML version -# 1.0.0. It describes the definition for TOSCA types including Node Type, -# Relationship Type, CapabilityAssignment Type and Interfaces. -########################################################################## -tosca_definitions_version: tosca_simple_yaml_1_0 - -########################################################################## -# Node Type. -# A Node Type is a reusable entity that defines the type of one or more -# Node Templates. 
-########################################################################## -node_types: - tosca.nodes.Root: - description: > - The TOSCA root node all other TOSCA base node types derive from. - attributes: - tosca_id: - type: string - tosca_name: - type: string - state: - type: string - capabilities: - feature: - type: tosca.capabilities.Node - requirements: - - dependency: - capability: tosca.capabilities.Node - node: tosca.nodes.Root - relationship: tosca.relationships.DependsOn - occurrences: [ 0, UNBOUNDED ] - interfaces: - Standard: - type: tosca.interfaces.node.lifecycle.Standard - - tosca.nodes.Compute: - derived_from: tosca.nodes.Root - attributes: - private_address: - type: string - public_address: - type: string - networks: - type: map - entry_schema: - type: tosca.datatypes.network.NetworkInfo - ports: - type: map - entry_schema: - type: tosca.datatypes.network.PortInfo - capabilities: - host: - type: tosca.capabilities.Container - binding: - type: tosca.capabilities.network.Bindable - os: - type: tosca.capabilities.OperatingSystem - scalable: - type: tosca.capabilities.Scalable - endpoint: - type: tosca.capabilities.Endpoint.Admin - requirements: - - local_storage: - capability: tosca.capabilities.Attachment - node: tosca.nodes.BlockStorage - relationship: tosca.relationships.AttachesTo - occurrences: [0, UNBOUNDED] - - tosca.nodes.SoftwareComponent: - derived_from: tosca.nodes.Root - properties: - # domain-specific software component version - component_version: - type: version - required: false - description: > - Software component version. 
- admin_credential: - type: tosca.datatypes.Credential - required: false - requirements: - - host: - capability: tosca.capabilities.Container - node: tosca.nodes.Compute - relationship: tosca.relationships.HostedOn - - tosca.nodes.DBMS: - derived_from: tosca.nodes.SoftwareComponent - properties: - port: - required: false - type: integer - description: > - The port the DBMS service will listen to for data and requests. - root_password: - required: false - type: string - description: > - The root password for the DBMS service. - capabilities: - host: - type: tosca.capabilities.Container - valid_source_types: [tosca.nodes.Database] - - tosca.nodes.Database: - derived_from: tosca.nodes.Root - properties: - user: - required: false - type: string - description: > - User account name for DB administration - port: - required: false - type: integer - description: > - The port the database service will use to listen for incoming data and - requests. - name: - required: false - type: string - description: > - The name of the database. 
- password: - required: false - type: string - description: > - The password for the DB user account - requirements: - - host: - capability: tosca.capabilities.Container - node: tosca.nodes.DBMS - relationship: tosca.relationships.HostedOn - capabilities: - database_endpoint: - type: tosca.capabilities.Endpoint.Database - - tosca.nodes.WebServer: - derived_from: tosca.nodes.SoftwareComponent - capabilities: - data_endpoint: - type: tosca.capabilities.Endpoint - admin_endpoint: - type: tosca.capabilities.Endpoint.Admin - host: - type: tosca.capabilities.Container - valid_source_types: [tosca.nodes.WebApplication] - - tosca.nodes.WebApplication: - derived_from: tosca.nodes.Root - properties: - context_root: - type: string - required: false - requirements: - - host: - capability: tosca.capabilities.Container - node: tosca.nodes.WebServer - relationship: tosca.relationships.HostedOn - capabilities: - app_endpoint: - type: tosca.capabilities.Endpoint - - tosca.nodes.BlockStorage: - derived_from: tosca.nodes.Root - properties: - size: - type: scalar-unit.size - constraints: - - greater_or_equal: 1 MB - volume_id: - type: string - required: false - snapshot_id: - type: string - required: false - attributes: - volume_id: - type: string - capabilities: - attachment: - type: tosca.capabilities.Attachment - - tosca.nodes.network.Network: - derived_from: tosca.nodes.Root - description: > - The TOSCA Network node represents a simple, logical network service. - properties: - ip_version: - type: integer - required: false - default: 4 - constraints: - - valid_values: [ 4, 6 ] - description: > - The IP version of the requested network. Valid values are 4 for ipv4 - or 6 for ipv6. - cidr: - type: string - required: false - description: > - The cidr block of the requested network. - start_ip: - type: string - required: false - description: > - The IP address to be used as the start of a pool of addresses within - the full IP range derived from the cidr block. 
- end_ip: - type: string - required: false - description: > - The IP address to be used as the end of a pool of addresses within - the full IP range derived from the cidr block. - gateway_ip: - type: string - required: false - description: > - The gateway IP address. - network_name: - type: string - required: false - description: > - An identifier that represents an existing Network instance in the - underlying cloud infrastructure or can be used as the name of the - newly created network. If network_name is provided and no other - properties are provided (with exception of network_id), then an - existing network instance will be used. If network_name is provided - alongside with more properties then a new network with this name will - be created. - network_id: - type: string - required: false - description: > - An identifier that represents an existing Network instance in the - underlying cloud infrastructure. This property is mutually exclusive - with all other properties except network_name. This can be used alone - or together with network_name to identify an existing network. - segmentation_id: - type: string - required: false - description: > - A segmentation identifier in the underlying cloud infrastructure. - E.g. VLAN ID, GRE tunnel ID, etc.. - network_type: - type: string - required: false - description: > - It specifies the nature of the physical network in the underlying - cloud infrastructure. Examples are flat, vlan, gre or vxlan. - For flat and vlan types, physical_network should be provided too. - physical_network: - type: string - required: false - description: > - It identifies the physical network on top of which the network is - implemented, e.g. physnet1. This property is required if network_type - is flat or vlan. - dhcp_enabled: - type: boolean - required: false - default: true - description: > - Indicates should DHCP service be enabled on the network or not. 
- capabilities: - link: - type: tosca.capabilities.network.Linkable - - tosca.nodes.network.Port: - derived_from: tosca.nodes.Root - description: > - The TOSCA Port node represents a logical entity that associates between - Compute and Network normative types. The Port node type effectively - represents a single virtual NIC on the Compute node instance. - properties: - ip_address: - type: string - required: false - description: > - Allow the user to set a static IP. - order: - type: integer - required: false - default: 0 - constraints: - - greater_or_equal: 0 - description: > - The order of the NIC on the compute instance (e.g. eth2). - is_default: - type: boolean - required: false - default: false - description: > - If is_default=true this port will be used for the default gateway - route. Only one port that is associated to single compute node can - set as is_default=true. - ip_range_start: - type: string - required: false - description: > - Defines the starting IP of a range to be allocated for the compute - instances that are associated with this Port. - ip_range_end: - type: string - required: false - description: > - Defines the ending IP of a range to be allocated for the compute - instances that are associated with this Port. - attributes: - ip_address: - type: string - requirements: - - binding: - description: > - Binding requirement expresses the relationship between Port and - Compute nodes. Effectively it indicates that the Port will be - attached to specific Compute node instance - capability: tosca.capabilities.network.Bindable - relationship: tosca.relationships.network.BindsTo - node: tosca.nodes.Compute - - link: - description: > - Link requirement expresses the relationship between Port and Network - nodes. It indicates which network this port will connect to. 
- capability: tosca.capabilities.network.Linkable - relationship: tosca.relationships.network.LinksTo - node: tosca.nodes.network.Network - - tosca.nodes.network.FloatingIP: - derived_from: tosca.nodes.Root - description: > - The TOSCA FloatingIP node represents a floating IP that can associate to a Port. - properties: - floating_network: - type: string - required: true - floating_ip_address: - type: string - required: false - port_id: - type: string - required: false - requirements: - - link: - capability: tosca.capabilities.network.Linkable - relationship: tosca.relationships.network.LinksTo - node: tosca.nodes.network.Port - - tosca.nodes.ObjectStorage: - derived_from: tosca.nodes.Root - description: > - The TOSCA ObjectStorage node represents storage that provides the ability - to store data as objects (or BLOBs of data) without consideration for the - underlying filesystem or devices - properties: - name: - type: string - required: true - description: > - The logical name of the object store (or container). - size: - type: scalar-unit.size - required: false - constraints: - - greater_or_equal: 0 GB - description: > - The requested initial storage size. - maxsize: - type: scalar-unit.size - required: false - constraints: - - greater_or_equal: 0 GB - description: > - The requested maximum storage size. 
- capabilities: - storage_endpoint: - type: tosca.capabilities.Endpoint - - tosca.nodes.LoadBalancer: - derived_from: tosca.nodes.Root - properties: - algorithm: - type: string - required: false - status: experimental - capabilities: - client: - type: tosca.capabilities.Endpoint.Public - occurrences: [0, UNBOUNDED] - description: the Floating (IP) client’s on the public network can connect to - requirements: - - application: - capability: tosca.capabilities.Endpoint - relationship: tosca.relationships.RoutesTo - occurrences: [0, UNBOUNDED] - description: Connection to one or more load balanced applications - - tosca.nodes.Container.Application: - derived_from: tosca.nodes.Root - requirements: - - host: - capability: tosca.capabilities.Container - node: tosca.nodes.Container.Runtime - relationship: tosca.relationships.HostedOn - - tosca.nodes.Container.Runtime: - derived_from: tosca.nodes.SoftwareComponent - capabilities: - host: - type: tosca.capabilities.Container - scalable: - type: tosca.capabilities.Scalable - - tosca.nodes.Container.Application.Docker: - derived_from: tosca.nodes.Container.Application - requirements: - - host: - capability: tosca.capabilities.Container.Docker - -########################################################################## -# Relationship Type. -# A Relationship Type is a reusable entity that defines the type of one -# or more relationships between Node Types or Node Templates. -########################################################################## -relationship_types: - tosca.relationships.Root: - description: > - The TOSCA root Relationship Type all other TOSCA base Relationship Types - derive from. 
- attributes: - tosca_id: - type: string - tosca_name: - type: string - interfaces: - Configure: - type: tosca.interfaces.relationship.Configure - - tosca.relationships.DependsOn: - derived_from: tosca.relationships.Root - - tosca.relationships.HostedOn: - derived_from: tosca.relationships.Root - valid_target_types: [ tosca.capabilities.Container ] - - tosca.relationships.ConnectsTo: - derived_from: tosca.relationships.Root - valid_target_types: [ tosca.capabilities.Endpoint ] - credential: - type: tosca.datatypes.Credential - required: false - - tosca.relationships.AttachesTo: - derived_from: tosca.relationships.Root - valid_target_types: [ tosca.capabilities.Attachment ] - properties: - location: - required: true - type: string - constraints: - - min_length: 1 - device: - required: false - type: string - - tosca.relationships.RoutesTo: - derived_from: tosca.relationships.ConnectsTo - valid_target_types: [ tosca.capabilities.Endpoint ] - - tosca.relationships.network.LinksTo: - derived_from: tosca.relationships.DependsOn - valid_target_types: [ tosca.capabilities.network.Linkable ] - - tosca.relationships.network.BindsTo: - derived_from: tosca.relationships.DependsOn - valid_target_types: [ tosca.capabilities.network.Bindable ] - -########################################################################## -# CapabilityAssignment Type. -# A CapabilityAssignment Type is a reusable entity that describes a kind of -# capability that a Node Type can declare to expose. -########################################################################## -capability_types: - tosca.capabilities.Root: - description: > - The TOSCA root Capability Type all other TOSCA base Capability Types - derive from. 
- - tosca.capabilities.Node: - derived_from: tosca.capabilities.Root - - tosca.capabilities.Container: - derived_from: tosca.capabilities.Root - properties: - num_cpus: - required: false - type: integer - constraints: - - greater_or_equal: 1 - cpu_frequency: - required: false - type: scalar-unit.frequency - constraints: - - greater_or_equal: 0.1 GHz - disk_size: - required: false - type: scalar-unit.size - constraints: - - greater_or_equal: 0 MB - mem_size: - required: false - type: scalar-unit.size - constraints: - - greater_or_equal: 0 MB - - tosca.capabilities.Endpoint: - derived_from: tosca.capabilities.Root - properties: - protocol: - type: string - required: true - default: tcp - port: - type: tosca.datatypes.network.PortDef - required: false - secure: - type: boolean - required: false - default: false - url_path: - type: string - required: false - port_name: - type: string - required: false - network_name: - type: string - required: false - default: PRIVATE - initiator: - type: string - required: false - default: source - constraints: - - valid_values: [source, target, peer] - ports: - type: map - required: false - constraints: - - min_length: 1 - entry_schema: - type: tosca.datatypes.network.PortSpec - attributes: - ip_address: - type: string - - tosca.capabilities.Endpoint.Admin: - derived_from: tosca.capabilities.Endpoint - properties: - secure: - type: boolean - default: true - constraints: - - equal: true - - tosca.capabilities.Endpoint.Public: - derived_from: tosca.capabilities.Endpoint - properties: - # Change the default network_name to use the first public network found - network_name: - type: string - default: PUBLIC - constraints: - - equal: PUBLIC - floating: - description: > - Indicates that the public address should be allocated from a pool of - floating IPs that are associated with the network. 
- type: boolean - default: false - status: experimental - dns_name: - description: The optional name to register with DNS - type: string - required: false - status: experimental - - tosca.capabilities.Scalable: - derived_from: tosca.capabilities.Root - properties: - min_instances: - type: integer - required: true - default: 1 - description: > - This property is used to indicate the minimum number of instances - that should be created for the associated TOSCA Node Template by - a TOSCA orchestrator. - max_instances: - type: integer - required: true - default: 1 - description: > - This property is used to indicate the maximum number of instances - that should be created for the associated TOSCA Node Template by - a TOSCA orchestrator. - default_instances: - type: integer - required: false - description: > - An optional property that indicates the requested default number - of instances that should be the starting number of instances a - TOSCA orchestrator should attempt to allocate. - The value for this property MUST be in the range between the values - set for min_instances and max_instances properties. - - tosca.capabilities.Endpoint.Database: - derived_from: tosca.capabilities.Endpoint - - tosca.capabilities.Attachment: - derived_from: tosca.capabilities.Root - - tosca.capabilities.network.Linkable: - derived_from: tosca.capabilities.Root - description: > - A node type that includes the Linkable capability indicates that it can - be pointed by tosca.relationships.network.LinksTo relationship type, which - represents an association relationship between Port and Network node types. - - tosca.capabilities.network.Bindable: - derived_from: tosca.capabilities.Root - description: > - A node type that includes the Bindable capability indicates that it can - be pointed by tosca.relationships.network.BindsTo relationship type, which - represents a network association relationship between Port and Compute node - types. 
- - tosca.capabilities.OperatingSystem: - derived_from: tosca.capabilities.Root - properties: - architecture: - required: false - type: string - description: > - The host Operating System (OS) architecture. - type: - required: false - type: string - description: > - The host Operating System (OS) type. - distribution: - required: false - type: string - description: > - The host Operating System (OS) distribution. Examples of valid values - for an “type” of “Linux” would include: - debian, fedora, rhel and ubuntu. - version: - required: false - type: version - description: > - The host Operating System version. - - tosca.capabilities.Container.Docker: - derived_from: tosca.capabilities.Container - properties: - version: - type: list - required: false - entry_schema: - type: version - description: > - The Docker version capability. - publish_all: - type: boolean - default: false - required: false - description: > - Indicates that all ports (ranges) listed in the dockerfile - using the EXPOSE keyword be published. - publish_ports: - type: list - entry_schema: - type: tosca.datatypes.network.PortSpec - required: false - description: > - List of ports mappings from source (Docker container) - to target (host) ports to publish. - expose_ports: - type: list - entry_schema: - type: tosca.datatypes.network.PortSpec - required: false - description: > - List of ports mappings from source (Docker container) to expose - to other Docker containers (not accessible outside host). - volumes: - type: list - entry_schema: - type: string - required: false - description: > - The dockerfile VOLUME command which is used to enable access - from the Docker container to a directory on the host machine. - host_id: - type: string - required: false - description: > - The optional identifier of an existing host resource - that should be used to run this container on. 
- volume_id: - type: string - required: false - description: > - The optional identifier of an existing storage volume (resource) - that should be used to create the container's mount point(s) on. - -########################################################################## - # Interfaces Type. - # The Interfaces element describes a list of one or more interface - # definitions for a modelable entity (e.g., a Node or Relationship Type) - # as defined within the TOSCA Simple Profile specification. -########################################################################## -interface_types: - tosca.interfaces.node.lifecycle.Standard: - create: - description: Standard lifecycle create operation. - configure: - description: Standard lifecycle configure operation. - start: - description: Standard lifecycle start operation. - stop: - description: Standard lifecycle stop operation. - delete: - description: Standard lifecycle delete operation. - - tosca.interfaces.relationship.Configure: - pre_configure_source: - description: Operation to pre-configure the source endpoint. - pre_configure_target: - description: Operation to pre-configure the target endpoint. - post_configure_source: - description: Operation to post-configure the source endpoint. - post_configure_target: - description: Operation to post-configure the target endpoint. - add_target: - description: Operation to add a target node. - remove_target: - description: Operation to remove a target node. - add_source: > - description: Operation to notify the target node of a source node which - is now available via a relationship. - description: - target_changed: > - description: Operation to notify source some property or attribute of the - target changed - -########################################################################## - # Data Type. - # A Datatype is a complex data type declaration which contains other - # complex or simple data types. 
-########################################################################## -data_types: - tosca.datatypes.Root: - description: > - The TOSCA root Data Type all other TOSCA base Data Types derive from - - tosca.datatypes.network.NetworkInfo: - derived_from: tosca.datatypes.Root - properties: - network_name: - type: string - network_id: - type: string - addresses: - type: list - entry_schema: - type: string - - tosca.datatypes.network.PortInfo: - derived_from: tosca.datatypes.Root - properties: - port_name: - type: string - port_id: - type: string - network_id: - type: string - mac_address: - type: string - addresses: - type: list - entry_schema: - type: string - - tosca.datatypes.network.PortDef: - derived_from: tosca.datatypes.Root - type: integer - constraints: - - in_range: [ 1, 65535 ] - - tosca.datatypes.network.PortSpec: - derived_from: tosca.datatypes.Root - properties: - protocol: - type: string - required: true - default: tcp - constraints: - - valid_values: [ udp, tcp, igmp ] - target: - type: tosca.datatypes.network.PortDef - required: false - target_range: - type: range - required: false - constraints: - - in_range: [ 1, 65535 ] - source: - type: tosca.datatypes.network.PortDef - required: false - source_range: - type: range - required: false - constraints: - - in_range: [ 1, 65535 ] - - tosca.datatypes.Credential: - derived_from: tosca.datatypes.Root - properties: - protocol: - type: string - required: false - token_type: - type: string - default: password - required: true - token: - type: string - required: true - keys: - type: map - entry_schema: - type: string - required: false - user: - type: string - required: false - -########################################################################## - # Artifact Type. - # An Artifact Type is a reusable entity that defines the type of one or more - # files which Node Types or Node Templates can have dependent relationships - # and used during operations such as during installation or deployment. 
-########################################################################## -artifact_types: - tosca.artifacts.Root: - description: > - The TOSCA Artifact Type all other TOSCA Artifact Types derive from - properties: - version: version - - tosca.artifacts.File: - derived_from: tosca.artifacts.Root - - tosca.artifacts.Deployment: - derived_from: tosca.artifacts.Root - description: TOSCA base type for deployment artifacts - - tosca.artifacts.Deployment.Image: - derived_from: tosca.artifacts.Deployment - - tosca.artifacts.Deployment.Image.VM: - derived_from: tosca.artifacts.Deployment.Image - - tosca.artifacts.Implementation: - derived_from: tosca.artifacts.Root - description: TOSCA base type for implementation artifacts - - tosca.artifacts.Implementation.Bash: - derived_from: tosca.artifacts.Implementation - description: Script artifact for the Unix Bash shell - mime_type: application/x-sh - file_ext: [ sh ] - - tosca.artifacts.Implementation.Python: - derived_from: tosca.artifacts.Implementation - description: Artifact for the interpreted Python language - mime_type: application/x-python - file_ext: [ py ] - - tosca.artifacts.Deployment.Image.Container.Docker: - derived_from: tosca.artifacts.Deployment.Image - description: Docker container image - - tosca.artifacts.Deployment.Image.VM.ISO: - derived_from: tosca.artifacts.Deployment.Image - description: Virtual Machine (VM) image in ISO disk format - mime_type: application/octet-stream - file_ext: [ iso ] - - tosca.artifacts.Deployment.Image.VM.QCOW2: - derived_from: tosca.artifacts.Deployment.Image - description: Virtual Machine (VM) image in QCOW v2 standard disk format - mime_type: application/octet-stream - file_ext: [ qcow2 ] - -########################################################################## - # Policy Type. - # TOSCA Policy Types represent logical grouping of TOSCA nodes that have - # an implied relationship and need to be orchestrated or managed together - # to achieve some result. 
-########################################################################## -policy_types: - tosca.policies.Root: - description: The TOSCA Policy Type all other TOSCA Policy Types derive from. - - tosca.policies.Placement: - derived_from: tosca.policies.Root - description: The TOSCA Policy Type definition that is used to govern - placement of TOSCA nodes or groups of nodes. - - tosca.policies.Scaling: - derived_from: tosca.policies.Root - description: The TOSCA Policy Type definition that is used to govern - scaling of TOSCA nodes or groups of nodes. - - tosca.policies.Monitoring: - derived_from: tosca.policies.Root - description: The TOSCA Policy Type definition that is used to govern - monitoring of TOSCA nodes or groups of nodes. - - tosca.policies.Update: - derived_from: tosca.policies.Root - description: The TOSCA Policy Type definition that is used to govern - update of TOSCA nodes or groups of nodes. - - tosca.policies.Performance: - derived_from: tosca.policies.Root - description: The TOSCA Policy Type definition that is used to declare - performance requirements for TOSCA nodes or groups of nodes. - - onap.policies.Monitoring: - derived_from: tosca.policies.Root - description: The ONAP Policy Type definition for DCAE uS component monitoring policies. - -########################################################################## - # Group Type. - # Group Type represents logical grouping of TOSCA nodes that have an - # implied membership relationship and may need to be orchestrated or - # managed together to achieve some result. 
-########################################################################## -group_types: - tosca.groups.Root: - description: The TOSCA Group Type all other TOSCA Group Types derive from - interfaces: - Standard: - type: tosca.interfaces.node.lifecycle.Standard diff --git a/src/main/resources/extensions/TOSCA_simple_yaml_definition_1_0_0/TOSCA_simple_yaml_definition_1_0_0.py b/src/main/resources/extensions/TOSCA_simple_yaml_definition_1_0_0/TOSCA_simple_yaml_definition_1_0_0.py deleted file mode 100644 index a5bda4a..0000000 --- a/src/main/resources/extensions/TOSCA_simple_yaml_definition_1_0_0/TOSCA_simple_yaml_definition_1_0_0.py +++ /dev/null @@ -1,19 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -# VERSION and DEFS_FILE are required for all extensions - -VERSION = 'tosca_simple_yaml_1_0_0' - -DEFS_FILE = "TOSCA_simple_yaml_definition_1_0_0.yaml" - -SECTIONS = ('metadata') diff --git a/src/main/resources/extensions/TOSCA_simple_yaml_definition_1_0_0/TOSCA_simple_yaml_definition_1_0_0.yaml b/src/main/resources/extensions/TOSCA_simple_yaml_definition_1_0_0/TOSCA_simple_yaml_definition_1_0_0.yaml deleted file mode 100644 index c645e27..0000000 --- a/src/main/resources/extensions/TOSCA_simple_yaml_definition_1_0_0/TOSCA_simple_yaml_definition_1_0_0.yaml +++ /dev/null @@ -1,240 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -########################################################################## -# The content of this file reflects TOSCA NFV Profile in YAML version -# 1.0.0. It describes the definition for TOSCA NFV types including Node Type, -# Relationship Type, CapabilityAssignment Type and Interfaces. -########################################################################## -tosca_definitions_version: tosca_simple_yaml_1_0_0 - -########################################################################## -# Node Type. -# A Node Type is a reusable entity that defines the type of one or more -# Node Templates. -########################################################################## -node_types: - tosca.nodes.nfv.VNF: - derived_from: tosca.nodes.Root # Or should this be its own top - level type? 
- properties: - id: - type: string - description: ID of this VNF - vendor: - type: string - description: name of the vendor who generate this VNF - version: - type: version - description: version of the software for this VNF - requirements: - - virtualLink: - capability: tosca.capabilities.nfv.VirtualLinkable - relationship: tosca.relationships.nfv.VirtualLinksTo - node: tosca.nodes.nfv.VL - - tosca.nodes.nfv.VDU: - derived_from: tosca.nodes.Compute - capabilities: - high_availability: - type: tosca.capabilities.nfv.HA - virtualbinding: - type: tosca.capabilities.nfv.VirtualBindable - monitoring_parameter: - type: tosca.capabilities.nfv.Metric - requirements: - - high_availability: - capability: tosca.capabilities.nfv.HA - relationship: tosca.relationships.nfv.HA - node: tosca.nodes.nfv.VDU - occurrences: [ 0, 1 ] - - tosca.nodes.nfv.CP: - derived_from: tosca.nodes.network.Port - properties: - type: - type: string - required: false - requirements: - - virtualLink: - capability: tosca.capabilities.nfv.VirtualLinkable - relationship: tosca.relationships.nfv.VirtualLinksTo - node: tosca.nodes.nfv.VL - - virtualBinding: - capability: tosca.capabilities.nfv.VirtualBindable - relationship: tosca.relationships.nfv.VirtualBindsTo - node: tosca.nodes.nfv.VDU - attributes: - address: - type: string - - tosca.nodes.nfv.VL: - derived_from: tosca.nodes.network.Network - properties: - vendor: - type: string - required: true - description: name of the vendor who generate this VL - capabilities: - virtual_linkable: - type: tosca.capabilities.nfv.VirtualLinkable - - tosca.nodes.nfv.VL.ELine: - derived_from: tosca.nodes.nfv.VL - capabilities: - virtual_linkable: - occurrences: 2 - - tosca.nodes.nfv.VL.ELAN: - derived_from: tosca.nodes.nfv.VL - - tosca.nodes.nfv.VL.ETree: - derived_from: tosca.nodes.nfv.VL - - tosca.nodes.nfv.FP: - derived_from: tosca.nodes.Root - properties: - policy: - type: string - required: false - description: name of the vendor who generate this VL - 
requirements: - - forwarder: - capability: tosca.capabilities.nfv.Forwarder - relationship: tosca.relationships.nfv.ForwardsTo - -########################################################################## -# Relationship Type. -# A Relationship Type is a reusable entity that defines the type of one -# or more relationships between Node Types or Node Templates. -########################################################################## - -relationship_types: - tosca.relationships.nfv.VirtualLinksTo: - derived_from: tosca.relationships.network.LinksTo - valid_target_types: [ tosca.capabilities.nfv.VirtualLinkable ] - - tosca.relationships.nfv.VirtualBindsTo: - derived_from: tosca.relationships.network.BindsTo - valid_target_types: [ tosca.capabilities.nfv.VirtualBindable ] - - tosca.relationships.nfv.HA: - derived_from: tosca.relationships.Root - valid_target_types: [ tosca.capabilities.nfv.HA ] - - tosca.relationships.nfv.Monitor: - derived_from: tosca.relationships.ConnectsTo - valid_target_types: [ tosca.capabilities.nfv.Metric ] - - tosca.relationships.nfv.ForwardsTo: - derived_from: tosca.relationships.root - valid_target_types: [ tosca.capabilities.nfv.Forwarder] - -########################################################################## -# CapabilityAssignment Type. -# A CapabilityAssignment Type is a reusable entity that describes a kind of -# capability that a Node Type can declare to expose. 
-########################################################################## - -capability_types: - tosca.capabilities.nfv.VirtualLinkable: - derived_from: tosca.capabilities.network.Linkable - - tosca.capabilities.nfv.VirtualBindable: - derived_from: tosca.capabilities.network.Bindable - - tosca.capabilities.nfv.HA: - derived_from: tosca.capabilities.Root - valid_source_types: [ tosca.nodes.nfv.VDU ] - - tosca.capabilities.nfv.HA.ActiveActive: - derived_from: tosca.capabilities.nfv.HA - - tosca.capabilities.nfv.HA.ActivePassive: - derived_from: tosca.capabilities.nfv.HA - - tosca.capabilities.nfv.Metric: - derived_from: tosca.capabilities.Root - - tosca.capabilities.nfv.Forwarder: - derived_from: tosca.capabilities.Root - -########################################################################## - # Interfaces Type. - # The Interfaces element describes a list of one or more interface - # definitions for a modelable entity (e.g., a Node or Relationship Type) - # as defined within the TOSCA Simple Profile specification. -########################################################################## - -########################################################################## - # Data Type. - # A Datatype is a complex data type declaration which contains other - # complex or simple data types. -########################################################################## - -########################################################################## - # Artifact Type. - # An Artifact Type is a reusable entity that defines the type of one or more - # files which Node Types or Node Templates can have dependent relationships - # and used during operations such as during installation or deployment. -########################################################################## - -########################################################################## - # Policy Type. 
- # TOSCA Policy Types represent logical grouping of TOSCA nodes that have - # an implied relationship and need to be orchestrated or managed together - # to achieve some result. -########################################################################## - -########################################################################## - # Group Type - # -########################################################################## -group_types: - tosca.groups.nfv.VNFFG: - derived_from: tosca.groups.Root - - properties: - vendor: - type: string - required: true - description: name of the vendor who generate this VNFFG - - version: - type: string - required: true - description: version of this VNFFG - - number_of_endpoints: - type: integer - required: true - description: count of the external endpoints included in this VNFFG - - dependent_virtual_link: - type: list - entry_schema: - type: string - required: true - description: Reference to a VLD used in this Forwarding Graph - - connection_point: - type: list - entry_schema: - type: string - required: true - description: Reference to Connection Points forming the VNFFG - - constituent_vnfs: - type: list - entry_schema: - type: string - required: true - description: Reference to a list of VNFD used in this VNF Forwarding Graph diff --git a/src/main/resources/extensions/nfv/TOSCA_nfv_definition_1_0.yaml b/src/main/resources/extensions/nfv/TOSCA_nfv_definition_1_0.yaml deleted file mode 100644 index 8b08837..0000000 --- a/src/main/resources/extensions/nfv/TOSCA_nfv_definition_1_0.yaml +++ /dev/null @@ -1,240 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. 
You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the -# License for the specific language governing permissions and limitations -# under the License. - -########################################################################## -# The content of this file reflects TOSCA NFV Profile in YAML version -# 1.0.0. It describes the definition for TOSCA NFV types including Node Type, -# Relationship Type, CapabilityAssignment Type and Interfaces. -########################################################################## -tosca_definitions_version: tosca_simple_profile_for_nfv_1_0_0 - -########################################################################## -# Node Type. -# A Node Type is a reusable entity that defines the type of one or more -# Node Templates. -########################################################################## -node_types: - tosca.nodes.nfv.VNF: - derived_from: tosca.nodes.Root # Or should this be its own top - level type? 
- properties: - id: - type: string - description: ID of this VNF - vendor: - type: string - description: name of the vendor who generate this VNF - version: - type: version - description: version of the software for this VNF - requirements: - - virtualLink: - capability: tosca.capabilities.nfv.VirtualLinkable - relationship: tosca.relationships.nfv.VirtualLinksTo - node: tosca.nodes.nfv.VL - - tosca.nodes.nfv.VDU: - derived_from: tosca.nodes.Compute - capabilities: - high_availability: - type: tosca.capabilities.nfv.HA - virtualbinding: - type: tosca.capabilities.nfv.VirtualBindable - monitoring_parameter: - type: tosca.capabilities.nfv.Metric - requirements: - - high_availability: - capability: tosca.capabilities.nfv.HA - relationship: tosca.relationships.nfv.HA - node: tosca.nodes.nfv.VDU - occurrences: [ 0, 1 ] - - tosca.nodes.nfv.CP: - derived_from: tosca.nodes.network.Port - properties: - type: - type: string - required: false - requirements: - - virtualLink: - capability: tosca.capabilities.nfv.VirtualLinkable - relationship: tosca.relationships.nfv.VirtualLinksTo - node: tosca.nodes.nfv.VL - - virtualBinding: - capability: tosca.capabilities.nfv.VirtualBindable - relationship: tosca.relationships.nfv.VirtualBindsTo - node: tosca.nodes.nfv.VDU - attributes: - address: - type: string - - tosca.nodes.nfv.VL: - derived_from: tosca.nodes.network.Network - properties: - vendor: - type: string - required: true - description: name of the vendor who generate this VL - capabilities: - virtual_linkable: - type: tosca.capabilities.nfv.VirtualLinkable - - tosca.nodes.nfv.VL.ELine: - derived_from: tosca.nodes.nfv.VL - capabilities: - virtual_linkable: - occurrences: 2 - - tosca.nodes.nfv.VL.ELAN: - derived_from: tosca.nodes.nfv.VL - - tosca.nodes.nfv.VL.ETree: - derived_from: tosca.nodes.nfv.VL - - tosca.nodes.nfv.FP: - derived_from: tosca.nodes.Root - properties: - policy: - type: string - required: false - description: name of the vendor who generate this VL - 
requirements: - - forwarder: - capability: tosca.capabilities.nfv.Forwarder - relationship: tosca.relationships.nfv.ForwardsTo - -########################################################################## -# Relationship Type. -# A Relationship Type is a reusable entity that defines the type of one -# or more relationships between Node Types or Node Templates. -########################################################################## - -relationship_types: - tosca.relationships.nfv.VirtualLinksTo: - derived_from: tosca.relationships.network.LinksTo - valid_target_types: [ tosca.capabilities.nfv.VirtualLinkable ] - - tosca.relationships.nfv.VirtualBindsTo: - derived_from: tosca.relationships.network.BindsTo - valid_target_types: [ tosca.capabilities.nfv.VirtualBindable ] - - tosca.relationships.nfv.HA: - derived_from: tosca.relationships.Root - valid_target_types: [ tosca.capabilities.nfv.HA ] - - tosca.relationships.nfv.Monitor: - derived_from: tosca.relationships.ConnectsTo - valid_target_types: [ tosca.capabilities.nfv.Metric ] - - tosca.relationships.nfv.ForwardsTo: - derived_from: tosca.relationships.root - valid_target_types: [ tosca.capabilities.nfv.Forwarder] - -########################################################################## -# CapabilityAssignment Type. -# A CapabilityAssignment Type is a reusable entity that describes a kind of -# capability that a Node Type can declare to expose. 
-########################################################################## - -capability_types: - tosca.capabilities.nfv.VirtualLinkable: - derived_from: tosca.capabilities.network.Linkable - - tosca.capabilities.nfv.VirtualBindable: - derived_from: tosca.capabilities.network.Bindable - - tosca.capabilities.nfv.HA: - derived_from: tosca.capabilities.Root - valid_source_types: [ tosca.nodes.nfv.VDU ] - - tosca.capabilities.nfv.HA.ActiveActive: - derived_from: tosca.capabilities.nfv.HA - - tosca.capabilities.nfv.HA.ActivePassive: - derived_from: tosca.capabilities.nfv.HA - - tosca.capabilities.nfv.Metric: - derived_from: tosca.capabilities.Root - - tosca.capabilities.nfv.Forwarder: - derived_from: tosca.capabilities.Root - -########################################################################## - # Interfaces Type. - # The Interfaces element describes a list of one or more interface - # definitions for a modelable entity (e.g., a Node or Relationship Type) - # as defined within the TOSCA Simple Profile specification. -########################################################################## - -########################################################################## - # Data Type. - # A Datatype is a complex data type declaration which contains other - # complex or simple data types. -########################################################################## - -########################################################################## - # Artifact Type. - # An Artifact Type is a reusable entity that defines the type of one or more - # files which Node Types or Node Templates can have dependent relationships - # and used during operations such as during installation or deployment. -########################################################################## - -########################################################################## - # Policy Type. 
- # TOSCA Policy Types represent logical grouping of TOSCA nodes that have - # an implied relationship and need to be orchestrated or managed together - # to achieve some result. -########################################################################## - -########################################################################## - # Group Type - # -########################################################################## -group_types: - tosca.groups.nfv.VNFFG: - derived_from: tosca.groups.Root - - properties: - vendor: - type: string - required: true - description: name of the vendor who generate this VNFFG - - version: - type: string - required: true - description: version of this VNFFG - - number_of_endpoints: - type: integer - required: true - description: count of the external endpoints included in this VNFFG - - dependent_virtual_link: - type: list - entry_schema: - type: string - required: true - description: Reference to a VLD used in this Forwarding Graph - - connection_point: - type: list - entry_schema: - type: string - required: true - description: Reference to Connection Points forming the VNFFG - - constituent_vnfs: - type: list - entry_schema: - type: string - required: true - description: Reference to a list of VNFD used in this VNF Forwarding Graph diff --git a/src/main/resources/extensions/nfv/nfv.py b/src/main/resources/extensions/nfv/nfv.py deleted file mode 100644 index 0c7c2b9..0000000 --- a/src/main/resources/extensions/nfv/nfv.py +++ /dev/null @@ -1,19 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT -# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the -# License for the specific language governing permissions and limitations -# under the License. - -# VERSION and DEFS_FILE are required for all extensions - -VERSION = 'tosca_simple_profile_for_nfv_1_0_0' - -DEFS_FILE = "TOSCA_nfv_definition_1_0.yaml" - -SECTIONS = ('metadata') diff --git a/src/test/java/org/onap/sdc/toscaparser/api/GetValidationIssues.java b/src/test/java/org/onap/sdc/toscaparser/api/GetValidationIssues.java deleted file mode 100644 index 140a6e9..0000000 --- a/src/test/java/org/onap/sdc/toscaparser/api/GetValidationIssues.java +++ /dev/null @@ -1,100 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * SDC - * ================================================================================ - * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package org.onap.sdc.toscaparser.api; - -import com.opencsv.CSVWriter; - -import java.io.File; -import java.io.FileWriter; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.Scanner; - -//Generate excel file, include all validation issues errors in jtosca -//the error java code, the line number and file name for each error. 
-public class GetValidationIssues { - - public static CSVWriter fileWriter = null; - public static List data = new ArrayList<>(); - - public static void main(String[] args) { - System.out.println("GetAllValidationIssues - path to project files Directory is " + Arrays.toString(args)); - File jtoscaFiles = new File(args[0] + "\\jtosca\\src\\main\\java\\org\\onap\\sdc\\toscaparser\\api"); - - try { - printFiles(jtoscaFiles); - fileWriter = new CSVWriter(new FileWriter(args[1] + "\\JToscaValidationIssues_" + System.currentTimeMillis() + ".csv"), '\t'); - fileWriter.writeNext(new String[]{"Error Message", "Class Name", "Line No."}, false); - fileWriter.writeAll(data, false); - } catch (IOException e) { - e.printStackTrace(); - } finally { - try { - fileWriter.flush(); - fileWriter.close(); - } catch (IOException e) { - System.out.println("Error while flushing/closing fileWriter !!!"); - e.printStackTrace(); - } - } - } - - private static void printFiles(File dir) { - if (dir != null && dir.exists()) { - for (File file : dir.listFiles()) { - if (file.isDirectory()) - printFiles(file); - else { - Scanner scanner = null; - try { - scanner = new Scanner(file); - - int lineNum = 0; - while (scanner.hasNextLine()) { - String line = scanner.nextLine(); - lineNum++; - if (line.startsWith("/*python")) - break; - - if (!line.trim().startsWith("//") && !line.trim().startsWith("#") && line.contains("ThreadLocalsHolder.getCollector().appendValidationIssue")) { - String errMsg = line.trim(); - if (!errMsg.contains(";")) { - String nextLine = null; - while (scanner.hasNextLine() && (nextLine == null || !nextLine.contains(";"))) { - nextLine = scanner.nextLine(); - errMsg += nextLine.trim(); - } - } - - data.add(new String[]{errMsg, file.getName(), String.valueOf(lineNum)}); - } - } - } catch (IOException e) { - e.printStackTrace(); - } - } - } - } - } -} - diff --git a/src/test/java/org/onap/sdc/toscaparser/api/JToscaImportTest.java 
b/src/test/java/org/onap/sdc/toscaparser/api/JToscaImportTest.java deleted file mode 100644 index 5876ac7..0000000 --- a/src/test/java/org/onap/sdc/toscaparser/api/JToscaImportTest.java +++ /dev/null @@ -1,309 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * Copyright (c) 2017 AT&T Intellectual Property. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - * Modifications copyright (c) 2019 Fujitsu Limited. 
- * ================================================================================ - */ -package org.onap.sdc.toscaparser.api; - -import org.junit.Test; -import org.onap.sdc.toscaparser.api.common.JToscaException; -import org.onap.sdc.toscaparser.api.elements.DataType; -import org.onap.sdc.toscaparser.api.elements.PropertyDef; -import org.onap.sdc.toscaparser.api.elements.constraints.Schema; -import org.onap.sdc.toscaparser.api.parameters.Annotation; -import org.onap.sdc.toscaparser.api.parameters.Input; -import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; - -import java.io.File; -import java.util.*; -import java.util.stream.Collectors; - -import static org.hamcrest.CoreMatchers.containsString; -import static org.hamcrest.CoreMatchers.is; -import static org.hamcrest.core.IsNull.notNullValue; -import static org.junit.Assert.*; - -public class JToscaImportTest { - - @Test - public void testNoMissingTypeValidationError() throws JToscaException { - String fileStr = JToscaImportTest.class.getClassLoader().getResource("csars/sdc-onboarding_csar.csar") - .getFile(); - File file = new File(fileStr); - new ToscaTemplate(file.getAbsolutePath(), null, true, null); - List missingTypeErrors = ThreadLocalsHolder.getCollector().getValidationIssueReport().stream() - .filter(s -> s.contains("JE136")).collect(Collectors.toList()); - assertEquals(0, missingTypeErrors.size()); - } - - @Test - public void testNoStackOverFlowError() { - Exception jte = null; - try { - String fileStr = JToscaImportTest.class.getClassLoader().getResource("csars/sdc-onboarding_csar.csar") - .getFile(); - File file = new File(fileStr); - new ToscaTemplate(file.getAbsolutePath(), null, true, null); - } catch (Exception e) { - jte = e; - } - assertEquals(null, jte); - } - - @Test - public void testNoInvalidImports() throws JToscaException { - List fileNames = new ArrayList<>(); - fileNames.add("csars/tmpCSAR_Huawei_vSPGW_fixed.csar"); - fileNames.add("csars/sdc-onboarding_csar.csar"); - 
fileNames.add("csars/resource-Spgw-csar-ZTE.csar"); - - for (String fileName : fileNames) { - String fileStr = JToscaImportTest.class.getClassLoader().getResource(fileName).getFile(); - File file = new File(fileStr); - new ToscaTemplate(file.getAbsolutePath(), null, true, null); - List invalidImportErrors = ThreadLocalsHolder.getCollector().getValidationIssueReport().stream() - .filter(s -> s.contains("JE195")).collect(Collectors.toList()); - assertEquals(0, invalidImportErrors.size()); - } - } - - @Test - public void testParseAnnotations() throws JToscaException { - - String fileStr = JToscaImportTest.class.getClassLoader().getResource("csars/service-AdiodVmxVpeBvService-csar.csar").getFile(); - File file = new File(fileStr); - ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); - - List inputs = toscaTemplate.getInputs(); - assertNotNull(inputs); - assertTrue(inputs.stream().filter(i -> i.getAnnotations() != null).collect(Collectors.toList()).isEmpty()); - - inputs.forEach(Input::parseAnnotations); - assertTrue(!inputs.stream().filter(i -> i.getAnnotations() != null).collect(Collectors.toList()).isEmpty()); - } - - @Test - public void testGetInputsWithAndWithoutAnnotations() throws JToscaException { - - String fileStr = JToscaImportTest.class.getClassLoader().getResource("csars/service-AdiodVmxVpeBvService-csar.csar").getFile(); - File file = new File(fileStr); - ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); - List inputs = toscaTemplate.getInputs(); - assertNotNull(inputs); - assertTrue(inputs.stream().filter(i -> i.getAnnotations() != null).collect(Collectors.toList()).isEmpty()); - - inputs = toscaTemplate.getInputs(true); - assertNotNull(inputs); - validateInputsAnnotations(inputs); - - inputs = toscaTemplate.getInputs(false); - assertNotNull(inputs); - assertTrue(inputs.stream().filter(i -> i.getAnnotations() != null).collect(Collectors.toList()).isEmpty()); - } - - @Test - 
public void testGetPropertyNameTest() throws JToscaException { - - String fileStr = JToscaImportTest.class.getClassLoader().getResource("csars/service-AdiodVmxVpeBvService-csar.csar").getFile(); - File file = new File(fileStr); - ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); - NodeTemplate nodeTemplate = toscaTemplate.getNodeTemplates().get(0); - - ArrayList valueList = (ArrayList) nodeTemplate.getPropertyValueFromTemplatesByName("vmxvpfe_sriov41_0_port_vlanfilter"); - assertEquals(4, valueList.size()); - - assertEquals("vPE", (String) nodeTemplate.getPropertyValueFromTemplatesByName("nf_role")); - - assertNull(nodeTemplate.getPropertyValueFromTemplatesByName("test")); - } - - @Test - public void testGetParentNodeTemplateTest() throws JToscaException { - - String fileStr = JToscaImportTest.class.getClassLoader().getResource("csars/service-AdiodVmxVpeBvService-csar.csar").getFile(); - File file = new File(fileStr); - ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); - NodeTemplate nodeTemplate = toscaTemplate.getNodeTemplates().get(0); - //parent of this VF is service (null) - assertNull(nodeTemplate.getParentNodeTemplate()); - List children = nodeTemplate.getSubMappingToscaTemplate().getNodeTemplates(); - assertFalse(children.isEmpty()); - NodeTemplate cVFC = children.get(4); - //parent is the VF above - assertEquals("2017-488_ADIOD-vPE 0", cVFC.getParentNodeTemplate().getName()); - List children1 = cVFC.getSubMappingToscaTemplate().getNodeTemplates(); - assertFalse(children1.isEmpty()); - //parent is the CVFC above - assertEquals(cVFC, children1.get(0).getParentNodeTemplate()); - -/* - - TopologyTemplate tt = nodeTemplate.getOriginComponentTemplate(); - List groups = tt.getGroups(); - List policies = tt.getPolicies(); - - TopologyTemplate tt1 = cVFC.getOriginComponentTemplate(); - groups = tt.getGroups(); - policies = tt.getPolicies(); -*/ - - } - - @Test - public void 
testNullValueHasNoNullPointerException() throws JToscaException { - - String fileStr = JToscaImportTest.class.getClassLoader().getResource("csars/service-JennyVtsbcKarunaSvc-csar.csar").getFile(); - File file = new File(fileStr); - ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); - List inputs = toscaTemplate.getInputs(); - assertNotNull(inputs); - } - - @Test - public void testGetPolicyMetadata() throws JToscaException { - String fileStr = JToscaImportTest.class.getClassLoader().getResource("csars/service-NetworkCloudVnfServiceMock-csar.csar").getFile(); - File file = new File(fileStr); - ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); - ArrayList policies = toscaTemplate.getPolicies(); - assertNotNull(policies); - assertEquals(1, policies.size()); - assertEquals("org.openecomp.policies.External", policies.get(0).getType()); - assertEquals("adf03496-bf87-43cf-b20a-450e47cb44bd", policies.get(0).getMetaData().getOrDefault("UUID", "").toString()); - assertTrue(policies.get(0).getMetaData().getOrDefault("UUID_test", "").toString().isEmpty()); - } - - @Test - public void testGetPolicyMetadataObj() throws JToscaException { - String fileStr = JToscaImportTest.class.getClassLoader().getResource("csars/service-NetworkCloudVnfServiceMock-csar.csar").getFile(); - File file = new File(fileStr); - ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); - ArrayList policies = toscaTemplate.getPolicies(); - assertNotNull(policies); - assertEquals(1, policies.size()); - assertEquals("adf03496-bf87-43cf-b20a-450e47cb44bd", policies.get(0).getMetaDataObj().getAllProperties().getOrDefault("UUID", "").toString()); - assertTrue(policies.get(0).getMetaDataObj().getAllProperties().getOrDefault("name_test", "").toString().isEmpty()); - } - - private void validateInputsAnnotations(List inputs) { - List inputsWithAnnotations = inputs.stream().filter(i -> 
i.getAnnotations() != null) - .collect(Collectors.toList()); - assertTrue(!inputs.isEmpty()); - inputsWithAnnotations.stream().forEach(i -> validateAnnotations(i)); - } - - private void validateAnnotations(Input input) { - assertNotNull(input.getAnnotations()); - assertEquals(input.getAnnotations().size(), 1); - Annotation annotation = input.getAnnotations().get("source"); - assertEquals(annotation.getName(), "source"); - assertEquals(annotation.getType().toLowerCase(), "org.openecomp.annotations.source"); - assertNotNull(annotation.getProperties()); - Optional source_type = annotation.getProperties().stream() - .filter(p -> p.getName().equals("source_type")).findFirst(); - assertTrue(source_type.isPresent()); - assertEquals(source_type.get().getValue(), "HEAT"); - } - - private static final String TEST_DATATYPE_FILENAME = "csars/dataTypes-test-service.csar"; - private static final String TEST_DATATYPE_TEST1 = "TestType1"; - private static final String TEST_DATATYPE_TEST2 = "TestType2"; - private static final String TEST_DATATYPE_PROPERTY_STR = "strdata"; - private static final String TEST_DATATYPE_PROPERTY_INT = "intdata"; - private static final String TEST_DATATYPE_PROPERTY_LIST = "listdata"; - private static final String TEST_DATATYPE_PROPERTY_TYPE = "type"; - private static final String TEST_DATATYPE_PROPERTY_ENTRY_SCHEMA = "entry_schema"; - private static final String TEST_DATATYPE_TOSTRING = "data_types="; - - @Test - public void testGetDataType() throws JToscaException { - String fileStr = JToscaImportTest.class.getClassLoader().getResource(TEST_DATATYPE_FILENAME).getFile(); - File file = new File(fileStr); - ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); - HashSet dataTypes = toscaTemplate.getDataTypes(); - assertThat(dataTypes, notNullValue()); - assertThat(dataTypes.size(), is(2)); - - for (DataType dataType : dataTypes) { - LinkedHashMap properties; - PropertyDef property; - if 
(dataType.getType().equals(TEST_DATATYPE_TEST1)) { - properties = dataType.getAllProperties(); - property = properties.get(TEST_DATATYPE_PROPERTY_STR); - assertThat(property, notNullValue()); - assertThat(property.getName(), is(TEST_DATATYPE_PROPERTY_STR)); - assertThat(property.getSchema().get(TEST_DATATYPE_PROPERTY_TYPE), is(Schema.STRING)); - } - if (dataType.getType().equals(TEST_DATATYPE_TEST2)) { - properties = dataType.getAllProperties(); - property = properties.get(TEST_DATATYPE_PROPERTY_INT); - assertThat(property, notNullValue()); - assertThat(property.getName(), is(TEST_DATATYPE_PROPERTY_INT)); - assertThat(property.getSchema().get(TEST_DATATYPE_PROPERTY_TYPE), is(Schema.INTEGER)); - - property = properties.get(TEST_DATATYPE_PROPERTY_LIST); - assertThat(property, notNullValue()); - assertThat(property.getName(), is(TEST_DATATYPE_PROPERTY_LIST)); - assertThat(property.getSchema().get(TEST_DATATYPE_PROPERTY_TYPE), is(Schema.LIST)); - assertThat(property.getSchema().get(TEST_DATATYPE_PROPERTY_ENTRY_SCHEMA), is(TEST_DATATYPE_TEST1)); - - assertThat((LinkedHashMap) toscaTemplate.getTopologyTemplate().getCustomDefs().get(TEST_DATATYPE_TEST1), notNullValue()); - assertThat((LinkedHashMap) toscaTemplate.getTopologyTemplate().getCustomDefs().get(TEST_DATATYPE_TEST2), notNullValue()); - assertThat(toscaTemplate.toString(), containsString(TEST_DATATYPE_TOSTRING)); - } - } - - } - - @Test - public void testGetInputValidate() throws JToscaException { - String fileStr = JToscaImportTest.class.getClassLoader().getResource(TEST_DATATYPE_FILENAME).getFile(); - File file = new File(fileStr); - ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); - HashSet dataTypes = toscaTemplate.getDataTypes(); - assertThat(dataTypes, notNullValue()); - assertThat(dataTypes.size(), is(2)); - - for (DataType dataType : dataTypes) { - LinkedHashMap properties; - PropertyDef property; - if (dataType.getType().equals(TEST_DATATYPE_TEST1)) { - properties 
= dataType.getAllProperties(); - property = properties.get(TEST_DATATYPE_PROPERTY_STR); - assertThat(property, notNullValue()); - assertThat(property.getName(), is(TEST_DATATYPE_PROPERTY_STR)); - assertThat(property.getSchema().get(TEST_DATATYPE_PROPERTY_TYPE), is(Schema.STRING)); - } - if (dataType.getType().equals(TEST_DATATYPE_TEST2)) { - properties = dataType.getAllProperties(); - property = properties.get(TEST_DATATYPE_PROPERTY_INT); - assertThat(property, notNullValue()); - assertThat(property.getName(), is(TEST_DATATYPE_PROPERTY_INT)); - assertThat(property.getSchema().get(TEST_DATATYPE_PROPERTY_TYPE), is(Schema.INTEGER)); - - property = properties.get(TEST_DATATYPE_PROPERTY_LIST); - assertThat(property, notNullValue()); - assertThat(property.getName(), is(TEST_DATATYPE_PROPERTY_LIST)); - assertThat(property.getSchema().get(TEST_DATATYPE_PROPERTY_TYPE), is(Schema.LIST)); - assertThat(property.getSchema().get(TEST_DATATYPE_PROPERTY_ENTRY_SCHEMA), is(TEST_DATATYPE_TEST1)); - - assertThat((LinkedHashMap) toscaTemplate.getTopologyTemplate().getCustomDefs().get(TEST_DATATYPE_TEST1), notNullValue()); - assertThat((LinkedHashMap) toscaTemplate.getTopologyTemplate().getCustomDefs().get(TEST_DATATYPE_TEST2), notNullValue()); - assertThat(toscaTemplate.toString(), containsString(TEST_DATATYPE_TOSTRING)); - } - } - } -} diff --git a/src/test/java/org/onap/sdc/toscaparser/api/JToscaMetadataParse.java b/src/test/java/org/onap/sdc/toscaparser/api/JToscaMetadataParse.java deleted file mode 100644 index 2ec41b2..0000000 --- a/src/test/java/org/onap/sdc/toscaparser/api/JToscaMetadataParse.java +++ /dev/null @@ -1,127 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * SDC - * ================================================================================ - * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. 
- * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package org.onap.sdc.toscaparser.api; - -import static org.hamcrest.MatcherAssert.assertThat; -import static org.hamcrest.Matchers.containsInAnyOrder; -import static org.hamcrest.Matchers.hasSize; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; - -import java.io.File; -import java.net.URL; -import java.util.ArrayList; -import java.util.Collection; -import java.util.LinkedHashMap; - -import java.util.Map; -import org.junit.Test; -import org.onap.sdc.toscaparser.api.common.JToscaException; -import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue; -import org.onap.sdc.toscaparser.api.utils.JToscaErrorCodes; -import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; - -public class JToscaMetadataParse { - - @Test - public void testMetadataParsedCorrectly() throws JToscaException { - final File file = loadCsar("csars/csar_hello_world.csar"); - ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); - LinkedHashMap metadataProperties = toscaTemplate.getMetaProperties("TOSCA.meta"); - assertNotNull(metadataProperties); - Object entryDefinition = metadataProperties.get("Entry-Definitions"); - 
assertNotNull(entryDefinition); - assertEquals("tosca_helloworld.yaml", entryDefinition); - } - - @Test - public void noWarningsAfterParse() throws JToscaException { - final File file = loadCsar("csars/tmpCSAR_Huawei_vSPGW_fixed.csar"); - ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); - int validationIssuesCaught = ThreadLocalsHolder.getCollector().validationIssuesCaught(); - assertTrue(validationIssuesCaught == 0); - } - - @Test - public void requiredInputErrorsAfterParse() throws JToscaException { - final File file = loadCsar("csars/tmpCSAR_Huawei_vSPGW_without_required_inputs.csar"); - new ToscaTemplate(file.getAbsolutePath(), null, true, null); - - final Map validationIssues = ThreadLocalsHolder.getCollector() - .getValidationIssues(); - final Collection actualValidationIssueList = validationIssues.values(); - - final Collection expectedValidationIssueList = new ArrayList<>(); - final String errorCode = "JE003"; - final String errorFormat = "MissingRequiredFieldError: The required input \"%s\" was not provided"; - expectedValidationIssueList.add(new JToscaValidationIssue(errorCode - , String.format(errorFormat, "nf_naming_code"))); - expectedValidationIssueList.add(new JToscaValidationIssue(errorCode - , String.format(errorFormat, "nf_type"))); - expectedValidationIssueList.add(new JToscaValidationIssue(errorCode - , String.format(errorFormat, "nf_role"))); - expectedValidationIssueList.add(new JToscaValidationIssue(errorCode - , String.format(errorFormat, "min_instances"))); - expectedValidationIssueList.add(new JToscaValidationIssue(errorCode - , String.format(errorFormat, "max_instances"))); - expectedValidationIssueList.add(new JToscaValidationIssue(errorCode - , String.format(errorFormat, "nf_function"))); - - assertThat("The actual and the expected validation issue lists should have the same size" - , actualValidationIssueList, hasSize(expectedValidationIssueList.size()) - ); - - assertThat("The actual and the 
expected validation issue lists should be the same" - , actualValidationIssueList, containsInAnyOrder(expectedValidationIssueList.toArray(new JToscaValidationIssue[0])) - ); - } - - @Test - public void testEmptyCsar() throws JToscaException { - final File file = loadCsar("csars/emptyCsar.csar"); - try { - ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); - } catch (JToscaException e) { - assertTrue(e.getCode().equals(JToscaErrorCodes.INVALID_CSAR_FORMAT.getValue())); - } - int validationIssuesCaught = ThreadLocalsHolder.getCollector().validationIssuesCaught(); - assertTrue(validationIssuesCaught == 0); - } - - @Test - public void testEmptyPath() throws JToscaException { - String fileStr = JToscaMetadataParse.class.getClassLoader().getResource("").getFile(); - File file = new File(fileStr); - try { - ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); - } catch (JToscaException e) { - assertTrue(e.getCode().equals(JToscaErrorCodes.PATH_NOT_VALID.getValue())); - } - } - - private File loadCsar(final String csarFilePath) { - final URL resourceUrl = JToscaMetadataParse.class.getClassLoader().getResource(csarFilePath); - assertNotNull(String.format("Could not load CSAR file '%s'", csarFilePath), resourceUrl); - - return new File(resourceUrl.getFile()); - } -} diff --git a/src/test/java/org/onap/sdc/toscaparser/api/elements/CalculatePropertyByPathTest.java b/src/test/java/org/onap/sdc/toscaparser/api/elements/CalculatePropertyByPathTest.java deleted file mode 100644 index fd84d6e..0000000 --- a/src/test/java/org/onap/sdc/toscaparser/api/elements/CalculatePropertyByPathTest.java +++ /dev/null @@ -1,167 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * SDC - * ================================================================================ - * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved. 
- * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package org.onap.sdc.toscaparser.api.elements; - -import org.junit.BeforeClass; -import org.junit.Test; -import org.onap.sdc.toscaparser.api.JToscaImportTest; -import org.onap.sdc.toscaparser.api.NodeTemplate; -import org.onap.sdc.toscaparser.api.Property; -import org.onap.sdc.toscaparser.api.ToscaTemplate; -import org.onap.sdc.toscaparser.api.common.JToscaException; - -import java.io.File; -import java.net.URL; -import java.util.List; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; - -public class CalculatePropertyByPathTest { - private static ToscaTemplate toscaTemplate; - - @BeforeClass - public static void setUpClass() throws JToscaException { - URL scarUrl = JToscaImportTest.class.getClassLoader().getResource("csars/service-NetworkCloudVnfServiceMock-csar.csar"); - if (scarUrl != null) { - File file = new File(scarUrl.getFile()); - toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null); - } - - } - - @Test - public void testGetPropertyWhenPropertyHasListOfDataTypesAndPathIsNotEmpty() throws JToscaException { - NodeTemplate cp = toscaTemplate.getNodeTemplates().get(0) //Network Cloud VNF MOCK 0 - .getSubMappingToscaTemplate().getNodeTemplates().get(0) //abstract_testVM - 
.getSubMappingToscaTemplate().getNodeTemplates().get(0); //testVM_testVM_SRIOVtrunk1_port - - Property property = cp.getProperties().get("related_networks"); - List propertyValueList = property.getLeafPropertyValue("related_network_role"); - assertEquals(3, propertyValueList.size()); - assertTrue(propertyValueList.contains("cor_direct_2")); - assertTrue(propertyValueList.contains("sgi_direct_2")); - assertTrue(propertyValueList.contains("int_imbl_2")); - } - - @Test - public void testGetPropertyWhenPropertyHasDataTypeAndPathIsEmpty() { - NodeTemplate cp = toscaTemplate.getNodeTemplates().get(0) //Network Cloud VNF MOCK 0 - .getSubMappingToscaTemplate().getNodeTemplates().get(0) //abstract_testVM - .getSubMappingToscaTemplate().getNodeTemplates().get(1); //testVM_testVM_SRIOVNonTrunk0_port - - Property property = cp.getProperties().get("exCP_naming"); - List propertyValueList = property.getLeafPropertyValue(""); - assertTrue(propertyValueList.isEmpty()); - } - - @Test - public void testGetPropertyWhenPropertyHasSimpleTypeAndValueAsGetInputIsNotResolvedCorrectlyAndPathIsEmpty() { - NodeTemplate cp = toscaTemplate.getNodeTemplates().get(0) //Network Cloud VNF MOCK 0 - .getSubMappingToscaTemplate().getNodeTemplates().get(0) //abstract_testVM - .getSubMappingToscaTemplate().getNodeTemplates().get(1); //testVM_testVM_SRIOVNonTrunk0_port - - Property property = cp.getProperties().get("network"); - List propertyValueList = property.getLeafPropertyValue(""); - assertTrue(propertyValueList.isEmpty()); - } - - @Test - public void testGetPropertyWhenPropertyHasSimpleTypeAndPathIsEmpty() { - NodeTemplate cp = toscaTemplate.getNodeTemplates().get(0) //Network Cloud VNF MOCK 0 - .getSubMappingToscaTemplate().getNodeTemplates().get(0) //abstract_testVM - .getSubMappingToscaTemplate().getNodeTemplates().get(1); //testVM_testVM_SRIOVNonTrunk0_port - - Property property = cp.getProperties().get("subinterface_indicator"); - List propertyValueList = property.getLeafPropertyValue(""); - 
assertEquals(1, propertyValueList.size()); - assertEquals("false", propertyValueList.get(0)); - } - - - @Test - public void testGetPropertyWhenPropertyHasDataTypeAndPathIsNotEmpty() { - NodeTemplate cp = toscaTemplate.getNodeTemplates().get(0) //Network Cloud VNF MOCK 0 - .getSubMappingToscaTemplate().getNodeTemplates().get(0) //abstract_testVM - .getSubMappingToscaTemplate().getNodeTemplates().get(2); //testVM_testVM_OVS_port - - Property property = cp.getProperties().get("ip_requirements"); - List propertyValueList = property.getLeafPropertyValue("ip_version"); - assertEquals(1, propertyValueList.size()); - assertEquals("4", propertyValueList.get(0)); - } - - @Test - public void testGetPropertyWhenPropertyHasListOfDataTypesAndPathIsNull() { - NodeTemplate cp = toscaTemplate.getNodeTemplates().get(0) //Network Cloud VNF MOCK 0 - .getSubMappingToscaTemplate().getNodeTemplates().get(0) //abstract_testVM - .getSubMappingToscaTemplate().getNodeTemplates().get(2); //testVM_testVM_OVS_port - - Property property = cp.getProperties().get("ip_requirements"); - assertTrue(property.getLeafPropertyValue(null).isEmpty()); - } - - @Test - public void testGetPropertyWhenPropertyHasListOfDataTypesAndPathIsComplex() { - NodeTemplate cp = toscaTemplate.getNodeTemplates().get(0) //Network Cloud VNF MOCK 0 - .getSubMappingToscaTemplate().getNodeTemplates().get(0) //abstract_testVM - .getSubMappingToscaTemplate().getNodeTemplates().get(0); //testVM_testVM_SRIOVtrunk1_port - - Property property = cp.getProperties().get("ip_requirements"); - List propertyValueList = property.getLeafPropertyValue("ip_count_required#is_required"); - assertEquals(1, propertyValueList.size()); - assertEquals("false", propertyValueList.get(0)); - } - - @Test - public void testGetPropertyWhenPropertyHasListOfDataTypesAndPathIsWrong() { - NodeTemplate cp = toscaTemplate.getNodeTemplates().get(0) //Network Cloud VNF MOCK 0 - .getSubMappingToscaTemplate().getNodeTemplates().get(0) //abstract_testVM - 
.getSubMappingToscaTemplate().getNodeTemplates().get(0); //testVM_testVM_SRIOVtrunk1_port - - Property property = cp.getProperties().get("ip_requirements"); - List propertyValueList = property.getLeafPropertyValue("ip_count_required#is_required_1"); - assertEquals(0, propertyValueList.size()); - } - - @Test - public void testGetPropertyWhenPropertyHasDataTypeWithoutSchemaAndComplexPath() { - NodeTemplate cp = toscaTemplate.getNodeTemplates().get(0) //Network Cloud VNF MOCK 0 - .getSubMappingToscaTemplate().getNodeTemplates().get(0) //abstract_testVM - .getSubMappingToscaTemplate().getNodeTemplates().get(0); //testVM_testVM_SRIOVtrunk1_port - - Property property = cp.getProperties().get("mac_requirements"); - List propertyValueList = property.getLeafPropertyValue("mac_count_required#is_required"); - assertEquals(1, propertyValueList.size()); - assertEquals("false", propertyValueList.get(0)); - } - - @Test - public void testGetPropertyWhenPropertyHasDataTypeWithoutSchemaAndSimplePath() { - NodeTemplate cp = toscaTemplate.getNodeTemplates().get(0) //Network Cloud VNF MOCK 0 - .getSubMappingToscaTemplate().getNodeTemplates().get(0) //abstract_testVM - .getSubMappingToscaTemplate().getNodeTemplates().get(0); //testVM_testVM_SRIOVtrunk1_port - - Property property = cp.getProperties().get("mac_requirements"); - List propertyValueList = property.getLeafPropertyValue("mac_count_required"); - assertEquals(0, propertyValueList.size()); - } -} diff --git a/src/test/java/org/onap/sdc/toscaparser/api/elements/EntityTypeTest.java b/src/test/java/org/onap/sdc/toscaparser/api/elements/EntityTypeTest.java deleted file mode 100644 index d65de28..0000000 --- a/src/test/java/org/onap/sdc/toscaparser/api/elements/EntityTypeTest.java +++ /dev/null @@ -1,75 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * SDC - * ================================================================================ - * Copyright (C) 2019 AT&T Intellectual 
Property. All rights reserved. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============LICENSE_END========================================================= - */ - -package org.onap.sdc.toscaparser.api.elements; - -import org.junit.After; -import org.junit.Test; - -import java.util.HashMap; -import java.util.LinkedHashMap; -import java.util.Map; - -import static org.junit.Assert.assertEquals; - -public class EntityTypeTest { - - private static final Map origMap = EntityType.TOSCA_DEF; - - @Test - public void testUpdateDefinitions() throws Exception { - - Map testData = new HashMap<>(); - testData.put("tosca.nodes.nfv.VNF", "{derived_from=tosca.nodes.Root, properties={id={type=string, description=ID of this VNF}, vendor={type=string, description=name of the vendor who generate this VNF}, version={type=version, description=version of the software for this VNF}}, requirements=[{virtualLink={capability=tosca.capabilities.nfv.VirtualLinkable, relationship=tosca.relationships.nfv.VirtualLinksTo, node=tosca.nodes.nfv.VL}}]}"); - testData.put("tosca.nodes.nfv.VDU", "{derived_from=tosca.nodes.Compute, capabilities={high_availability={type=tosca.capabilities.nfv.HA}, virtualbinding={type=tosca.capabilities.nfv.VirtualBindable}, monitoring_parameter={type=tosca.capabilities.nfv.Metric}}, requirements=[{high_availability={capability=tosca.capabilities.nfv.HA, 
relationship=tosca.relationships.nfv.HA, node=tosca.nodes.nfv.VDU, occurrences=[0, 1]}}]}"); - testData.put("tosca.nodes.nfv.CP", "{derived_from=tosca.nodes.network.Port, properties={type={type=string, required=false}}, requirements=[{virtualLink={capability=tosca.capabilities.nfv.VirtualLinkable, relationship=tosca.relationships.nfv.VirtualLinksTo, node=tosca.nodes.nfv.VL}}, {virtualBinding={capability=tosca.capabilities.nfv.VirtualBindable, relationship=tosca.relationships.nfv.VirtualBindsTo, node=tosca.nodes.nfv.VDU}}], attributes={address={type=string}}}"); - testData.put("tosca.nodes.nfv.VL", "{derived_from=tosca.nodes.network.Network, properties={vendor={type=string, required=true, description=name of the vendor who generate this VL}}, capabilities={virtual_linkable={type=tosca.capabilities.nfv.VirtualLinkable}}}"); - testData.put("tosca.nodes.nfv.VL.ELine", "{derived_from=tosca.nodes.nfv.VL, capabilities={virtual_linkable={occurrences=2}}}"); - testData.put("tosca.nodes.nfv.VL.ELAN", "{derived_from=tosca.nodes.nfv.VL}"); - testData.put("tosca.nodes.nfv.VL.ETree", "{derived_from=tosca.nodes.nfv.VL}"); - testData.put("tosca.nodes.nfv.FP", "{derived_from=tosca.nodes.Root, properties={policy={type=string, required=false, description=name of the vendor who generate this VL}}, requirements=[{forwarder={capability=tosca.capabilities.nfv.Forwarder, relationship=tosca.relationships.nfv.ForwardsTo}}]}"); - testData.put("tosca.groups.nfv.VNFFG", "{derived_from=tosca.groups.Root, properties={vendor={type=string, required=true, description=name of the vendor who generate this VNFFG}, version={type=string, required=true, description=version of this VNFFG}, number_of_endpoints={type=integer, required=true, description=count of the external endpoints included in this VNFFG}, dependent_virtual_link={type=list, entry_schema={type=string}, required=true, description=Reference to a VLD used in this Forwarding Graph}, connection_point={type=list, entry_schema={type=string}, 
required=true, description=Reference to Connection Points forming the VNFFG}, constituent_vnfs={type=list, entry_schema={type=string}, required=true, description=Reference to a list of VNFD used in this VNF Forwarding Graph}}}"); - testData.put("tosca.relationships.nfv.VirtualLinksTo", "{derived_from=tosca.relationships.network.LinksTo, valid_target_types=[tosca.capabilities.nfv.VirtualLinkable]}"); - testData.put("tosca.relationships.nfv.VirtualBindsTo", "{derived_from=tosca.relationships.network.BindsTo, valid_target_types=[tosca.capabilities.nfv.VirtualBindable]}"); - testData.put("tosca.relationships.nfv.HA", "{derived_from=tosca.relationships.Root, valid_target_types=[tosca.capabilities.nfv.HA]}"); - testData.put("tosca.relationships.nfv.Monitor", "{derived_from=tosca.relationships.ConnectsTo, valid_target_types=[tosca.capabilities.nfv.Metric]}"); - testData.put("tosca.relationships.nfv.ForwardsTo", "{derived_from=tosca.relationships.root, valid_target_types=[tosca.capabilities.nfv.Forwarder]}"); - testData.put("tosca.capabilities.nfv.VirtualLinkable", "{derived_from=tosca.capabilities.network.Linkable}"); - testData.put("tosca.capabilities.nfv.VirtualBindable", "{derived_from=tosca.capabilities.network.Bindable}"); - testData.put("tosca.capabilities.nfv.HA", "{derived_from=tosca.capabilities.Root, valid_source_types=[tosca.nodes.nfv.VDU]}"); - testData.put("tosca.capabilities.nfv.HA.ActiveActive", "{derived_from=tosca.capabilities.nfv.HA}"); - testData.put("tosca.capabilities.nfv.HA.ActivePassive", "{derived_from=tosca.capabilities.nfv.HA}"); - testData.put("tosca.capabilities.nfv.Metric", "{derived_from=tosca.capabilities.Root}"); - testData.put("tosca.capabilities.nfv.Forwarder", "{derived_from=tosca.capabilities.Root}"); - - Map expectedDefMap = origMap; - expectedDefMap.putAll(testData); - EntityType.updateDefinitions("tosca_simple_profile_for_nfv_1_0_0"); - - assertEquals(expectedDefMap, EntityType.TOSCA_DEF); - - } - - @After - public void tearDown() 
throws Exception { - EntityType.TOSCA_DEF = (LinkedHashMap) origMap; - } - -} diff --git a/src/test/java/org/onap/sdc/toscaparser/api/functions/GetInputTest.java b/src/test/java/org/onap/sdc/toscaparser/api/functions/GetInputTest.java deleted file mode 100644 index 98e5102..0000000 --- a/src/test/java/org/onap/sdc/toscaparser/api/functions/GetInputTest.java +++ /dev/null @@ -1,96 +0,0 @@ -/*- - * ============LICENSE_START======================================================= - * Copyright (c) 2019 Fujitsu Limited. - * ================================================================================ - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * ============LICENSE_END========================================================= - */ -package org.onap.sdc.toscaparser.api.functions; - -import org.junit.Test; -import org.onap.sdc.toscaparser.api.*; -import org.onap.sdc.toscaparser.api.common.JToscaException; -import org.onap.sdc.toscaparser.api.elements.constraints.Schema; -import org.onap.sdc.toscaparser.api.parameters.Input; -import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder; - -import java.io.File; -import java.util.ArrayList; -import java.util.LinkedHashMap; -import java.util.List; - -import static org.hamcrest.CoreMatchers.is; -import static org.hamcrest.CoreMatchers.notNullValue; -import static org.junit.Assert.*; - -public class GetInputTest { - - private static final String TEST_FILENAME = "csars/listed_input.csar"; - private static final String TEST_FILENAME_NG = "csars/listed_input_ng.csar"; - private static final String TEST_PROPERTY_ROLE = "role"; - private static final String TEST_PROPERTY_LONGITUDE = "longitude"; - private static final String TEST_DEFAULT_VALUE = "dsvpn-hub"; - private static final String TEST_DESCRIPTION_VALUE = "This is used for SDWAN only"; - private static final String TEST_INPUT_TYPE = "type"; - private static final String TEST_INPUT_SCHEMA_TYPE = "tosca.datatypes.siteresource.site"; - private static final String TEST_TOSTRING = "get_input:[sites, 1, longitude]"; - private static final String TEST_INPUT_SITES = "sites"; - - @Test - public void validate() throws JToscaException { - String fileStr = JToscaImportTest.class.getClassLoader().getResource(TEST_FILENAME).getFile(); - File file = new File(fileStr); - ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null, false); - NodeTemplate nodeTemplate = toscaTemplate.getNodeTemplates().get(1).getSubMappingToscaTemplate().getNodeTemplates().get(0); - ArrayList inputs = toscaTemplate.getNodeTemplates().get(1).getSubMappingToscaTemplate().getInputs(); - LinkedHashMap properties = 
nodeTemplate.getProperties(); - assertThat(properties, notNullValue()); - assertThat(properties.size(), is(14)); - - Property property = properties.get(TEST_PROPERTY_ROLE); - assertThat(properties, notNullValue()); - assertThat(property.getName(), is(TEST_PROPERTY_ROLE)); - assertThat(property.getType(), is(Schema.STRING)); - assertThat(property.getDefault(), is(TEST_DEFAULT_VALUE)); - assertThat(property.getDescription(), is(TEST_DESCRIPTION_VALUE)); - GetInput getInput = (GetInput) property.getValue(); - assertThat(getInput.getEntrySchema().get(TEST_INPUT_TYPE).toString(), is(TEST_INPUT_SCHEMA_TYPE)); - - property = properties.get(TEST_PROPERTY_LONGITUDE); - assertThat(properties, notNullValue()); - assertThat(property.getName(), is(TEST_PROPERTY_LONGITUDE)); - assertThat(property.getValue().toString(), is(TEST_TOSTRING)); - getInput = (GetInput) property.getValue(); - ArrayList getInputArguments = getInput.getArguments(); - assertThat(getInputArguments.size(), is(3)); - assertThat(getInputArguments.get(0).toString(), is(TEST_INPUT_SITES)); - assertThat(getInputArguments.get(1).toString(), is("1")); - assertThat(getInputArguments.get(2).toString(), is(TEST_PROPERTY_LONGITUDE)); - - Input in = inputs.get(10); - assertThat(in.getEntrySchema().get(TEST_INPUT_TYPE), is(TEST_INPUT_SCHEMA_TYPE)); - assertThat(in.getName(), is(TEST_INPUT_SITES)); - assertThat(in.getType(), is(Input.LIST)); - } - - @Test - public void validate_ng() throws JToscaException { - //invalid file - String fileStr = JToscaImportTest.class.getClassLoader().getResource(TEST_FILENAME_NG).getFile(); - File file = new File(fileStr); - ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null, false); - - List issues = ThreadLocalsHolder.getCollector().getValidationIssueReport(); - assertTrue(issues.stream().anyMatch(x -> x.contains("JE282"))); - } -} diff --git a/src/test/resources/csars/csar_hello_world.csar b/src/test/resources/csars/csar_hello_world.csar deleted file 
mode 100644 index 43ffbbc..0000000 Binary files a/src/test/resources/csars/csar_hello_world.csar and /dev/null differ diff --git a/src/test/resources/csars/dataTypes-test-service.csar b/src/test/resources/csars/dataTypes-test-service.csar deleted file mode 100644 index b4de177..0000000 Binary files a/src/test/resources/csars/dataTypes-test-service.csar and /dev/null differ diff --git a/src/test/resources/csars/emptyCsar.csar b/src/test/resources/csars/emptyCsar.csar deleted file mode 100644 index 15cb0ec..0000000 Binary files a/src/test/resources/csars/emptyCsar.csar and /dev/null differ diff --git a/src/test/resources/csars/listed_input.csar b/src/test/resources/csars/listed_input.csar deleted file mode 100644 index 445b91a..0000000 Binary files a/src/test/resources/csars/listed_input.csar and /dev/null differ diff --git a/src/test/resources/csars/listed_input_ng.csar b/src/test/resources/csars/listed_input_ng.csar deleted file mode 100644 index 6b3402e..0000000 Binary files a/src/test/resources/csars/listed_input_ng.csar and /dev/null differ diff --git a/src/test/resources/csars/resource-Spgw-csar-ZTE.csar b/src/test/resources/csars/resource-Spgw-csar-ZTE.csar deleted file mode 100644 index 58c3ddd..0000000 Binary files a/src/test/resources/csars/resource-Spgw-csar-ZTE.csar and /dev/null differ diff --git a/src/test/resources/csars/sdc-onboarding_csar.csar b/src/test/resources/csars/sdc-onboarding_csar.csar deleted file mode 100644 index f12605d..0000000 Binary files a/src/test/resources/csars/sdc-onboarding_csar.csar and /dev/null differ diff --git a/src/test/resources/csars/service-AdiodVmxVpeBvService-csar.csar b/src/test/resources/csars/service-AdiodVmxVpeBvService-csar.csar deleted file mode 100644 index 28aa6f4..0000000 Binary files a/src/test/resources/csars/service-AdiodVmxVpeBvService-csar.csar and /dev/null differ diff --git a/src/test/resources/csars/service-JennyVtsbcKarunaSvc-csar.csar b/src/test/resources/csars/service-JennyVtsbcKarunaSvc-csar.csar 
deleted file mode 100644 index ee01780..0000000 Binary files a/src/test/resources/csars/service-JennyVtsbcKarunaSvc-csar.csar and /dev/null differ diff --git a/src/test/resources/csars/service-NetworkCloudVnfServiceMock-csar.csar b/src/test/resources/csars/service-NetworkCloudVnfServiceMock-csar.csar deleted file mode 100644 index aabf83c..0000000 Binary files a/src/test/resources/csars/service-NetworkCloudVnfServiceMock-csar.csar and /dev/null differ diff --git a/src/test/resources/csars/tmpCSAR_Huawei_vSPGW_fixed.csar b/src/test/resources/csars/tmpCSAR_Huawei_vSPGW_fixed.csar deleted file mode 100644 index 9dc29c7..0000000 Binary files a/src/test/resources/csars/tmpCSAR_Huawei_vSPGW_fixed.csar and /dev/null differ diff --git a/src/test/resources/csars/tmpCSAR_Huawei_vSPGW_without_required_inputs.csar b/src/test/resources/csars/tmpCSAR_Huawei_vSPGW_without_required_inputs.csar deleted file mode 100644 index 194fabb..0000000 Binary files a/src/test/resources/csars/tmpCSAR_Huawei_vSPGW_without_required_inputs.csar and /dev/null differ diff --git a/version.properties b/version.properties deleted file mode 100644 index 0f0fb2b..0000000 --- a/version.properties +++ /dev/null @@ -1,13 +0,0 @@ -########################################################### -# Versioning variables -# Note that these variables cannot be structured (e.g. : version.release or version.snapshot etc... ) -# because they are used in Jenkins, whose plug-in doesn't support - -major=1 -minor=6 -patch=0 - -base_version=${major}.${minor}.${patch} - -release_version=${base_version} -snapshot_version=${base_version}-SNAPSHOT -- cgit 1.2.3-korg