aboutsummaryrefslogtreecommitdiffstats
path: root/jtosca/src/main
diff options
context:
space:
mode:
Diffstat (limited to 'jtosca/src/main')
-rw-r--r--jtosca/src/main/java/org/onap/sdc/toscaparser/api/CapabilityAssignment.java174
-rw-r--r--jtosca/src/main/java/org/onap/sdc/toscaparser/api/CapabilityAssignments.java72
-rw-r--r--jtosca/src/main/java/org/onap/sdc/toscaparser/api/DataEntity.java457
-rw-r--r--jtosca/src/main/java/org/onap/sdc/toscaparser/api/EntityTemplate.java885
-rw-r--r--jtosca/src/main/java/org/onap/sdc/toscaparser/api/Group.java171
-rw-r--r--jtosca/src/main/java/org/onap/sdc/toscaparser/api/ImportsLoader.java748
-rw-r--r--jtosca/src/main/java/org/onap/sdc/toscaparser/api/NodeTemplate.java824
-rw-r--r--jtosca/src/main/java/org/onap/sdc/toscaparser/api/Policy.java232
-rw-r--r--jtosca/src/main/java/org/onap/sdc/toscaparser/api/Property.java401
-rw-r--r--jtosca/src/main/java/org/onap/sdc/toscaparser/api/RelationshipTemplate.java227
-rw-r--r--jtosca/src/main/java/org/onap/sdc/toscaparser/api/Repository.java137
-rw-r--r--jtosca/src/main/java/org/onap/sdc/toscaparser/api/RequirementAssignment.java111
-rw-r--r--jtosca/src/main/java/org/onap/sdc/toscaparser/api/RequirementAssignments.java59
-rw-r--r--jtosca/src/main/java/org/onap/sdc/toscaparser/api/SubstitutionMappings.java539
-rw-r--r--jtosca/src/main/java/org/onap/sdc/toscaparser/api/TopologyTemplate.java866
-rw-r--r--jtosca/src/main/java/org/onap/sdc/toscaparser/api/ToscaGraph.java129
-rw-r--r--jtosca/src/main/java/org/onap/sdc/toscaparser/api/ToscaTemplate.java1267
-rw-r--r--jtosca/src/main/java/org/onap/sdc/toscaparser/api/Triggers.java201
-rw-r--r--jtosca/src/main/java/org/onap/sdc/toscaparser/api/UnsupportedType.java101
-rw-r--r--jtosca/src/main/java/org/onap/sdc/toscaparser/api/common/JToscaException.java47
-rw-r--r--jtosca/src/main/java/org/onap/sdc/toscaparser/api/common/JToscaValidationIssue.java75
-rw-r--r--jtosca/src/main/java/org/onap/sdc/toscaparser/api/common/TOSCAException.java58
-rw-r--r--jtosca/src/main/java/org/onap/sdc/toscaparser/api/common/ValidationIssueCollector.java57
-rw-r--r--jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/ArtifactTypeDef.java121
-rw-r--r--jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/AttributeDef.java60
-rw-r--r--jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/CapabilityTypeDef.java240
-rw-r--r--jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/DataType.java136
-rw-r--r--jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/EntityType.java436
-rw-r--r--jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/GroupType.java263
-rw-r--r--jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/InterfacesDef.java283
-rw-r--r--jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/Metadata.java62
-rw-r--r--jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/NodeType.java549
-rw-r--r--jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/PolicyType.java309
-rw-r--r--jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/PortSpec.java177
-rw-r--r--jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/PropertyDef.java249
-rw-r--r--jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/RelationshipType.java121
-rw-r--r--jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnit.java287
-rw-r--r--jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitFrequency.java39
-rw-r--r--jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitSize.java43
-rw-r--r--jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitTime.java37
-rw-r--r--jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/StatefulEntityType.java234
-rw-r--r--jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/TypeValidation.java173
-rw-r--r--jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Constraint.java309
-rw-r--r--jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Equal.java77
-rw-r--r--jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/GreaterOrEqual.java130
-rw-r--r--jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/GreaterThan.java120
-rw-r--r--jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/InRange.java186
-rw-r--r--jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Length.java100
-rw-r--r--jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/LessOrEqual.java124
-rw-r--r--jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/LessThan.java121
-rw-r--r--jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/MaxLength.java110
-rw-r--r--jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/MinLength.java109
-rw-r--r--jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Pattern.java116
-rw-r--r--jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Schema.java309
-rw-r--r--jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/ValidValues.java99
-rw-r--r--jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/enums/FileSize.java32
-rw-r--r--jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/enums/ToscaElementNames.java40
-rw-r--r--jtosca/src/main/java/org/onap/sdc/toscaparser/api/extensions/ExtTools.java204
-rw-r--r--jtosca/src/main/java/org/onap/sdc/toscaparser/api/functions/Concat.java97
-rw-r--r--jtosca/src/main/java/org/onap/sdc/toscaparser/api/functions/Function.java259
-rw-r--r--jtosca/src/main/java/org/onap/sdc/toscaparser/api/functions/GetAttribute.java544
-rw-r--r--jtosca/src/main/java/org/onap/sdc/toscaparser/api/functions/GetInput.java203
-rw-r--r--jtosca/src/main/java/org/onap/sdc/toscaparser/api/functions/GetOperationOutput.java243
-rw-r--r--jtosca/src/main/java/org/onap/sdc/toscaparser/api/functions/GetProperty.java639
-rw-r--r--jtosca/src/main/java/org/onap/sdc/toscaparser/api/functions/Token.java130
-rw-r--r--jtosca/src/main/java/org/onap/sdc/toscaparser/api/parameters/Annotation.java98
-rw-r--r--jtosca/src/main/java/org/onap/sdc/toscaparser/api/parameters/Input.java199
-rw-r--r--jtosca/src/main/java/org/onap/sdc/toscaparser/api/parameters/Output.java129
-rw-r--r--jtosca/src/main/java/org/onap/sdc/toscaparser/api/prereq/CSAR.java790
-rw-r--r--jtosca/src/main/java/org/onap/sdc/toscaparser/api/utils/CopyUtils.java50
-rw-r--r--jtosca/src/main/java/org/onap/sdc/toscaparser/api/utils/DumpUtils.java68
-rw-r--r--jtosca/src/main/java/org/onap/sdc/toscaparser/api/utils/JToscaErrorCodes.java52
-rw-r--r--jtosca/src/main/java/org/onap/sdc/toscaparser/api/utils/TOSCAVersionProperty.java209
-rw-r--r--jtosca/src/main/java/org/onap/sdc/toscaparser/api/utils/ThreadLocalsHolder.java45
-rw-r--r--jtosca/src/main/java/org/onap/sdc/toscaparser/api/utils/UrlUtils.java145
-rw-r--r--jtosca/src/main/java/org/onap/sdc/toscaparser/api/utils/ValidateUtils.java439
-rw-r--r--jtosca/src/main/resources/TOSCA_definition_1_0.yaml971
-rw-r--r--jtosca/src/main/resources/extensions/TOSCA_simple_yaml_definition_1_0_0/TOSCA_simple_yaml_definition_1_0_0.py19
-rw-r--r--jtosca/src/main/resources/extensions/TOSCA_simple_yaml_definition_1_0_0/TOSCA_simple_yaml_definition_1_0_0.yaml240
-rw-r--r--jtosca/src/main/resources/extensions/nfv/TOSCA_nfv_definition_1_0.yaml240
-rw-r--r--jtosca/src/main/resources/extensions/nfv/nfv.py19
81 files changed, 19671 insertions, 0 deletions
diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/CapabilityAssignment.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/CapabilityAssignment.java
new file mode 100644
index 0000000..bb7b47d
--- /dev/null
+++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/CapabilityAssignment.java
@@ -0,0 +1,174 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.sdc.toscaparser.api;
+
+import java.util.ArrayList;
+import java.util.LinkedHashMap;
+import java.util.Map;
+
+import org.onap.sdc.toscaparser.api.elements.CapabilityTypeDef;
+import org.onap.sdc.toscaparser.api.elements.PropertyDef;
+
+public class CapabilityAssignment {
+
+ private String name;
+ private LinkedHashMap<String, Object> _properties;
+ private CapabilityTypeDef _definition;
+ private LinkedHashMap<String, Object> _customDef;
+
+ public CapabilityAssignment(String cname,
+ LinkedHashMap<String, Object> cproperties,
+ CapabilityTypeDef cdefinition, LinkedHashMap<String, Object> customDef) {
+ name = cname;
+ _properties = cproperties;
+ _definition = cdefinition;
+ _customDef = customDef;
+ }
+
+ /**
+ * Get the properties list for capability
+ *
+ * @return list of property objects for capability
+ */
+ public ArrayList<Property> getPropertiesObjects() {
+ // Return a list of property objects
+ ArrayList<Property> properties = new ArrayList<Property>();
+ LinkedHashMap<String, Object> props = _properties;
+ if (props != null) {
+ for (Map.Entry<String, Object> me : props.entrySet()) {
+ String pname = me.getKey();
+ Object pvalue = me.getValue();
+
+ LinkedHashMap<String, PropertyDef> propsDef = _definition.getPropertiesDef();
+ if (propsDef != null) {
+ PropertyDef pd = (PropertyDef) propsDef.get(pname);
+ if (pd != null) {
+ properties.add(new Property(pname, pvalue, pd.getSchema(), _customDef));
+ }
+ }
+ }
+ }
+ return properties;
+ }
+
+ /**
+ * Get the map of properties
+ *
+ * @return map of all properties contains dictionary of property name and property object
+ */
+ public LinkedHashMap<String, Property> getProperties() {
+ // Return a dictionary of property name-object pairs
+ LinkedHashMap<String, Property> npps = new LinkedHashMap<>();
+ for (Property p : getPropertiesObjects()) {
+ npps.put(p.getName(), p);
+ }
+ return npps;
+ }
+
+    /**
+     * Get the property value by name
+     *
+     * @param pname - the property name for capability
+     * @return the property value for this name, or null if absent
+     */
+    public Object getPropertyValue(String pname) {
+        // Return the value of a given property name (was wrongly keyed by the capability name)
+        LinkedHashMap<String, Property> props = getProperties();
+        if (props != null && props.get(pname) != null) {
+            return props.get(pname).getValue();
+        }
+        return null;
+    }
+
+ /**
+ * Get the name for capability
+ *
+ * @return the name for capability
+ */
+ public String getName() {
+ return name;
+ }
+
+ /**
+ * Get the definition for capability
+ *
+ * @return CapabilityTypeDef - contain definition for capability
+ */
+ public CapabilityTypeDef getDefinition() {
+ return _definition;
+ }
+
+    /**
+     * Set the property for capability
+     *
+     * @param pname - the property name for capability to set
+     * @param pvalue - the property value for capability to set
+     */
+    public void setProperty(String pname, Object pvalue) {
+        _properties.put(pname, pvalue);
+    }
+
+ @Override
+ public String toString() {
+ return "CapabilityAssignment{" +
+ "name='" + name + '\'' +
+ ", _properties=" + _properties +
+ ", _definition=" + _definition +
+ '}';
+ }
+}
+
+/*python
+
+from toscaparser.properties import Property
+
+
+class CapabilityAssignment(object):
+ '''TOSCA built-in capabilities type.'''
+
+ def __init__(self, name, properties, definition):
+ self.name = name
+ self._properties = properties
+ self.definition = definition
+
+ def get_properties_objects(self):
+ '''Return a list of property objects.'''
+ properties = []
+ props = self._properties
+ if props:
+ for name, value in props.items():
+ props_def = self.definition.get_properties_def()
+ if props_def and name in props_def:
+ properties.append(Property(name, value,
+ props_def[name].schema))
+ return properties
+
+ def get_properties(self):
+ '''Return a dictionary of property name-object pairs.'''
+ return {prop.name: prop
+ for prop in self.get_properties_objects()}
+
+ def get_property_value(self, name):
+ '''Return the value of a given property name.'''
+ props = self.get_properties()
+ if props and name in props:
+ return props[name].value
+*/
diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/CapabilityAssignments.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/CapabilityAssignments.java
new file mode 100644
index 0000000..28ada96
--- /dev/null
+++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/CapabilityAssignments.java
@@ -0,0 +1,72 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.sdc.toscaparser.api;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
+
+public class CapabilityAssignments {
+
+ private Map<String, CapabilityAssignment> capabilityAssignments;
+
+ public CapabilityAssignments(Map<String, CapabilityAssignment> capabilityAssignments) {
+ this.capabilityAssignments = capabilityAssignments != null ? new HashMap<>(capabilityAssignments) : new HashMap<>();
+ }
+
+ /**
+ * Get all capability assignments for node template.<br>
+ * This object can be either the original one, holding all capability assignments for this node template,or a filtered one, holding a filtered subset.<br>
+ *
+ * @return list of capability assignments for the node template. <br>
+ * If there are no capability assignments, empty list is returned.
+ */
+ public List<CapabilityAssignment> getAll() {
+ return new ArrayList<>(capabilityAssignments.values());
+ }
+
+ /**
+ * Filter capability assignments by capability tosca type.
+ *
+ * @param type - The tosca type of capability assignments.
+ * @return CapabilityAssignments object, containing capability assignments of this type.<br>
+ * If no such found, filtering will result in an empty collection.
+ */
+ public CapabilityAssignments getCapabilitiesByType(String type) {
+ Map<String, CapabilityAssignment> capabilityAssignmentsMap = capabilityAssignments.entrySet().stream()
+ .filter(cap -> cap.getValue().getDefinition().getType().equals(type)).collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
+
+ return new CapabilityAssignments(capabilityAssignmentsMap);
+ }
+
+ /**
+ * Get capability assignment by capability name.
+ *
+ * @param name - The name of capability assignment
+ * @return capability assignment with this name, or null if no such capability assignment was found.
+ */
+ public CapabilityAssignment getCapabilityByName(String name) {
+ return capabilityAssignments.get(name);
+ }
+
+}
diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/DataEntity.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/DataEntity.java
new file mode 100644
index 0000000..e95fe72
--- /dev/null
+++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/DataEntity.java
@@ -0,0 +1,457 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.sdc.toscaparser.api;
+
+import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue;
+import org.onap.sdc.toscaparser.api.elements.DataType;
+import org.onap.sdc.toscaparser.api.elements.PortSpec;
+import org.onap.sdc.toscaparser.api.elements.PropertyDef;
+import org.onap.sdc.toscaparser.api.elements.ScalarUnitFrequency;
+import org.onap.sdc.toscaparser.api.elements.ScalarUnitSize;
+import org.onap.sdc.toscaparser.api.elements.ScalarUnitTime;
+import org.onap.sdc.toscaparser.api.elements.constraints.Constraint;
+import org.onap.sdc.toscaparser.api.elements.constraints.Schema;
+import org.onap.sdc.toscaparser.api.functions.Function;
+import org.onap.sdc.toscaparser.api.utils.TOSCAVersionProperty;
+import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder;
+import org.onap.sdc.toscaparser.api.utils.ValidateUtils;
+
+import java.util.ArrayList;
+import java.util.LinkedHashMap;
+import java.util.List;
+
+public class DataEntity {
+ // A complex data value entity
+
+ private LinkedHashMap<String, Object> customDef;
+ private DataType dataType;
+ private LinkedHashMap<String, PropertyDef> schema;
+ private Object value;
+ private String propertyName;
+
+ public DataEntity(String _dataTypeName, Object _valueDict,
+ LinkedHashMap<String, Object> _customDef, String _propName) {
+
+ customDef = _customDef;
+ dataType = new DataType(_dataTypeName, _customDef);
+ schema = dataType.getAllProperties();
+ value = _valueDict;
+ propertyName = _propName;
+ }
+
+ @SuppressWarnings("unchecked")
+ public Object validate() {
+ // Validate the value by the definition of the datatype
+
+ // A datatype can not have both 'type' and 'properties' definitions.
+ // If the datatype has 'type' definition
+ if (dataType.getValueType() != null) {
+ value = DataEntity.validateDatatype(dataType.getValueType(), value, null, customDef, null);
+ Schema schemaCls = new Schema(propertyName, dataType.getDefs());
+ for (Constraint constraint : schemaCls.getConstraints()) {
+ constraint.validate(value);
+ }
+ }
+ // If the datatype has 'properties' definition
+ else {
+ if (!(value instanceof LinkedHashMap)) {
+ //ERROR under investigation
+ String checkedVal = value != null ? value.toString() : null;
+
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE001", String.format(
+ "TypeMismatchError: \"%s\" is not a map. The type is \"%s\"",
+ checkedVal, dataType.getType())));
+
+ if (value instanceof List && ((List) value).size() > 0) {
+ value = ((List) value).get(0);
+ }
+
+ if (!(value instanceof LinkedHashMap)) {
+ return value;
+ }
+ }
+
+
+ LinkedHashMap<String, Object> valueDict = (LinkedHashMap<String, Object>) value;
+ ArrayList<String> allowedProps = new ArrayList<>();
+ ArrayList<String> requiredProps = new ArrayList<>();
+ LinkedHashMap<String, Object> defaultProps = new LinkedHashMap<>();
+ if (schema != null) {
+ allowedProps.addAll(schema.keySet());
+ for (String name : schema.keySet()) {
+ PropertyDef propDef = schema.get(name);
+ if (propDef.isRequired()) {
+ requiredProps.add(name);
+ }
+ if (propDef.getDefault() != null) {
+ defaultProps.put(name, propDef.getDefault());
+ }
+ }
+ }
+
+ // check allowed field
+ for (String valueKey : valueDict.keySet()) {
+ //1710 devlop JSON validation
+ if (!("json").equals(dataType.getType()) && !allowedProps.contains(valueKey)) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE100", String.format(
+ "UnknownFieldError: Data value of type \"%s\" contains unknown field \"%s\"",
+ dataType.getType(), valueKey)));
+ }
+ }
+
+ // check default field
+ for (String defKey : defaultProps.keySet()) {
+ Object defValue = defaultProps.get(defKey);
+ if (valueDict.get(defKey) == null) {
+ valueDict.put(defKey, defValue);
+ }
+
+ }
+
+ // check missing field
+ ArrayList<String> missingProp = new ArrayList<>();
+ for (String reqKey : requiredProps) {
+ if (!valueDict.keySet().contains(reqKey)) {
+ missingProp.add(reqKey);
+ }
+ }
+ if (missingProp.size() > 0) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE003", String.format(
+ "MissingRequiredFieldError: Data value of type \"%s\" is missing required field(s) \"%s\"",
+ dataType.getType(), missingProp.toString())));
+ }
+
+ // check every field
+ for (String vname : valueDict.keySet()) {
+ Object vvalue = valueDict.get(vname);
+ LinkedHashMap<String, Object> schemaName = _findSchema(vname);
+ if (schemaName == null) {
+ continue;
+ }
+ Schema propSchema = new Schema(vname, schemaName);
+ // check if field value meets type defined
+ DataEntity.validateDatatype(propSchema.getType(),
+ vvalue,
+ propSchema.getEntrySchema(),
+ customDef,
+ null);
+
+ // check if field value meets constraints defined
+ if (propSchema.getConstraints() != null) {
+ for (Constraint constraint : propSchema.getConstraints()) {
+ if (vvalue instanceof ArrayList) {
+ for (Object val : (ArrayList<Object>) vvalue) {
+ constraint.validate(val);
+ }
+ } else {
+ constraint.validate(vvalue);
+ }
+ }
+ }
+ }
+ }
+ return value;
+ }
+
+ private LinkedHashMap<String, Object> _findSchema(String name) {
+ if (schema != null && schema.get(name) != null) {
+ return schema.get(name).getSchema();
+ }
+ return null;
+ }
+
+    public static Object validateDatatype(String type,
+                                          Object value,
+                                          LinkedHashMap<String, Object> entrySchema,
+                                          LinkedHashMap<String, Object> customDef,
+                                          String propName) {
+        // Validate value with given type
+
+        // If type is list or map, validate its entry by entry_schema(if defined)
+        // If type is a user-defined complex datatype, custom_def is required.
+
+        if (Function.isFunction(value)) {
+            return value;
+        } else if (type == null) {
+            //NOT ANALYZED
+            ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE002", String.format(
+                    "MissingType: Type is missing for value \"%s\"",
+                    String.valueOf(value)))); // null-safe: value may be null here, value.toString() would NPE
+            return value;
+        } else if (type.equals(Schema.STRING)) {
+            return ValidateUtils.validateString(value);
+        } else if (type.equals(Schema.INTEGER)) {
+            return ValidateUtils.validateInteger(value);
+        } else if (type.equals(Schema.FLOAT)) {
+            return ValidateUtils.validateFloat(value);
+        } else if (type.equals(Schema.NUMBER)) {
+            return ValidateUtils.validateNumeric(value);
+        } else if (type.equals(Schema.BOOLEAN)) {
+            return ValidateUtils.validateBoolean(value);
+        } else if (type.equals(Schema.RANGE)) {
+            return ValidateUtils.validateRange(value);
+        } else if (type.equals(Schema.TIMESTAMP)) {
+            ValidateUtils.validateTimestamp(value);
+            return value;
+        } else if (type.equals(Schema.LIST)) {
+            ValidateUtils.validateList(value);
+            if (entrySchema != null) {
+                DataEntity.validateEntry(value, entrySchema, customDef);
+            }
+            return value;
+        } else if (type.equals(Schema.SCALAR_UNIT_SIZE)) {
+            return (new ScalarUnitSize(value)).validateScalarUnit();
+        } else if (type.equals(Schema.SCALAR_UNIT_FREQUENCY)) {
+            return (new ScalarUnitFrequency(value)).validateScalarUnit();
+        } else if (type.equals(Schema.SCALAR_UNIT_TIME)) {
+            return (new ScalarUnitTime(value)).validateScalarUnit();
+        } else if (type.equals(Schema.VERSION)) {
+            return (new TOSCAVersionProperty(value.toString())).getVersion();
+        } else if (type.equals(Schema.MAP)) {
+            ValidateUtils.validateMap(value);
+            if (entrySchema != null) {
+                DataEntity.validateEntry(value, entrySchema, customDef);
+            }
+            return value;
+        } else if (type.equals(Schema.PORTSPEC)) {
+            // TODO(TBD) bug 1567063, validate source & target as PortDef type
+            // as complex types not just as integers
+            PortSpec.validateAdditionalReq(value, propName, customDef);
+        } else {
+            DataEntity data = new DataEntity(type, value, customDef, null);
+            return data.validate();
+        }
+
+        return value;
+    }
+
+ @SuppressWarnings("unchecked")
+ public static Object validateEntry(Object value,
+ LinkedHashMap<String, Object> entrySchema,
+ LinkedHashMap<String, Object> customDef) {
+
+ // Validate entries for map and list
+ Schema schema = new Schema(null, entrySchema);
+ Object valueob = value;
+ ArrayList<Object> valueList = null;
+ if (valueob instanceof LinkedHashMap) {
+ valueList = new ArrayList<Object>(((LinkedHashMap<String, Object>) valueob).values());
+ } else if (valueob instanceof ArrayList) {
+ valueList = (ArrayList<Object>) valueob;
+ }
+ if (valueList != null) {
+ for (Object v : valueList) {
+ DataEntity.validateDatatype(schema.getType(), v, schema.getEntrySchema(), customDef, null);
+ if (schema.getConstraints() != null) {
+ for (Constraint constraint : schema.getConstraints()) {
+ constraint.validate(v);
+ }
+ }
+ }
+ }
+ return value;
+ }
+
+ @Override
+ public String toString() {
+ return "DataEntity{" +
+ "customDef=" + customDef +
+ ", dataType=" + dataType +
+ ", schema=" + schema +
+ ", value=" + value +
+ ", propertyName='" + propertyName + '\'' +
+ '}';
+ }
+}
+
+/*python
+
+from toscaparser.common.exception import ValidationIssueCollector
+from toscaparser.common.exception import MissingRequiredFieldError
+from toscaparser.common.exception import TypeMismatchError
+from toscaparser.common.exception import UnknownFieldError
+from toscaparser.elements.constraints import Schema
+from toscaparser.elements.datatype import DataType
+from toscaparser.elements.portspectype import PortSpec
+from toscaparser.elements.scalarunit import ScalarUnit_Frequency
+from toscaparser.elements.scalarunit import ScalarUnit_Size
+from toscaparser.elements.scalarunit import ScalarUnit_Time
+from toscaparser.utils.gettextutils import _
+from toscaparser.utils import validateutils
+
+
+class DataEntity(object):
+ '''A complex data value entity.'''
+
+ def __init__(self, datatypename, value_dict, custom_def=None,
+ prop_name=None):
+ self.custom_def = custom_def
+ self.datatype = DataType(datatypename, custom_def)
+ self.schema = self.datatype.get_all_properties()
+ self.value = value_dict
+ self.property_name = prop_name
+
+ def validate(self):
+ '''Validate the value by the definition of the datatype.'''
+
+ # A datatype can not have both 'type' and 'properties' definitions.
+ # If the datatype has 'type' definition
+ if self.datatype.value_type:
+ self.value = DataEntity.validate_datatype(self.datatype.value_type,
+ self.value,
+ None,
+ self.custom_def)
+ schema = Schema(self.property_name, self.datatype.defs)
+ for constraint in schema.constraints:
+ constraint.validate(self.value)
+ # If the datatype has 'properties' definition
+ else:
+ if not isinstance(self.value, dict):
+ ValidationIssueCollector.appendException(
+ TypeMismatchError(what=self.value,
+ type=self.datatype.type))
+ allowed_props = []
+ required_props = []
+ default_props = {}
+ if self.schema:
+ allowed_props = self.schema.keys()
+ for name, prop_def in self.schema.items():
+ if prop_def.required:
+ required_props.append(name)
+ if prop_def.default:
+ default_props[name] = prop_def.default
+
+ # check allowed field
+ for value_key in list(self.value.keys()):
+ if value_key not in allowed_props:
+ ValidationIssueCollector.appendException(
+ UnknownFieldError(what=(_('Data value of type "%s"')
+ % self.datatype.type),
+ field=value_key))
+
+ # check default field
+ for def_key, def_value in list(default_props.items()):
+ if def_key not in list(self.value.keys()):
+ self.value[def_key] = def_value
+
+ # check missing field
+ missingprop = []
+ for req_key in required_props:
+ if req_key not in list(self.value.keys()):
+ missingprop.append(req_key)
+ if missingprop:
+ ValidationIssueCollector.appendException(
+ MissingRequiredFieldError(
+ what=(_('Data value of type "%s"')
+ % self.datatype.type), required=missingprop))
+
+ # check every field
+ for name, value in list(self.value.items()):
+ schema_name = self._find_schema(name)
+ if not schema_name:
+ continue
+ prop_schema = Schema(name, schema_name)
+ # check if field value meets type defined
+ DataEntity.validate_datatype(prop_schema.type, value,
+ prop_schema.entry_schema,
+ self.custom_def)
+ # check if field value meets constraints defined
+ if prop_schema.constraints:
+ for constraint in prop_schema.constraints:
+ if isinstance(value, list):
+ for val in value:
+ constraint.validate(val)
+ else:
+ constraint.validate(value)
+
+ return self.value
+
+ def _find_schema(self, name):
+ if self.schema and name in self.schema.keys():
+ return self.schema[name].schema
+
+ @staticmethod
+ def validate_datatype(type, value, entry_schema=None, custom_def=None,
+ prop_name=None):
+ '''Validate value with given type.
+
+ If type is list or map, validate its entry by entry_schema(if defined)
+ If type is a user-defined complex datatype, custom_def is required.
+ '''
+ from toscaparser.functions import is_function
+ if is_function(value):
+ return value
+ if type == Schema.STRING:
+ return validateutils.validate_string(value)
+ elif type == Schema.INTEGER:
+ return validateutils.validate_integer(value)
+ elif type == Schema.FLOAT:
+ return validateutils.validate_float(value)
+ elif type == Schema.NUMBER:
+ return validateutils.validate_numeric(value)
+ elif type == Schema.BOOLEAN:
+ return validateutils.validate_boolean(value)
+ elif type == Schema.RANGE:
+ return validateutils.validate_range(value)
+ elif type == Schema.TIMESTAMP:
+ validateutils.validate_timestamp(value)
+ return value
+ elif type == Schema.LIST:
+ validateutils.validate_list(value)
+ if entry_schema:
+ DataEntity.validate_entry(value, entry_schema, custom_def)
+ return value
+ elif type == Schema.SCALAR_UNIT_SIZE:
+ return ScalarUnit_Size(value).validate_scalar_unit()
+ elif type == Schema.SCALAR_UNIT_FREQUENCY:
+ return ScalarUnit_Frequency(value).validate_scalar_unit()
+ elif type == Schema.SCALAR_UNIT_TIME:
+ return ScalarUnit_Time(value).validate_scalar_unit()
+ elif type == Schema.VERSION:
+ return validateutils.TOSCAVersionProperty(value).get_version()
+ elif type == Schema.MAP:
+ validateutils.validate_map(value)
+ if entry_schema:
+ DataEntity.validate_entry(value, entry_schema, custom_def)
+ return value
+ elif type == Schema.PORTSPEC:
+ # tODO(TBD) bug 1567063, validate source & target as PortDef type
+ # as complex types not just as integers
+ PortSpec.validate_additional_req(value, prop_name, custom_def)
+ else:
+ data = DataEntity(type, value, custom_def)
+ return data.validate()
+
+ @staticmethod
+ def validate_entry(value, entry_schema, custom_def=None):
+ '''Validate entries for map and list.'''
+ schema = Schema(None, entry_schema)
+ valuelist = value
+ if isinstance(value, dict):
+ valuelist = list(value.values())
+ for v in valuelist:
+ DataEntity.validate_datatype(schema.type, v, schema.entry_schema,
+ custom_def)
+ if schema.constraints:
+ for constraint in schema.constraints:
+ constraint.validate(v)
+ return value
+*/
diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/EntityTemplate.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/EntityTemplate.java
new file mode 100644
index 0000000..93bfe2b
--- /dev/null
+++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/EntityTemplate.java
@@ -0,0 +1,885 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.sdc.toscaparser.api;
+
+import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue;
+import org.onap.sdc.toscaparser.api.elements.*;
+import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder;
+
+
+import javax.annotation.Nullable;
+import java.util.ArrayList;
+import java.util.LinkedHashMap;
+import java.util.Map;
+
+public abstract class EntityTemplate {
+ // Base class for TOSCA templates
+
+ protected static final String DERIVED_FROM = "derived_from";
+ protected static final String PROPERTIES = "properties";
+ protected static final String REQUIREMENTS = "requirements";
+ protected static final String INTERFACES = "interfaces";
+ protected static final String CAPABILITIES = "capabilities";
+ protected static final String TYPE = "type";
+ protected static final String DESCRIPTION = "description";
+ protected static final String DIRECTIVES = "directives";
+ protected static final String ATTRIBUTES = "attributes";
+ protected static final String ARTIFACTS = "artifacts";
+ protected static final String NODE_FILTER = "node_filter";
+ protected static final String COPY = "copy";
+
+ protected static final String SECTIONS[] = {
+ DERIVED_FROM, PROPERTIES, REQUIREMENTS, INTERFACES,
+ CAPABILITIES, TYPE, DESCRIPTION, DIRECTIVES,
+ ATTRIBUTES, ARTIFACTS, NODE_FILTER, COPY};
+
+ private static final String NODE = "node";
+ private static final String CAPABILITY = "capability";
+ private static final String RELATIONSHIP = "relationship";
+ private static final String OCCURRENCES = "occurrences";
+
+ protected static final String REQUIREMENTS_SECTION[] = {
+ NODE, CAPABILITY, RELATIONSHIP, OCCURRENCES, NODE_FILTER};
+
+ //# Special key names
+ private static final String METADATA = "metadata";
+ protected static final String SPECIAL_SECTIONS[] = {METADATA};
+
+ protected String name;
+ protected LinkedHashMap<String, Object> entityTpl;
+ protected LinkedHashMap<String, Object> customDef;
+ protected StatefulEntityType typeDefinition;
+ private ArrayList<Property> _properties;
+ private ArrayList<InterfacesDef> _interfaces;
+ private ArrayList<RequirementAssignment> _requirements;
+ private ArrayList<CapabilityAssignment> _capabilities;
+
+ @Nullable
+ private NodeTemplate _parentNodeTemplate;
+
+    /**
+     * No-arg constructor for subclasses that perform their own initialization
+     * instead of delegating to the main constructor.
+     */
+    public EntityTemplate() {
+    }
+
+    /**
+     * Convenience constructor with no enclosing parent node template;
+     * delegates to the five-argument constructor with a null parent.
+     */
+    public EntityTemplate(String _name,
+                          LinkedHashMap<String, Object> _template,
+                          String _entityName,
+                          LinkedHashMap<String, Object> _customDef) {
+        this(_name, _template, _entityName, _customDef, null);
+    }
+
+    /**
+     * Builds the template and resolves its {@code typeDefinition} from the raw
+     * parsed map.
+     *
+     * @param _name              template name
+     * @param _template          raw parsed template map; validated for a "type"
+     *                           (or "relationship") entry by _validateField
+     * @param _entityName        entity kind selector: "node_type",
+     *                           "relationship_type", "policy_type" or "group_type"
+     * @param _customDef         custom type definitions passed through to the
+     *                           resolved type
+     * @param parentNodeTemplate enclosing node template, or null
+     */
+    @SuppressWarnings("unchecked")
+    public EntityTemplate(String _name,
+                          LinkedHashMap<String, Object> _template,
+                          String _entityName,
+                          LinkedHashMap<String, Object> _customDef,
+                          NodeTemplate parentNodeTemplate) {
+        name = _name;
+        entityTpl = _template;
+        customDef = _customDef;
+        _validateField(entityTpl);
+        String type = (String) entityTpl.get("type");
+        UnsupportedType.validateType(type);
+        if (_entityName.equals("node_type")) {
+            // typeDefinition stays null when the template has no "type" entry
+            if (type != null) {
+                typeDefinition = new NodeType(type, customDef);
+            } else {
+                typeDefinition = null;
+            }
+        }
+        if (_entityName.equals("relationship_type")) {
+            // The relationship type may be given as a map with its own "type",
+            // as a plain string, or fall back to the template's "type" entry.
+            Object relationship = _template.get("relationship");
+            type = null;
+            if (relationship != null && relationship instanceof LinkedHashMap) {
+                type = (String) ((LinkedHashMap<String, Object>) relationship).get("type");
+            } else if (relationship instanceof String) {
+                type = (String) entityTpl.get("relationship");
+            } else {
+                type = (String) entityTpl.get("type");
+            }
+            UnsupportedType.validateType(type);
+            typeDefinition = new RelationshipType(type, null, customDef);
+        }
+        if (_entityName.equals("policy_type")) {
+            // A policy definition must carry a "type"; report JE140 otherwise.
+            if (type == null) {
+                ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE140", String.format(
+                        "ValidationError: Policy definition of \"%s\" must have a \"type\" attribute", name)));
+            }
+            typeDefinition = new PolicyType(type, customDef);
+        }
+        if (_entityName.equals("group_type")) {
+            if (type != null) {
+                typeDefinition = new GroupType(type, customDef);
+            } else {
+                typeDefinition = null;
+            }
+        }
+        // Lazily-built caches; populated on first access via the getters.
+        _properties = null;
+        _interfaces = null;
+        _requirements = null;
+        _capabilities = null;
+        _parentNodeTemplate = parentNodeTemplate;
+    }
+
+    /** Returns the enclosing parent node template, or null when standalone. */
+    public NodeTemplate getParentNodeTemplate() {
+        return _parentNodeTemplate;
+    }
+
+    /**
+     * Returns the TOSCA type name of the resolved type definition, or null
+     * when no type definition is present (or of an unrecognized kind).
+     */
+    public String getType() {
+        // instanceof replaces the original getClass().getSimpleName() string
+        // comparison, which silently returned null for any subclass of these
+        // types; dispatch order mirrors the original checks.
+        if (typeDefinition instanceof NodeType) {
+            return (String) ((NodeType) typeDefinition).getType();
+        }
+        if (typeDefinition instanceof PolicyType) {
+            return (String) ((PolicyType) typeDefinition).getType();
+        }
+        if (typeDefinition instanceof GroupType) {
+            return (String) ((GroupType) typeDefinition).getType();
+        }
+        if (typeDefinition instanceof RelationshipType) {
+            return (String) ((RelationshipType) typeDefinition).getType();
+        }
+        return null;
+    }
+
+    /**
+     * Returns the parent type object of the resolved type definition, or null
+     * when no type definition is present (or of an unrecognized kind).
+     */
+    public Object getParentType() {
+        // instanceof replaces the fragile getClass().getSimpleName() string
+        // dispatch; see getType() for the same change.
+        if (typeDefinition instanceof NodeType) {
+            return ((NodeType) typeDefinition).getParentType();
+        }
+        if (typeDefinition instanceof PolicyType) {
+            return ((PolicyType) typeDefinition).getParentType();
+        }
+        if (typeDefinition instanceof GroupType) {
+            return ((GroupType) typeDefinition).getParentType();
+        }
+        if (typeDefinition instanceof RelationshipType) {
+            return ((RelationshipType) typeDefinition).getParentType();
+        }
+        return null;
+    }
+
+    /**
+     * Returns the requirement assignments declared on this template.
+     * The backing list is built lazily and cached; each call returns a fresh
+     * RequirementAssignments wrapper over the same cached list.
+     */
+    @SuppressWarnings("unchecked")
+    public RequirementAssignments getRequirements() {
+        if (_requirements == null) {
+            _requirements = _createRequirements();
+        }
+        return new RequirementAssignments(_requirements);
+    }
+
+    /**
+     * Parses the "requirements" section into RequirementAssignment objects.
+     * Supports the extended map notation (node/capability/relationship) and
+     * the short string notation (requirement name mapped to a node name).
+     */
+    @SuppressWarnings("unchecked")
+    private ArrayList<RequirementAssignment> _createRequirements() {
+        ArrayList<RequirementAssignment> reqs = new ArrayList<>();
+        ArrayList<Map<String, Object>> requirements = (ArrayList<Map<String, Object>>)
+                typeDefinition.getValue(REQUIREMENTS, entityTpl, false);
+        if (requirements == null) {
+            requirements = new ArrayList<>();
+        }
+        for (Map<String, Object> req : requirements) {
+            for (Map.Entry<String, Object> reqEntry : req.entrySet()) {
+                String reqName = reqEntry.getKey();
+                Object reqItem = reqEntry.getValue();
+                if (reqItem instanceof LinkedHashMap) {
+                    LinkedHashMap<String, Object> reqMap = (LinkedHashMap<String, Object>) reqItem;
+                    Object rel = reqMap.get("relationship");
+                    // "node" may be absent in an extended requirement assignment;
+                    // the original reqMap.get("node").toString() threw NPE then.
+                    Object node = reqMap.get("node");
+                    String nodeName = node != null ? node.toString() : null;
+                    Object capability = reqMap.get("capability");
+                    String capabilityString = capability != null ? capability.toString() : null;
+                    reqs.add(new RequirementAssignment(reqName, nodeName, capabilityString, rel));
+                } else if (reqItem instanceof String) { // short notation
+                    reqs.add(new RequirementAssignment(reqName, String.valueOf(reqItem)));
+                }
+            }
+        }
+        return reqs;
+    }
+
+    /**
+     * Returns the Property objects of this template (declared values plus
+     * definition defaults), built lazily and cached.
+     */
+    public ArrayList<Property> getPropertiesObjects() {
+        if (_properties == null) {
+            _properties = _createProperties();
+        }
+        return _properties;
+    }
+
+    /**
+     * Returns this template's properties as a name-to-Property map,
+     * preserving the declaration order of getPropertiesObjects().
+     */
+    public LinkedHashMap<String, Property> getProperties() {
+        LinkedHashMap<String, Property> propsByName = new LinkedHashMap<>();
+        for (Property property : getPropertiesObjects()) {
+            propsByName.put(property.getName(), property);
+        }
+        return propsByName;
+    }
+
+    /**
+     * Returns the value of the named property, or null when the template has
+     * no property of that name.
+     */
+    public Object getPropertyValue(String name) {
+        Property property = getProperties().get(name);
+        if (property == null) {
+            return null;
+        }
+        return property.getValue();
+    }
+
+    /**
+     * Returns the declared type of the named property, or null when the
+     * template has no property of that name.
+     */
+    public String getPropertyType(String name) {
+        Property property = getProperties().get(name);
+        return property == null ? null : property.getType();
+    }
+
+    /** Returns the interface operation definitions, built lazily and cached. */
+    public ArrayList<InterfacesDef> getInterfaces() {
+        if (_interfaces == null) {
+            _interfaces = _createInterfaces();
+        }
+        return _interfaces;
+    }
+
+    /**
+     * Returns the capability assignments of this template, built lazily
+     * and cached.
+     */
+    public ArrayList<CapabilityAssignment> getCapabilitiesObjects() {
+        if (_capabilities == null) {
+            _capabilities = _createCapabilities();
+        }
+        return _capabilities;
+
+    }
+
+    /**
+     * Returns the template's capabilities wrapped as CapabilityAssignments,
+     * keyed by capability name in declaration order.
+     */
+    public CapabilityAssignments getCapabilities() {
+        LinkedHashMap<String, CapabilityAssignment> byName = new LinkedHashMap<>();
+        for (CapabilityAssignment assignment : getCapabilitiesObjects()) {
+            byName.put(assignment.getName(), assignment);
+        }
+        return new CapabilityAssignments(byName);
+    }
+
+ public boolean isDerivedFrom(String typeStr) {
+ // Returns true if this object is derived from 'type_str'.
+ // False otherwise
+
+ if (getType() == null) {
+ return false;
+ } else if (getType().equals(typeStr)) {
+ return true;
+ } else if (getParentType() != null) {
+ return ((EntityType) getParentType()).isDerivedFrom(typeStr);
+ }
+ return false;
+ }
+
+    /**
+     * Builds CapabilityAssignment objects for the template's "capabilities"
+     * section: each assignment starts from the capability definition's
+     * property defaults and is then overridden by the template's own values.
+     */
+    @SuppressWarnings("unchecked")
+    private ArrayList<CapabilityAssignment> _createCapabilities() {
+        ArrayList<CapabilityAssignment> capability = new ArrayList<>();
+        LinkedHashMap<String, Object> caps = (LinkedHashMap<String, Object>)
+                ((EntityType) typeDefinition).getValue(CAPABILITIES, entityTpl, true);
+        if (caps == null) {
+            return capability;
+        }
+        // Capability definitions are only declared on node and group types.
+        LinkedHashMap<String, CapabilityTypeDef> capabilities = null;
+        if (typeDefinition instanceof NodeType) {
+            capabilities = ((NodeType) typeDefinition).getCapabilities();
+        } else if (typeDefinition instanceof GroupType) {
+            capabilities = ((GroupType) typeDefinition).getCapabilities();
+        }
+        if (capabilities == null) {
+            // The original dereferenced "capabilities" unconditionally and threw
+            // NPE for any other typeDefinition kind; return empty instead.
+            return capability;
+        }
+        for (Map.Entry<String, Object> me : caps.entrySet()) {
+            String name = me.getKey();
+            LinkedHashMap<String, Object> props = (LinkedHashMap<String, Object>) me.getValue();
+            CapabilityTypeDef c = capabilities.get(name);
+            if (c == null) {
+                continue;
+            }
+            LinkedHashMap<String, Object> properties = new LinkedHashMap<>();
+            // first use the definition's default values
+            LinkedHashMap<String, Object> cprops = c.getProperties();
+            if (cprops != null) {
+                for (Map.Entry<String, Object> cpe : cprops.entrySet()) {
+                    LinkedHashMap<String, Object> propertyDef = (LinkedHashMap<String, Object>) cpe.getValue();
+                    Object dob = propertyDef.get("default");
+                    if (dob != null) {
+                        properties.put(cpe.getKey(), dob);
+                    }
+                }
+            }
+            // then override (if available) with the template's own properties;
+            // guard props itself, since a capability entry may map to null
+            if (props != null) {
+                LinkedHashMap<String, Object> pp = (LinkedHashMap<String, Object>) props.get("properties");
+                if (pp != null) {
+                    properties.putAll(pp);
+                }
+            }
+            capability.add(new CapabilityAssignment(name, properties, c, customDef));
+        }
+        return capability;
+    }
+
+    /**
+     * Validates the "properties" section of the given template map against
+     * the property definitions of the given type (allowed names and required
+     * values); issues are reported via the shared collector.
+     */
+    protected void _validateProperties(LinkedHashMap<String, Object> template, StatefulEntityType entityType) {
+        @SuppressWarnings("unchecked")
+        LinkedHashMap<String, Object> properties = (LinkedHashMap<String, Object>) entityType.getValue(PROPERTIES, template, false);
+        _commonValidateProperties(entityType, properties);
+    }
+
+    /**
+     * Validates that the capability names used in the template are declared
+     * by its type, then validates each capability's properties.
+     */
+    protected void _validateCapabilities() {
+        // Capability definitions only exist on node and group types; the
+        // original unconditional (NodeType) cast (flagged "BUG???" in-source)
+        // threw ClassCastException for group templates.
+        LinkedHashMap<String, CapabilityTypeDef> typeCapabilities = null;
+        if (typeDefinition instanceof NodeType) {
+            typeCapabilities = ((NodeType) typeDefinition).getCapabilities();
+        } else if (typeDefinition instanceof GroupType) {
+            typeCapabilities = ((GroupType) typeDefinition).getCapabilities();
+        }
+        ArrayList<String> allowedCaps = new ArrayList<>();
+        if (typeCapabilities != null) {
+            allowedCaps.addAll(typeCapabilities.keySet());
+        }
+        @SuppressWarnings("unchecked")
+        LinkedHashMap<String, Object> capabilities = (LinkedHashMap<String, Object>)
+                ((EntityType) typeDefinition).getValue(CAPABILITIES, entityTpl, false);
+        if (capabilities != null) {
+            _commonValidateField(capabilities, allowedCaps, "capabilities");
+            _validateCapabilitiesProperties(capabilities);
+        }
+    }
+
+    /**
+     * Validates the property values of each capability assignment, including
+     * the scalable default_instances range check carried over from the
+     * upstream tosca-parser.
+     */
+    @SuppressWarnings("unchecked")
+    private void _validateCapabilitiesProperties(LinkedHashMap<String, Object> capabilities) {
+        for (Map.Entry<String, Object> me : capabilities.entrySet()) {
+            String cap = me.getKey();
+            LinkedHashMap<String, Object> props = (LinkedHashMap<String, Object>) me.getValue();
+            CapabilityAssignment capability = getCapability(cap);
+            if (capability == null) {
+                continue;
+            }
+            CapabilityTypeDef capabilitydef = capability.getDefinition();
+            // guard props: a capability entry may map to null in the template
+            LinkedHashMap<String, Object> propDict =
+                    props != null ? (LinkedHashMap<String, Object>) props.get(PROPERTIES) : null;
+            _commonValidateProperties(capabilitydef, propDict);
+
+            // validating capability properties values
+            for (Property prop : capability.getPropertiesObjects()) {
+                prop.validate();
+
+                if (cap.equals("scalable") && prop.getName().equals("default_instances") && propDict != null) {
+                    Object minObj = propDict.get("min_instances");
+                    Object maxObj = propDict.get("max_instances");
+                    Object defObj = propDict.get("default_instances");
+                    // The original unconditionally cast all three to int and
+                    // threw NPE when any of them was absent from the template.
+                    if (minObj instanceof Integer && maxObj instanceof Integer && defObj instanceof Integer) {
+                        int minInstances = (Integer) minObj;
+                        int maxInstances = (Integer) maxObj;
+                        int defaultInstances = (Integer) defObj;
+                        if (defaultInstances < minInstances || defaultInstances > maxInstances) {
+                            ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE141", String.format(
+                                    "ValidationError: \"properties\" of template \"%s\": \"default_instances\" value is not between \"min_instances\" and \"max_instances\"",
+                                    name)));
+                        }
+                    }
+                }
+            }
+        }
+    }
+
+    /**
+     * Validates a properties map against a type's property definitions:
+     * flags unknown property names (JE144 via _commonValidateField) and
+     * reports required properties that have neither a value nor a default
+     * (JE003 when a properties map exists, JE004 when it is absent entirely).
+     */
+    private void _commonValidateProperties(StatefulEntityType entityType, LinkedHashMap<String, Object> properties) {
+        ArrayList<String> allowedProps = new ArrayList<String>();
+        ArrayList<String> requiredProps = new ArrayList<String>();
+        for (PropertyDef p : entityType.getPropertiesDefObjects()) {
+            allowedProps.add(p.getName());
+            // If property is 'required' and has no 'default' value then record
+            if (p.isRequired() && p.getDefault() == null) {
+                requiredProps.add(p.getName());
+            }
+        }
+        // validate all required properties have values
+        if (properties != null) {
+            ArrayList<String> reqPropsNoValueOrDefault = new ArrayList<String>();
+            _commonValidateField(properties, allowedProps, "properties");
+            // make sure it's not missing any property required by a tosca type
+            for (String r : requiredProps) {
+                if (properties.get(r) == null) {
+                    reqPropsNoValueOrDefault.add(r);
+                }
+            }
+            // Required properties found without value or a default value
+            if (!reqPropsNoValueOrDefault.isEmpty()) {
+                ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE003", String.format(
+                        "MissingRequiredFieldError: properties of template \"%s\" are missing field(s): %s",
+                        name, reqPropsNoValueOrDefault.toString())));
+            }
+        } else {
+            // Required properties in schema, but not in template
+            if (!requiredProps.isEmpty()) {
+                ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE004", String.format(
+                        "MissingRequiredFieldError2: properties of template \"%s\" are missing field(s): %s",
+                        name, requiredProps.toString())));
+            }
+        }
+    }
+
+    /**
+     * Checks that the raw template map carries a "type" entry, or — for
+     * relationship templates — an inline relationship with a "type".
+     * Reports JE142/JE143 via the shared collector; mirrors the upstream
+     * python _validate_field.
+     */
+    @SuppressWarnings("unchecked")
+    private void _validateField(LinkedHashMap<String, Object> template) {
+        // template is declared LinkedHashMap, so this only fails for null
+        if (!(template instanceof LinkedHashMap)) {
+            ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE142", String.format(
+                    "MissingRequiredFieldError: Template \"%s\" is missing required field \"%s\"", name, TYPE)));
+            return; // cannot inspect a null/non-map template any further
+        }
+        boolean bBad = false;
+        Object relationship = ((LinkedHashMap<String, Object>) template).get("relationship");
+        if (relationship != null) {
+            if (!(relationship instanceof String)) {
+                // map form: the inline relationship must carry its own "type"
+                bBad = (((LinkedHashMap<String, Object>) relationship).get(TYPE) == null);
+            } else if (relationship instanceof String) {
+                // NOTE(review): relationship is known non-null here, so this
+                // branch can never set bBad — kept to mirror the python source
+                bBad = (template.get("relationship") == null);
+            }
+        } else {
+            bBad = (template.get(TYPE) == null);
+        }
+        if (bBad) {
+            ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE143", String.format(
+                    "MissingRequiredFieldError: Template \"%s\" is missing required field \"%s\"", name, TYPE)));
+        }
+    }
+
+    /**
+     * Reports a JE144 validation issue for every key of the given section map
+     * that is not in the allowed-names list for that section.
+     */
+    protected void _commonValidateField(LinkedHashMap<String, Object> schema, ArrayList<String> allowedList, String section) {
+        for (String sname : schema.keySet()) {
+            // List.contains uses equals(), matching the original manual scan
+            if (!allowedList.contains(sname)) {
+                ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE144", String.format(
+                        "UnknownFieldError: Section \"%s\" of template \"%s\" contains unknown field \"%s\"", section, name, sname)));
+            }
+        }
+    }
+
+    /**
+     * Builds Property objects from the template's declared property values,
+     * then appends definition defaults for properties the template omitted.
+     */
+    @SuppressWarnings("unchecked")
+    private ArrayList<Property> _createProperties() {
+        ArrayList<Property> props = new ArrayList<>();
+        LinkedHashMap<String, Object> properties = (LinkedHashMap<String, Object>)
+                ((EntityType) typeDefinition).getValue(PROPERTIES, entityTpl, false);
+        if (properties == null) {
+            properties = new LinkedHashMap<>();
+        }
+        // Definitions come from the type, not from each entry: hoisted out of
+        // the loop (the original re-fetched and double-looked-up per entry).
+        LinkedHashMap<String, PropertyDef> propsDef = typeDefinition.getPropertiesDef();
+        for (Map.Entry<String, Object> me : properties.entrySet()) {
+            String pname = me.getKey();
+            PropertyDef pd = propsDef != null ? propsDef.get(pname) : null;
+            if (pd != null) {
+                props.add(new Property(pname, me.getValue(), pd.getSchema(), customDef));
+            }
+        }
+        // add definition defaults for properties not set by the template
+        for (PropertyDef pd : typeDefinition.getPropertiesDefObjects()) {
+            if (pd.getDefault() != null && properties.get(pd.getName()) == null) {
+                props.add(new Property(pd.getName(), pd.getDefault(), pd.getSchema(), customDef));
+            }
+        }
+        return props;
+    }
+
+    /**
+     * Builds InterfacesDef objects for each operation of each interface.
+     * For relationship templates the interfaces may live either directly under
+     * the template or nested inside an inline "relationship" map of one of the
+     * template's entries; for all other kinds they are read from the type.
+     */
+    @SuppressWarnings("unchecked")
+    private ArrayList<InterfacesDef> _createInterfaces() {
+        ArrayList<InterfacesDef> interfaces = new ArrayList<>();
+        // initial empty map is overwritten (possibly with null) on every path
+        LinkedHashMap<String, Object> typeInterfaces = new LinkedHashMap<String, Object>();
+        if (typeDefinition instanceof RelationshipType) {
+            if (entityTpl instanceof LinkedHashMap) {
+                typeInterfaces = (LinkedHashMap<String, Object>) entityTpl.get(INTERFACES);
+                if (typeInterfaces == null) {
+                    // no direct "interfaces" entry: scan the template's other
+                    // entries for an inline relationship map that carries one
+                    for (String relName : entityTpl.keySet()) {
+                        Object relValue = entityTpl.get(relName);
+                        if (!relName.equals("type")) {
+                            Object relDef = relValue;
+                            LinkedHashMap<String, Object> rel = null;
+                            if (relDef instanceof LinkedHashMap) {
+                                Object relob = ((LinkedHashMap<String, Object>) relDef).get("relationship");
+                                if (relob instanceof LinkedHashMap) {
+                                    rel = (LinkedHashMap<String, Object>) relob;
+                                }
+                            }
+                            if (rel != null) {
+                                if (rel.get(INTERFACES) != null) {
+                                    // first match wins
+                                    typeInterfaces = (LinkedHashMap<String, Object>) rel.get(INTERFACES);
+                                    break;
+                                }
+                            }
+                        }
+                    }
+                }
+            }
+        } else {
+            typeInterfaces = (LinkedHashMap<String, Object>)
+                    ((EntityType) typeDefinition).getValue(INTERFACES, entityTpl, false);
+        }
+        if (typeInterfaces != null) {
+            // one InterfacesDef per (interface type, operation) pair
+            for (Map.Entry<String, Object> me : typeInterfaces.entrySet()) {
+                String interfaceType = me.getKey();
+                LinkedHashMap<String, Object> value = (LinkedHashMap<String, Object>) me.getValue();
+                for (Map.Entry<String, Object> ve : value.entrySet()) {
+                    String op = ve.getKey();
+                    Object opDef = ve.getValue();
+                    InterfacesDef iface = new InterfacesDef((EntityType) typeDefinition,
+                            interfaceType,
+                            this,
+                            op,
+                            opDef);
+                    interfaces.add(iface);
+                }
+
+            }
+        }
+        return interfaces;
+    }
+
+    /**
+     * Provides the named capability.
+     *
+     * @param name name of the capability
+     * @return the capability object if found, null otherwise
+     */
+    public CapabilityAssignment getCapability(String name) {
+        return getCapabilities().getCapabilityByName(name);
+    }
+
+    // simple getters
+
+    /** Returns the template name. */
+    public String getName() {
+        return name;
+    }
+
+    /** Returns the resolved type definition; null when no type was resolved. */
+    public StatefulEntityType getTypeDefinition() {
+        return typeDefinition;
+    }
+
+    /** Returns the custom type definitions supplied at construction. */
+    public LinkedHashMap<String, Object> getCustomDef() {
+        return customDef;
+    }
+
+ @Override
+ public String toString() {
+ return "EntityTemplate{" +
+ "name='" + name + '\'' +
+ ", entityTpl=" + entityTpl +
+ ", customDef=" + customDef +
+ ", typeDefinition=" + typeDefinition +
+ ", _properties=" + _properties +
+ ", _interfaces=" + _interfaces +
+ ", _requirements=" + _requirements +
+ ", _capabilities=" + _capabilities +
+ '}';
+ }
+}
+
+/*python
+
+class EntityTemplate(object):
+ '''Base class for TOSCA templates.'''
+
+ SECTIONS = (DERIVED_FROM, PROPERTIES, REQUIREMENTS,
+ INTERFACES, CAPABILITIES, TYPE, DESCRIPTION, DIRECTIVES,
+ ATTRIBUTES, ARTIFACTS, NODE_FILTER, COPY) = \
+ ('derived_from', 'properties', 'requirements', 'interfaces',
+ 'capabilities', 'type', 'description', 'directives',
+ 'attributes', 'artifacts', 'node_filter', 'copy')
+ REQUIREMENTS_SECTION = (NODE, CAPABILITY, RELATIONSHIP, OCCURRENCES, NODE_FILTER) = \
+ ('node', 'capability', 'relationship',
+ 'occurrences', 'node_filter')
+ # Special key names
+ SPECIAL_SECTIONS = (METADATA) = ('metadata')
+
+ def __init__(self, name, template, entity_name, custom_def=None):
+ self.name = name
+ self.entity_tpl = template
+ self.custom_def = custom_def
+ self._validate_field(self.entity_tpl)
+ type = self.entity_tpl.get('type')
+ UnsupportedType.validate_type(type)
+ if entity_name == 'node_type':
+ self.type_definition = NodeType(type, custom_def) \
+ if type is not None else None
+ if entity_name == 'relationship_type':
+ relationship = template.get('relationship')
+ type = None
+ if relationship and isinstance(relationship, dict):
+ type = relationship.get('type')
+ elif isinstance(relationship, str):
+ type = self.entity_tpl['relationship']
+ else:
+ type = self.entity_tpl['type']
+ UnsupportedType.validate_type(type)
+ self.type_definition = RelationshipType(type,
+ None, custom_def)
+ if entity_name == 'policy_type':
+ if not type:
+ msg = (_('Policy definition of "%(pname)s" must have'
+ ' a "type" ''attribute.') % dict(pname=name))
+ ValidationIssueCollector.appendException(
+ ValidationError(msg))
+
+ self.type_definition = PolicyType(type, custom_def)
+ if entity_name == 'group_type':
+ self.type_definition = GroupType(type, custom_def) \
+ if type is not None else None
+ self._properties = None
+ self._interfaces = None
+ self._requirements = None
+ self._capabilities = None
+
+ @property
+ def type(self):
+ if self.type_definition:
+ return self.type_definition.type
+
+ @property
+ def parent_type(self):
+ if self.type_definition:
+ return self.type_definition.parent_type
+
+ @property
+ def requirements(self):
+ if self._requirements is None:
+ self._requirements = self.type_definition.get_value(
+ self.REQUIREMENTS,
+ self.entity_tpl) or []
+ return self._requirements
+
+ def get_properties_objects(self):
+ '''Return properties objects for this template.'''
+ if self._properties is None:
+ self._properties = self._create_properties()
+ return self._properties
+
+ def get_properties(self):
+ '''Return a dictionary of property name-object pairs.'''
+ return {prop.name: prop
+ for prop in self.get_properties_objects()}
+
+ def get_property_value(self, name):
+ '''Return the value of a given property name.'''
+ props = self.get_properties()
+ if props and name in props.keys():
+ return props[name].value
+
+ @property
+ def interfaces(self):
+ if self._interfaces is None:
+ self._interfaces = self._create_interfaces()
+ return self._interfaces
+
+ def get_capabilities_objects(self):
+ '''Return capabilities objects for this template.'''
+ if not self._capabilities:
+ self._capabilities = self._create_capabilities()
+ return self._capabilities
+
+ def get_capabilities(self):
+ '''Return a dictionary of capability name-object pairs.'''
+ return {cap.name: cap
+ for cap in self.get_capabilities_objects()}
+
+ def is_derived_from(self, type_str):
+ '''Check if object inherits from the given type.
+
+ Returns true if this object is derived from 'type_str'.
+ False otherwise.
+ '''
+ if not self.type:
+ return False
+ elif self.type == type_str:
+ return True
+ elif self.parent_type:
+ return self.parent_type.is_derived_from(type_str)
+ else:
+ return False
+
+ def _create_capabilities(self):
+ capability = []
+ caps = self.type_definition.get_value(self.CAPABILITIES,
+ self.entity_tpl, True)
+ if caps:
+ for name, props in caps.items():
+ capabilities = self.type_definition.get_capabilities()
+ if name in capabilities.keys():
+ c = capabilities[name]
+ properties = {}
+ # first use the definition default value
+ if c.properties:
+ for property_name in c.properties.keys():
+ prop_def = c.properties[property_name]
+ if 'default' in prop_def:
+ properties[property_name] = prop_def['default']
+ # then update (if available) with the node properties
+ if 'properties' in props and props['properties']:
+ properties.update(props['properties'])
+
+ cap = CapabilityAssignment(name, properties, c)
+ capability.append(cap)
+ return capability
+
+ def _validate_properties(self, template, entitytype):
+ properties = entitytype.get_value(self.PROPERTIES, template)
+ self._common_validate_properties(entitytype, properties)
+
+ def _validate_capabilities(self):
+ type_capabilities = self.type_definition.get_capabilities()
+ allowed_caps = \
+ type_capabilities.keys() if type_capabilities else []
+ capabilities = self.type_definition.get_value(self.CAPABILITIES,
+ self.entity_tpl)
+ if capabilities:
+ self._common_validate_field(capabilities, allowed_caps,
+ 'capabilities')
+ self._validate_capabilities_properties(capabilities)
+
+ def _validate_capabilities_properties(self, capabilities):
+ for cap, props in capabilities.items():
+ capability = self.get_capability(cap)
+ if not capability:
+ continue
+ capabilitydef = capability.definition
+ self._common_validate_properties(capabilitydef,
+ props[self.PROPERTIES])
+
+ # validating capability properties values
+ for prop in self.get_capability(cap).get_properties_objects():
+ prop.validate()
+
+ # TODO(srinivas_tadepalli): temporary work around to validate
+ # default_instances until standardized in specification
+ if cap == "scalable" and prop.name == "default_instances":
+ prop_dict = props[self.PROPERTIES]
+ min_instances = prop_dict.get("min_instances")
+ max_instances = prop_dict.get("max_instances")
+ default_instances = prop_dict.get("default_instances")
+ if not (min_instances <= default_instances
+ <= max_instances):
+ err_msg = ('"properties" of template "%s": '
+ '"default_instances" value is not between '
+ '"min_instances" and "max_instances".' %
+ self.name)
+ ValidationIssueCollector.appendException(
+ ValidationError(message=err_msg))
+
+ def _common_validate_properties(self, entitytype, properties):
+ allowed_props = []
+ required_props = []
+ for p in entitytype.get_properties_def_objects():
+ allowed_props.append(p.name)
+ # If property is 'required' and has no 'default' value then record
+ if p.required and p.default is None:
+ required_props.append(p.name)
+ # validate all required properties have values
+ if properties:
+ req_props_no_value_or_default = []
+ self._common_validate_field(properties, allowed_props,
+ 'properties')
+ # make sure it's not missing any property required by a tosca type
+ for r in required_props:
+ if r not in properties.keys():
+ req_props_no_value_or_default.append(r)
+ # Required properties found without value or a default value
+ if req_props_no_value_or_default:
+ ValidationIssueCollector.appendException(
+ MissingRequiredFieldError(
+ what='"properties" of template "%s"' % self.name,
+ required=req_props_no_value_or_default))
+ else:
+ # Required properties in schema, but not in template
+ if required_props:
+ ValidationIssueCollector.appendException(
+ MissingRequiredFieldError(
+ what='"properties" of template "%s"' % self.name,
+ required=required_props))
+
+ def _validate_field(self, template):
+ if not isinstance(template, dict):
+ ValidationIssueCollector.appendException(
+ MissingRequiredFieldError(
+ what='Template "%s"' % self.name, required=self.TYPE))
+ try:
+ relationship = template.get('relationship')
+ if relationship and not isinstance(relationship, str):
+ relationship[self.TYPE]
+ elif isinstance(relationship, str):
+ template['relationship']
+ else:
+ template[self.TYPE]
+ except KeyError:
+ ValidationIssueCollector.appendException(
+ MissingRequiredFieldError(
+ what='Template "%s"' % self.name, required=self.TYPE))
+
+ def _common_validate_field(self, schema, allowedlist, section):
+ for name in schema:
+ if name not in allowedlist:
+ ValidationIssueCollector.appendException(
+ UnknownFieldError(
+ what=('"%(section)s" of template "%(nodename)s"'
+ % {'section': section, 'nodename': self.name}),
+ field=name))
+
+ def _create_properties(self):
+ props = []
+ properties = self.type_definition.get_value(self.PROPERTIES,
+ self.entity_tpl) or {}
+ for name, value in properties.items():
+ props_def = self.type_definition.get_properties_def()
+ if props_def and name in props_def:
+ prop = Property(name, value,
+ props_def[name].schema, self.custom_def)
+ props.append(prop)
+ for p in self.type_definition.get_properties_def_objects():
+ if p.default is not None and p.name not in properties.keys():
+ prop = Property(p.name, p.default, p.schema, self.custom_def)
+ props.append(prop)
+ return props
+
+ def _create_interfaces(self):
+ interfaces = []
+ type_interfaces = None
+ if isinstance(self.type_definition, RelationshipType):
+ if isinstance(self.entity_tpl, dict):
+ if self.INTERFACES in self.entity_tpl:
+ type_interfaces = self.entity_tpl[self.INTERFACES]
+ else:
+ for rel_def, value in self.entity_tpl.items():
+ if rel_def != 'type':
+ rel_def = self.entity_tpl.get(rel_def)
+ rel = None
+ if isinstance(rel_def, dict):
+ rel = rel_def.get('relationship')
+ if rel:
+ if self.INTERFACES in rel:
+ type_interfaces = rel[self.INTERFACES]
+ break
+ else:
+ type_interfaces = self.type_definition.get_value(self.INTERFACES,
+ self.entity_tpl)
+ if type_interfaces:
+ for interface_type, value in type_interfaces.items():
+ for op, op_def in value.items():
+ iface = InterfacesDef(self.type_definition,
+ interfacetype=interface_type,
+ node_template=self,
+ name=op,
+ value=op_def)
+ interfaces.append(iface)
+ return interfaces
+
+ def get_capability(self, name):
+ """Provide named capability
+
+ :param name: name of capability
+ :return: capability object if found, None otherwise
+ """
+ caps = self.get_capabilities()
+ if caps and name in caps.keys():
+ return caps[name]
+*/
diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/Group.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/Group.java
new file mode 100644
index 0000000..0591d9a
--- /dev/null
+++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/Group.java
@@ -0,0 +1,171 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.sdc.toscaparser.api;
+
+import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue;
+import org.onap.sdc.toscaparser.api.elements.Metadata;
+import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder;
+import org.onap.sdc.toscaparser.api.utils.ValidateUtils;
+
+import java.util.ArrayList;
+import java.util.LinkedHashMap;
+import java.util.Map;
+
+/**
+ * A TOSCA group template: a named collection of member node templates that
+ * share metadata, properties and interfaces. Validates that the template
+ * only uses the sections permitted by the TOSCA specification (JE183).
+ */
+public class Group extends EntityTemplate {
+
+    private static final String TYPE = "type";
+    private static final String METADATA = "metadata";
+    private static final String DESCRIPTION = "description";
+    private static final String PROPERTIES = "properties";
+    private static final String MEMBERS = "members";
+    private static final String INTERFACES = "interfaces";
+    // Keys permitted in a group template; anything else is reported as JE183.
+    private static final String[] SECTIONS = {
+            TYPE, METADATA, DESCRIPTION, PROPERTIES, MEMBERS, INTERFACES};
+
+    private String name;
+    private LinkedHashMap<String, Object> tpl;
+    private ArrayList<NodeTemplate> memberNodes;
+    private LinkedHashMap<String, Object> customDef;
+    private Metadata metaData;
+
+
+    public Group(String name, LinkedHashMap<String, Object> templates,
+                 ArrayList<NodeTemplate> memberNodes,
+                 LinkedHashMap<String, Object> customDef) {
+        this(name, templates, memberNodes, customDef, null);
+    }
+
+    /**
+     * Builds a group template and validates its keys.
+     *
+     * @param name               group name as it appears in the topology template
+     * @param templates          raw group template map parsed from YAML
+     * @param memberNodes        resolved node templates listed under "members"
+     * @param customDef          custom type definitions in scope for this template
+     * @param parentNodeTemplate enclosing node template, or null at top level
+     */
+    public Group(String name, LinkedHashMap<String, Object> templates,
+                 ArrayList<NodeTemplate> memberNodes,
+                 LinkedHashMap<String, Object> customDef, NodeTemplate parentNodeTemplate) {
+        super(name, templates, "group_type", customDef, parentNodeTemplate);
+
+        this.name = name;
+        tpl = templates;
+        // BUG FIX: customDef was accepted but never stored, so toString()
+        // always reported customDef=null.
+        this.customDef = customDef;
+        if (tpl.get(METADATA) != null) {
+            Object metadataObject = tpl.get(METADATA);
+            ValidateUtils.validateMap(metadataObject);
+            metaData = new Metadata((Map<String, Object>) metadataObject);
+        }
+        this.memberNodes = memberNodes;
+        validateKeys();
+        // Force capability resolution so validation issues surface at build time.
+        getCapabilities();
+    }
+
+    public Metadata getMetadata() {
+        return metaData;
+    }
+
+    /** @return the raw member-name list from the template, or null if absent */
+    public ArrayList<String> getMembers() {
+        return (ArrayList<String>) entityTpl.get(MEMBERS);
+    }
+
+    /** @return the group description from the template, or null if absent */
+    public String getDescription() {
+        return (String) entityTpl.get(DESCRIPTION);
+
+    }
+
+    public ArrayList<NodeTemplate> getMemberNodes() {
+        return memberNodes;
+    }
+
+    // Reports JE183 for every template key outside the allowed SECTIONS.
+    private void validateKeys() {
+        for (String key : entityTpl.keySet()) {
+            boolean bFound = false;
+            for (String sect : SECTIONS) {
+                if (key.equals(sect)) {
+                    bFound = true;
+                    break;
+                }
+            }
+            if (!bFound) {
+                ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE183", String.format(
+                        "UnknownFieldError: Groups \"%s\" contains unknown field \"%s\"",
+                        name, key)));
+            }
+        }
+    }
+
+    @Override
+    public String toString() {
+        return "Group{"
+                + "name='" + name + '\''
+                + ", tpl=" + tpl
+                + ", memberNodes=" + memberNodes
+                + ", customDef=" + customDef
+                + ", metaData=" + metaData
+                + '}';
+    }
+
+    /**
+     * Orders groups by name, then by type for equal names.
+     * NOTE(review): this mirrors Comparable#compareTo but the class does not
+     * declare "implements Comparable<Group>"; left unchanged to keep the
+     * external interface identical.
+     */
+    public int compareTo(Group other) {
+        if (this.equals(other)) {
+            return 0;
+        }
+        return this.getName().compareTo(other.getName()) == 0 ? this.getType().compareTo(other.getType()) : this.getName().compareTo(other.getName());
+    }
+}
+
+/*python
+
+from toscaparser.common.exception import ValidationIssueCollector
+from toscaparser.common.exception import UnknownFieldError
+from toscaparser.entity_template import EntityTemplate
+from toscaparser.utils import validateutils
+
+SECTIONS = (TYPE, METADATA, DESCRIPTION, PROPERTIES, MEMBERS, INTERFACES) = \
+ ('type', 'metadata', 'description',
+ 'properties', 'members', 'interfaces')
+
+
+class Group(EntityTemplate):
+
+ def __init__(self, name, group_templates, member_nodes, custom_defs=None):
+ super(Group, self).__init__(name,
+ group_templates,
+ 'group_type',
+ custom_defs)
+ self.name = name
+ self.tpl = group_templates
+ self.meta_data = None
+ if self.METADATA in self.tpl:
+ self.meta_data = self.tpl.get(self.METADATA)
+ validateutils.validate_map(self.meta_data)
+ self.member_nodes = member_nodes
+ self._validate_keys()
+
+ @property
+ def members(self):
+ return self.entity_tpl.get('members')
+
+ @property
+ def description(self):
+ return self.entity_tpl.get('description')
+
+ def get_member_nodes(self):
+ return self.member_nodes
+
+ def _validate_keys(self):
+ for key in self.entity_tpl.keys():
+ if key not in SECTIONS:
+ ValidationIssueCollector.appendException(
+ UnknownFieldError(what='Groups "%s"' % self.name,
+ field=key))
+*/
diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/ImportsLoader.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/ImportsLoader.java
new file mode 100644
index 0000000..019adb3
--- /dev/null
+++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/ImportsLoader.java
@@ -0,0 +1,748 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.sdc.toscaparser.api;
+
+import com.google.common.base.Charsets;
+import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue;
+import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder;
+import org.onap.sdc.toscaparser.api.utils.UrlUtils;
+
+import org.onap.sdc.toscaparser.api.elements.TypeValidation;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.yaml.snakeyaml.Yaml;
+
+import java.io.*;
+import java.net.URL;
+import java.nio.file.Paths;
+import java.util.*;
+
+public class ImportsLoader {
+
+ private static Logger log = LoggerFactory.getLogger(ImportsLoader.class.getName());
+ private static final String FILE = "file";
+ private static final String REPOSITORY = "repository";
+ private static final String NAMESPACE_URI = "namespace_uri";
+ private static final String NAMESPACE_PREFIX = "namespace_prefix";
+ private String IMPORTS_SECTION[] = {FILE, REPOSITORY, NAMESPACE_URI, NAMESPACE_PREFIX};
+
+ private ArrayList<Object> importslist;
+ private String path;
+ private ArrayList<String> typeDefinitionList;
+
+ private LinkedHashMap<String, Object> customDefs;
+ private LinkedHashMap<String, Object> allCustomDefs;
+ private ArrayList<LinkedHashMap<String, Object>> nestedToscaTpls;
+ private LinkedHashMap<String, Object> repositories;
+
+ @SuppressWarnings("unchecked")
+ public ImportsLoader(ArrayList<Object> _importslist,
+ String _path,
+ Object _typeDefinitionList,
+ LinkedHashMap<String, Object> tpl) {
+
+ this.importslist = _importslist;
+ customDefs = new LinkedHashMap<String, Object>();
+ allCustomDefs = new LinkedHashMap<String, Object>();
+ nestedToscaTpls = new ArrayList<LinkedHashMap<String, Object>>();
+ if ((_path == null || _path.isEmpty()) && tpl == null) {
+ //msg = _('Input tosca template is not provided.')
+ //log.warning(msg)
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE184", "ValidationError: Input tosca template is not provided"));
+ }
+
+ this.path = _path;
+ this.repositories = new LinkedHashMap<String, Object>();
+
+ if (tpl != null && tpl.get("repositories") != null) {
+ this.repositories = (LinkedHashMap<String, Object>) tpl.get("repositories");
+ }
+ this.typeDefinitionList = new ArrayList<String>();
+ if (_typeDefinitionList != null) {
+ if (_typeDefinitionList instanceof ArrayList) {
+ this.typeDefinitionList = (ArrayList<String>) _typeDefinitionList;
+ } else {
+ this.typeDefinitionList.add((String) _typeDefinitionList);
+ }
+ }
+ _validateAndLoadImports();
+ }
+
+ public LinkedHashMap<String, Object> getCustomDefs() {
+ return allCustomDefs;
+ }
+
+ public ArrayList<LinkedHashMap<String, Object>> getNestedToscaTpls() {
+ return nestedToscaTpls;
+ }
+
+ @SuppressWarnings({"unchecked", "unused"})
+ public void _validateAndLoadImports() {
+ Set<String> importNames = new HashSet<String>();
+
+ if (importslist == null) {
+ //msg = _('"imports" keyname is defined without including templates.')
+ //log.error(msg)
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE185",
+ "ValidationError: \"imports\" keyname is defined without including templates"));
+ return;
+ }
+
+ for (Object importDef : importslist) {
+ String fullFileName = null;
+ LinkedHashMap<String, Object> customType = null;
+ if (importDef instanceof LinkedHashMap) {
+ for (Map.Entry<String, Object> me : ((LinkedHashMap<String, Object>) importDef).entrySet()) {
+ String importName = me.getKey();
+ Object importUri = me.getValue();
+ if (importNames.contains(importName)) {
+ //msg = (_('Duplicate import name "%s" was found.') % import_name)
+ //log.error(msg)
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE186", String.format(
+ "ValidationError: Duplicate import name \"%s\" was found", importName)));
+ }
+ importNames.add(importName); //???
+
+ // _loadImportTemplate returns 2 objects
+ Object ffnct[] = _loadImportTemplate(importName, importUri);
+ fullFileName = (String) ffnct[0];
+ customType = (LinkedHashMap<String, Object>) ffnct[1];
+ String namespacePrefix = "";
+ if (importUri instanceof LinkedHashMap) {
+ namespacePrefix = (String)
+ ((LinkedHashMap<String, Object>) importUri).get(NAMESPACE_PREFIX);
+ }
+
+ if (customType != null) {
+ TypeValidation tv = new TypeValidation(customType, importDef);
+ _updateCustomDefs(customType, namespacePrefix);
+ }
+ }
+ } else { // old style of imports
+ // _loadImportTemplate returns 2 objects
+ Object ffnct[] = _loadImportTemplate(null, importDef);
+ fullFileName = (String) ffnct[0];
+ customType = (LinkedHashMap<String, Object>) ffnct[1];
+ if (customType != null) {
+ TypeValidation tv = new TypeValidation(customType, importDef);
+ _updateCustomDefs(customType, null);
+ }
+ }
+ _updateNestedToscaTpls(fullFileName, customType);
+
+
+ }
+ }
+
+ /**
+ * This method is used to get consolidated custom definitions by passing custom Types from
+ * each import. The resultant collection is then passed back which contains all import
+ * definitions
+ *
+ * @param customType the custom type
+ * @param namespacePrefix the namespace prefix
+ */
+ @SuppressWarnings("unchecked")
+ private void _updateCustomDefs(LinkedHashMap<String, Object> customType, String namespacePrefix) {
+ LinkedHashMap<String, Object> outerCustomTypes;
+ for (String typeDef : typeDefinitionList) {
+ if (typeDef.equals("imports")) {
+ customDefs.put("imports", customType.get(typeDef));
+ if (allCustomDefs.isEmpty() || allCustomDefs.get("imports") == null) {
+ allCustomDefs.put("imports", customType.get(typeDef));
+ } else if (customType.get(typeDef) != null) {
+ Set<Object> allCustomImports = new HashSet<>((ArrayList<Object>) allCustomDefs.get("imports"));
+ allCustomImports.addAll((ArrayList<Object>) customType.get(typeDef));
+ allCustomDefs.put("imports", new ArrayList<>(allCustomImports));
+ }
+ } else {
+ outerCustomTypes = (LinkedHashMap<String, Object>) customType.get(typeDef);
+ if (outerCustomTypes != null) {
+ if (namespacePrefix != null && !namespacePrefix.isEmpty()) {
+ LinkedHashMap<String, Object> prefixCustomTypes = new LinkedHashMap<String, Object>();
+ for (Map.Entry<String, Object> me : outerCustomTypes.entrySet()) {
+ String typeDefKey = me.getKey();
+ String nameSpacePrefixToKey = namespacePrefix + "." + typeDefKey;
+ prefixCustomTypes.put(nameSpacePrefixToKey, outerCustomTypes.get(typeDefKey));
+ }
+ customDefs.putAll(prefixCustomTypes);
+ allCustomDefs.putAll(prefixCustomTypes);
+ } else {
+ customDefs.putAll(outerCustomTypes);
+ allCustomDefs.putAll(outerCustomTypes);
+ }
+ }
+ }
+ }
+ }
+
+ private void _updateNestedToscaTpls(String fullFileName, LinkedHashMap<String, Object> customTpl) {
+ if (fullFileName != null && customTpl != null) {
+ LinkedHashMap<String, Object> tt = new LinkedHashMap<String, Object>();
+ tt.put(fullFileName, customTpl);
+ nestedToscaTpls.add(tt);
+ }
+ }
+
+ private void _validateImportKeys(String importName, LinkedHashMap<String, Object> importUri) {
+ if (importUri.get(FILE) == null) {
+ //log.warning(_('Missing keyname "file" in import "%(name)s".') % {'name': import_name})
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE187", String.format(
+ "MissingRequiredFieldError: Import of template \"%s\" is missing field %s", importName, FILE)));
+ }
+ for (String key : importUri.keySet()) {
+ boolean bFound = false;
+ for (String is : IMPORTS_SECTION) {
+ if (is.equals(key)) {
+ bFound = true;
+ break;
+ }
+ }
+ if (!bFound) {
+ //log.warning(_('Unknown keyname "%(key)s" error in '
+ // 'imported definition "%(def)s".')
+ // % {'key': key, 'def': import_name})
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE188", String.format(
+ "UnknownFieldError: Import of template \"%s\" has unknown fiels %s", importName, key)));
+ }
+ }
+ }
+
+ @SuppressWarnings("unchecked")
+ private Object[] _loadImportTemplate(String importName, Object importUriDef) {
+ /*
+ This method loads the custom type definitions referenced in "imports"
+ section of the TOSCA YAML template by determining whether each import
+ is specified via a file reference (by relative or absolute path) or a
+ URL reference.
+
+ Possibilities:
+ +----------+--------+------------------------------+
+ | template | import | comment |
+ +----------+--------+------------------------------+
+ | file | file | OK |
+ | file | URL | OK |
+ | preparsed| file | file must be a full path |
+ | preparsed| URL | OK |
+ | URL | file | file must be a relative path |
+ | URL | URL | OK |
+ +----------+--------+------------------------------+
+ */
+ Object al[] = new Object[2];
+
+ boolean shortImportNotation = false;
+ String fileName;
+ String repository;
+ if (importUriDef instanceof LinkedHashMap) {
+ _validateImportKeys(importName, (LinkedHashMap<String, Object>) importUriDef);
+ fileName = (String) ((LinkedHashMap<String, Object>) importUriDef).get(FILE);
+ repository = (String) ((LinkedHashMap<String, Object>) importUriDef).get(REPOSITORY);
+ if (repository != null) {
+ if (!repositories.keySet().contains(repository)) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE189", String.format(
+ "InvalidPropertyValueError: Repository \"%s\" not found in \"%s\"",
+ repository, repositories.keySet().toString())));
+ }
+ }
+ } else {
+ fileName = (String) importUriDef;
+ repository = null;
+ shortImportNotation = true;
+ }
+
+ if (fileName == null || fileName.isEmpty()) {
+ //msg = (_('A template file name is not provided with import '
+ // 'definition "%(import_name)s".')
+ // % {'import_name': import_name})
+ //log.error(msg)
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE190", String.format(
+ "ValidationError: A template file name is not provided with import definition \"%s\"", importName)));
+ al[0] = al[1] = null;
+ return al;
+ }
+
+ if (UrlUtils.validateUrl(fileName)) {
+ try (InputStream input = new URL(fileName).openStream();) {
+ al[0] = fileName;
+ Yaml yaml = new Yaml();
+ al[1] = yaml.load(input);
+ return al;
+ } catch (IOException e) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE191", String.format(
+ "ImportError: \"%s\" loading YAML import from \"%s\"", e.getClass().getSimpleName(), fileName)));
+ al[0] = al[1] = null;
+ return al;
+ }
+ } else if (repository == null || repository.isEmpty()) {
+ boolean aFile = false;
+ String importTemplate = null;
+ if (path != null && !path.isEmpty()) {
+ if (UrlUtils.validateUrl(path)) {
+ File fp = new File(path);
+ if (fp.isAbsolute()) {
+ String msg = String.format(
+ "ImportError: Absolute file name \"%s\" cannot be used in the URL-based input template \"%s\"",
+ fileName, path);
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE192", msg));
+ al[0] = al[1] = null;
+ return al;
+ }
+ importTemplate = UrlUtils.joinUrl(path, fileName);
+ aFile = false;
+ } else {
+
+ aFile = true;
+ File fp = new File(path);
+ if (fp.isFile()) {
+ File fn = new File(fileName);
+ if (fn.isFile()) {
+ importTemplate = fileName;
+ } else {
+ String fullPath = Paths.get(path).toAbsolutePath().getParent().toString() + File.separator + fileName;
+ File ffp = new File(fullPath);
+ if (ffp.isFile()) {
+ importTemplate = fullPath;
+ } else {
+ String dirPath = Paths.get(path).toAbsolutePath().getParent().toString();
+ String filePath;
+ if (Paths.get(fileName).getParent() != null) {
+ filePath = Paths.get(fileName).getParent().toString();
+ } else {
+ filePath = "";
+ }
+ if (!filePath.isEmpty() && dirPath.endsWith(filePath)) {
+ String sFileName = Paths.get(fileName).getFileName().toString();
+ importTemplate = dirPath + File.separator + sFileName;
+ File fit = new File(importTemplate);
+ if (!fit.isFile()) {
+ //msg = (_('"%(import_template)s" is'
+ // 'not a valid file')
+ // % {'import_template':
+ // import_template})
+ //log.error(msg)
+ String msg = String.format(
+ "ValueError: \"%s\" is not a valid file", importTemplate);
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE193", msg));
+ log.debug("ImportsLoader - _loadImportTemplate - {}", msg);
+ }
+ }
+ }
+ }
+ }
+ }
+ } else { // template is pre-parsed
+ File fn = new File(fileName);
+ if (fn.isAbsolute() && fn.isFile()) {
+ aFile = true;
+ importTemplate = fileName;
+ } else {
+ String msg = String.format(
+ "Relative file name \"%s\" cannot be used in a pre-parsed input template", fileName);
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE194", "ImportError: " + msg));
+ al[0] = al[1] = null;
+ return al;
+ }
+ }
+
+ if (importTemplate == null || importTemplate.isEmpty()) {
+ //log.error(_('Import "%(name)s" is not valid.') %
+ // {'name': import_uri_def})
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE195", String.format(
+ "ImportError: Import \"%s\" is not valid", importUriDef)));
+ al[0] = al[1] = null;
+ return al;
+ }
+
+ // for now, this must be a file
+ if (!aFile) {
+ log.error("ImportsLoader - _loadImportTemplate - Error!! Expected a file. importUriDef = {}, importTemplate = {}", importUriDef, importTemplate);
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE196", String.format(
+ "ImportError: Import \"%s\" is not a file", importName)));
+ al[0] = al[1] = null;
+ return al;
+ }
+ try (BufferedReader br = new BufferedReader(new FileReader(importTemplate));) {
+ al[0] = importTemplate;
+
+ Yaml yaml = new Yaml();
+ al[1] = yaml.load(br);
+ return al;
+ } catch (FileNotFoundException e) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE197", String.format(
+ "ImportError: Failed to load YAML from \"%s\"" + e, importName)));
+ al[0] = al[1] = null;
+ return al;
+ } catch (Exception e) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE198", String.format(
+ "ImportError: Exception from SnakeYAML file = \"%s\"" + e, importName)));
+ al[0] = al[1] = null;
+ return al;
+ }
+ }
+
+ if (shortImportNotation) {
+ //log.error(_('Import "%(name)s" is not valid.') % import_uri_def)
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE199", String.format(
+ "ImportError: Import \"%s\" is not valid", importName)));
+ al[0] = al[1] = null;
+ return al;
+ }
+
+ String fullUrl = "";
+ String repoUrl = "";
+ if (repository != null && !repository.isEmpty()) {
+ if (repositories != null) {
+ for (String repoName : repositories.keySet()) {
+ if (repoName.equals(repository)) {
+ Object repoDef = repositories.get(repoName);
+ if (repoDef instanceof String) {
+ repoUrl = (String) repoDef;
+ } else if (repoDef instanceof LinkedHashMap) {
+ repoUrl = (String) ((LinkedHashMap<String, Object>) repoDef).get("url");
+ }
+ // Remove leading, ending spaces and strip
+ // the last character if "/"
+ repoUrl = repoUrl.trim();
+ if (repoUrl.endsWith("/")) {
+ repoUrl = repoUrl.substring(0, repoUrl.length() - 1);
+ }
+ fullUrl = repoUrl + "/" + fileName;
+ break;
+ }
+ }
+ }
+ if (fullUrl.isEmpty()) {
+ String msg = String.format(
+ "referenced repository \"%s\" in import definition \"%s\" not found",
+ repository, importName);
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE200", "ImportError: " + msg));
+ al[0] = al[1] = null;
+ return al;
+ }
+ }
+ if (UrlUtils.validateUrl(fullUrl)) {
+ try (InputStream input = new URL(fullUrl).openStream();) {
+ al[0] = fullUrl;
+ Yaml yaml = new Yaml();
+ al[1] = yaml.load(input);
+ return al;
+ } catch (IOException e) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE201", String.format(
+ "ImportError: Exception loading YAML import from \"%s\"", fullUrl)));
+ al[0] = al[1] = null;
+ return al;
+ }
+ } else {
+ String msg = String.format(
+ "repository URL \"%s\" in import definition \"%s\" is not valid",
+ repoUrl, importName);
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE202", "ImportError: " + msg));
+ }
+
+ // if we got here something is wrong with the flow...
+ log.error("ImportsLoader - _loadImportTemplate - got to dead end (importName {})", importName);
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE203", String.format(
+ "ImportError: _loadImportTemplate got to dead end (importName %s)\n", importName)));
+ al[0] = al[1] = null;
+ return al;
+ }
+
+ @Override
+ public String toString() {
+ return "ImportsLoader{" +
+ "IMPORTS_SECTION=" + Arrays.toString(IMPORTS_SECTION) +
+ ", importslist=" + importslist +
+ ", path='" + path + '\'' +
+ ", typeDefinitionList=" + typeDefinitionList +
+ ", customDefs=" + customDefs +
+ ", nestedToscaTpls=" + nestedToscaTpls +
+ ", repositories=" + repositories +
+ '}';
+ }
+}
+
+/*python
+
+import logging
+import os
+
+from toscaparser.common.exception import ValidationIssueCollector
+from toscaparser.common.exception import InvalidPropertyValueError
+from toscaparser.common.exception import MissingRequiredFieldError
+from toscaparser.common.exception import UnknownFieldError
+from toscaparser.common.exception import ValidationError
+from toscaparser.elements.tosca_type_validation import TypeValidation
+from toscaparser.utils.gettextutils import _
+import org.openecomp.sdc.toscaparser.api.utils.urlutils
+import org.openecomp.sdc.toscaparser.api.utils.yamlparser
+
+YAML_LOADER = toscaparser.utils.yamlparser.load_yaml
+log = logging.getLogger("tosca")
+
+
+class ImportsLoader(object):
+
+ IMPORTS_SECTION = (FILE, REPOSITORY, NAMESPACE_URI, NAMESPACE_PREFIX) = \
+ ('file', 'repository', 'namespace_uri',
+ 'namespace_prefix')
+
+ def __init__(self, importslist, path, type_definition_list=None,
+ tpl=None):
+ self.importslist = importslist
+ self.custom_defs = {}
+ if not path and not tpl:
+ msg = _('Input tosca template is not provided.')
+ log.warning(msg)
+ ValidationIssueCollector.appendException(ValidationError(message=msg))
+ self.path = path
+ self.repositories = {}
+ if tpl and tpl.get('repositories'):
+ self.repositories = tpl.get('repositories')
+ self.type_definition_list = []
+ if type_definition_list:
+ if isinstance(type_definition_list, list):
+ self.type_definition_list = type_definition_list
+ else:
+ self.type_definition_list.append(type_definition_list)
+ self._validate_and_load_imports()
+
+ def get_custom_defs(self):
+ return self.custom_defs
+
+ def _validate_and_load_imports(self):
+ imports_names = set()
+
+ if not self.importslist:
+ msg = _('"imports" keyname is defined without including '
+ 'templates.')
+ log.error(msg)
+ ValidationIssueCollector.appendException(ValidationError(message=msg))
+ return
+
+ for import_def in self.importslist:
+ if isinstance(import_def, dict):
+ for import_name, import_uri in import_def.items():
+ if import_name in imports_names:
+ msg = (_('Duplicate import name "%s" was found.') %
+ import_name)
+ log.error(msg)
+ ValidationIssueCollector.appendException(
+ ValidationError(message=msg))
+ imports_names.add(import_name)
+
+ custom_type = self._load_import_template(import_name,
+ import_uri)
+ namespace_prefix = None
+ if isinstance(import_uri, dict):
+ namespace_prefix = import_uri.get(
+ self.NAMESPACE_PREFIX)
+ if custom_type:
+ TypeValidation(custom_type, import_def)
+ self._update_custom_def(custom_type, namespace_prefix)
+ else: # old style of imports
+ custom_type = self._load_import_template(None,
+ import_def)
+ if custom_type:
+ TypeValidation(
+ custom_type, import_def)
+ self._update_custom_def(custom_type, None)
+
+ def _update_custom_def(self, custom_type, namespace_prefix):
+ outer_custom_types = {}
+ for type_def in self.type_definition_list:
+ outer_custom_types = custom_type.get(type_def)
+ if outer_custom_types:
+ if type_def == "imports":
+ self.custom_defs.update({'imports': outer_custom_types})
+ else:
+ if namespace_prefix:
+ prefix_custom_types = {}
+ for type_def_key in outer_custom_types.keys():
+ namespace_prefix_to_key = (namespace_prefix +
+ "." + type_def_key)
+ prefix_custom_types[namespace_prefix_to_key] = \
+ outer_custom_types[type_def_key]
+ self.custom_defs.update(prefix_custom_types)
+ else:
+ self.custom_defs.update(outer_custom_types)
+
+ def _validate_import_keys(self, import_name, import_uri_def):
+ if self.FILE not in import_uri_def.keys():
+ log.warning(_('Missing keyname "file" in import "%(name)s".')
+ % {'name': import_name})
+ ValidationIssueCollector.appendException(
+ MissingRequiredFieldError(
+ what='Import of template "%s"' % import_name,
+ required=self.FILE))
+ for key in import_uri_def.keys():
+ if key not in self.IMPORTS_SECTION:
+ log.warning(_('Unknown keyname "%(key)s" error in '
+ 'imported definition "%(def)s".')
+ % {'key': key, 'def': import_name})
+ ValidationIssueCollector.appendException(
+ UnknownFieldError(
+ what='Import of template "%s"' % import_name,
+ field=key))
+
+ def _load_import_template(self, import_name, import_uri_def):
+ """Handle custom types defined in imported template files
+
+ This method loads the custom type definitions referenced in "imports"
+ section of the TOSCA YAML template by determining whether each import
+ is specified via a file reference (by relative or absolute path) or a
+ URL reference.
+
+ Possibilities:
+ +----------+--------+------------------------------+
+ | template | import | comment |
+ +----------+--------+------------------------------+
+ | file | file | OK |
+ | file | URL | OK |
+ | preparsed| file | file must be a full path |
+ | preparsed| URL | OK |
+ | URL | file | file must be a relative path |
+ | URL | URL | OK |
+ +----------+--------+------------------------------+
+ """
+ short_import_notation = False
+ if isinstance(import_uri_def, dict):
+ self._validate_import_keys(import_name, import_uri_def)
+ file_name = import_uri_def.get(self.FILE)
+ repository = import_uri_def.get(self.REPOSITORY)
+ repos = self.repositories.keys()
+ if repository is not None:
+ if repository not in repos:
+ ValidationIssueCollector.appendException(
+ InvalidPropertyValueError(
+ what=_('Repository is not found in "%s"') % repos))
+ else:
+ file_name = import_uri_def
+ repository = None
+ short_import_notation = True
+
+ if not file_name:
+ msg = (_('A template file name is not provided with import '
+ 'definition "%(import_name)s".')
+ % {'import_name': import_name})
+ log.error(msg)
+ ValidationIssueCollector.appendException(ValidationError(message=msg))
+ return
+
+ if toscaparser.utils.urlutils.UrlUtils.validate_url(file_name):
+ return YAML_LOADER(file_name, False)
+ elif not repository:
+ import_template = None
+ if self.path:
+ if toscaparser.utils.urlutils.UrlUtils.validate_url(self.path):
+ if os.path.isabs(file_name):
+ msg = (_('Absolute file name "%(name)s" cannot be '
+ 'used in a URL-based input template '
+ '"%(template)s".')
+ % {'name': file_name, 'template': self.path})
+ log.error(msg)
+ ValidationIssueCollector.appendException(ImportError(msg))
+ return
+ import_template = toscaparser.utils.urlutils.UrlUtils.\
+ join_url(self.path, file_name)
+ a_file = False
+ else:
+ a_file = True
+ main_a_file = os.path.isfile(self.path)
+
+ if main_a_file:
+ if os.path.isfile(file_name):
+ import_template = file_name
+ else:
+ full_path = os.path.join(
+ os.path.dirname(os.path.abspath(self.path)),
+ file_name)
+ if os.path.isfile(full_path):
+ import_template = full_path
+ else:
+ file_path = file_name.rpartition("/")
+ dir_path = os.path.dirname(os.path.abspath(
+ self.path))
+ if file_path[0] != '' and dir_path.endswith(
+ file_path[0]):
+ import_template = dir_path + "/" +\
+ file_path[2]
+ if not os.path.isfile(import_template):
+ msg = (_('"%(import_template)s" is'
+ 'not a valid file')
+ % {'import_template':
+ import_template})
+ log.error(msg)
+ ValidationIssueCollector.appendException
+ (ValueError(msg))
+ else: # template is pre-parsed
+ if os.path.isabs(file_name) and os.path.isfile(file_name):
+ a_file = True
+ import_template = file_name
+ else:
+ msg = (_('Relative file name "%(name)s" cannot be used '
+ 'in a pre-parsed input template.')
+ % {'name': file_name})
+ log.error(msg)
+ ValidationIssueCollector.appendException(ImportError(msg))
+ return
+
+ if not import_template:
+ log.error(_('Import "%(name)s" is not valid.') %
+ {'name': import_uri_def})
+ ValidationIssueCollector.appendException(
+ ImportError(_('Import "%s" is not valid.') %
+ import_uri_def))
+ return
+ return YAML_LOADER(import_template, a_file)
+
+ if short_import_notation:
+ log.error(_('Import "%(name)s" is not valid.') % import_uri_def)
+ ValidationIssueCollector.appendException(
+ ImportError(_('Import "%s" is not valid.') % import_uri_def))
+ return
+
+ full_url = ""
+ if repository:
+ if self.repositories:
+ for repo_name, repo_def in self.repositories.items():
+ if repo_name == repository:
+ # Remove leading, ending spaces and strip
+ # the last character if "/"
+ repo_url = ((repo_def['url']).strip()).rstrip("//")
+ full_url = repo_url + "/" + file_name
+
+ if not full_url:
+ msg = (_('referenced repository "%(n_uri)s" in import '
+ 'definition "%(tpl)s" not found.')
+ % {'n_uri': repository, 'tpl': import_name})
+ log.error(msg)
+ ValidationIssueCollector.appendException(ImportError(msg))
+ return
+
+ if toscaparser.utils.urlutils.UrlUtils.validate_url(full_url):
+ return YAML_LOADER(full_url, False)
+ else:
+ msg = (_('repository url "%(n_uri)s" is not valid in import '
+ 'definition "%(tpl)s".')
+ % {'n_uri': repo_url, 'tpl': import_name})
+ log.error(msg)
+ ValidationIssueCollector.appendException(ImportError(msg))
+*/
diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/NodeTemplate.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/NodeTemplate.java
new file mode 100644
index 0000000..4fabe38
--- /dev/null
+++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/NodeTemplate.java
@@ -0,0 +1,824 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.sdc.toscaparser.api;
+
+import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue;
+import org.onap.sdc.toscaparser.api.elements.EntityType;
+import org.onap.sdc.toscaparser.api.elements.InterfacesDef;
+import org.onap.sdc.toscaparser.api.elements.Metadata;
+import org.onap.sdc.toscaparser.api.elements.NodeType;
+import org.onap.sdc.toscaparser.api.elements.RelationshipType;
+import org.onap.sdc.toscaparser.api.utils.CopyUtils;
+import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder;
+
+import java.util.ArrayList;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+
+import static org.onap.sdc.toscaparser.api.elements.EntityType.TOSCA_DEF;
+
+public class NodeTemplate extends EntityTemplate {
+
+ private LinkedHashMap<String, Object> templates;
+ private LinkedHashMap<String, Object> customDef;
+ private ArrayList<RelationshipTemplate> availableRelTpls;
+ private LinkedHashMap<String, Object> availableRelTypes;
+ private LinkedHashMap<NodeTemplate, RelationshipType> related;
+ private ArrayList<RelationshipTemplate> relationshipTpl;
+ private LinkedHashMap<RelationshipType, NodeTemplate> _relationships;
+ private SubstitutionMappings subMappingToscaTemplate;
+ private TopologyTemplate originComponentTemplate;
+ private Metadata metadata;
+
+ private static final String METADATA = "metadata";
+
+    /**
+     * Convenience constructor for a node template without a parent node
+     * template; delegates to the full constructor with parentNodeTemplate = null.
+     *
+     * @param name                name of this node template (key into ntnodeTemplates)
+     * @param ntnodeTemplates     map of all node templates in the topology
+     * @param ntcustomDef         custom type definitions in scope
+     * @param ntavailableRelTpls  relationship templates available for resolution (may be null)
+     * @param ntavailableRelTypes relationship type definitions available (may be null)
+     */
+    public NodeTemplate(String name,
+                        LinkedHashMap<String, Object> ntnodeTemplates,
+                        LinkedHashMap<String, Object> ntcustomDef,
+                        ArrayList<RelationshipTemplate> ntavailableRelTpls,
+                        LinkedHashMap<String, Object> ntavailableRelTypes) {
+        this(name, ntnodeTemplates, ntcustomDef, ntavailableRelTpls,
+                ntavailableRelTypes, null);
+    }
+
+    /**
+     * Builds a node template from its entry in the topology's node_templates map.
+     * The super constructor is given this node's own definition
+     * (ntnodeTemplates.get(name)); relationships are resolved lazily by
+     * getRelationships(), not here.
+     *
+     * @param name                name of this node template (key into ntnodeTemplates)
+     * @param ntnodeTemplates     map of all node templates in the topology
+     * @param ntcustomDef         custom type definitions in scope
+     * @param ntavailableRelTpls  relationship templates available for resolution (may be null)
+     * @param ntavailableRelTypes relationship type definitions available (may be null)
+     * @param parentNodeTemplate  enclosing node template for nested (substitution-mapped) topologies, or null
+     */
+    @SuppressWarnings("unchecked")
+    public NodeTemplate(String name,
+                        LinkedHashMap<String, Object> ntnodeTemplates,
+                        LinkedHashMap<String, Object> ntcustomDef,
+                        ArrayList<RelationshipTemplate> ntavailableRelTpls,
+                        LinkedHashMap<String, Object> ntavailableRelTypes,
+                        NodeTemplate parentNodeTemplate) {
+
+        super(name, (LinkedHashMap<String, Object>) ntnodeTemplates.get(name),
+                "node_type", ntcustomDef, parentNodeTemplate);
+
+        templates = ntnodeTemplates;
+        // validate this template's own keys before caching the collaborators
+        _validateFields((LinkedHashMap<String, Object>) templates.get(name));
+        customDef = ntcustomDef;
+        related = new LinkedHashMap<NodeTemplate, RelationshipType>();
+        relationshipTpl = new ArrayList<RelationshipTemplate>();
+        availableRelTpls = ntavailableRelTpls;
+        availableRelTypes = ntavailableRelTypes;
+        _relationships = new LinkedHashMap<RelationshipType, NodeTemplate>();
+        subMappingToscaTemplate = null;
+        // metadata section is optional; null when absent
+        metadata = _metaData();
+    }
+
+    /**
+     * Returns the relationships of this node template, lazily resolved from
+     * its requirement assignments on first access and cached afterwards.
+     */
+    @SuppressWarnings("unchecked")
+    public LinkedHashMap<RelationshipType, NodeTemplate> getRelationships() {
+        if (_relationships.isEmpty()) {
+            List<RequirementAssignment> requirementList = getRequirements().getAll();
+            if (requirementList != null && requirementList instanceof List) {
+                for (RequirementAssignment requirement : requirementList) {
+                    LinkedHashMap<RelationshipType, NodeTemplate> explicitRelations =
+                            _getExplicitRelationship(requirement);
+                    if (explicitRelations != null) {
+                        _relationships.putAll(explicitRelations);
+                    }
+                }
+            }
+        }
+        return _relationships;
+    }
+
+    /**
+     * Resolves an explicitly declared relationship on a requirement assignment,
+     * for example:
+     *   - req:
+     *       node: DBMS
+     *       relationship: tosca.relationships.HostedOn
+     *
+     * @param req the requirement assignment to resolve
+     * @return map of matched RelationshipType to target NodeTemplate; null when
+     *         the requirement cannot be resolved (a validation issue is
+     *         reported via ThreadLocalsHolder in that case)
+     */
+    @SuppressWarnings("unchecked")
+    private LinkedHashMap<RelationshipType, NodeTemplate> _getExplicitRelationship(RequirementAssignment req) {
+        // Handle explicit relationship
+
+        // For example,
+        // - req:
+        //      node: DBMS
+        //      relationship: tosca.relationships.HostedOn
+
+        LinkedHashMap<RelationshipType, NodeTemplate> explicitRelation = new LinkedHashMap<RelationshipType, NodeTemplate>();
+        String node = req.getNodeTemplateName();
+
+        if (node != null && !node.isEmpty()) {
+            //msg = _('Lookup by TOSCA types is not supported. '
+            //        'Requirement for "%s" can not be full-filled.') % self.name
+            // A "node" value that names a TOSCA *type* (rather than a template)
+            // cannot be resolved here — lookup by type is not implemented.
+            boolean bFound = false;
+            for (String k : EntityType.TOSCA_DEF.keySet()) {
+                if (k.equals(node)) {
+                    bFound = true;
+                    break;
+                }
+            }
+            if (bFound || customDef.get(node) != null) {
+                ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE205", String.format(
+                        "NotImplementedError: Lookup by TOSCA types is not supported. Requirement for \"%s\" can not be full-filled",
+                        getName())));
+                return null;
+            }
+            if (templates.get(node) == null) {
+                ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE206", String.format(
+                        "KeyError: Node template \"%s\" was not found", node)));
+                return null;
+            }
+            NodeTemplate relatedTpl = new NodeTemplate(node, templates, customDef, null, null);
+            Object relationship = req.getRelationship();
+            String relationshipString = null;
+//          // here relationship can be a string or a LHM with 'type':<relationship>
+
+            // check if its type has relationship defined
+            if (relationship == null) {
+                // no relationship on the assignment itself: fall back to the
+                // relationship declared by the node type's requirement definition
+                ArrayList<Object> parentReqs = ((NodeType) typeDefinition).getAllRequirements();
+                if (parentReqs == null) {
+                    ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE207", "ValidationError: parent_req is null"));
+                } else {
+//                  for(String key: req.keySet()) {
+//                      boolean bFoundRel = false;
+                    for (Object rdo : parentReqs) {
+                        LinkedHashMap<String, Object> reqDict = (LinkedHashMap<String, Object>) rdo;
+                        LinkedHashMap<String, Object> relDict = (LinkedHashMap<String, Object>) reqDict.get(req.getName());
+                        if (relDict != null) {
+                            relationship = relDict.get("relationship");
+                            //BUG-python??? need to break twice?
+//                          bFoundRel = true;
+                            break;
+                        }
+                    }
+//                  if(bFoundRel) {
+//                      break;
+//                  }
+//              }
+                }
+            }
+
+            if (relationship != null) {
+                // here relationship can be a string or a LHM with 'type':<relationship>
+                if (relationship instanceof String) {
+                    relationshipString = (String) relationship;
+                } else if (relationship instanceof LinkedHashMap) {
+                    relationshipString = (String) ((LinkedHashMap<String, Object>) relationship).get("type");
+                }
+
+                boolean foundRelationshipTpl = false;
+                // apply available relationship templates if found
+                if (availableRelTpls != null) {
+                    for (RelationshipTemplate tpl : availableRelTpls) {
+                        if (tpl.getName().equals(relationshipString)) {
+                            RelationshipType rtype = new RelationshipType(tpl.getType(), null, customDef);
+                            explicitRelation.put(rtype, relatedTpl);
+                            tpl.setTarget(relatedTpl);
+                            tpl.setSource(this);
+                            relationshipTpl.add(tpl);
+                            foundRelationshipTpl = true;
+                        }
+                    }
+                }
+                // create relationship template object.
+                String relPrfx = EntityType.RELATIONSHIP_PREFIX;
+                if (!foundRelationshipTpl) {
+                    if (relationship instanceof LinkedHashMap) {
+                        relationshipString = (String) ((LinkedHashMap<String, Object>) relationship).get("type");
+                        if (relationshipString != null) {
+                            // short type names are expanded with the standard
+                            // prefix unless they name a custom relationship type
+                            if (availableRelTypes != null && !availableRelTypes.isEmpty() &&
+                                    availableRelTypes.get(relationshipString) != null) {
+                                ;
+                            } else if (!(relationshipString).startsWith(relPrfx)) {
+                                relationshipString = relPrfx + relationshipString;
+                            }
+                        } else {
+                            ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE208", String.format(
+                                    "MissingRequiredFieldError: \"relationship\" used in template \"%s\" is missing required field \"type\"",
+                                    relatedTpl.getName())));
+                        }
+                    }
+                    // match against the relationship types declared by this node's type,
+                    // either directly or via the custom type's derived_from chain
+                    for (RelationshipType rtype : ((NodeType) typeDefinition).getRelationship().keySet()) {
+                        if (rtype.getType().equals(relationshipString)) {
+                            explicitRelation.put(rtype, relatedTpl);
+                            relatedTpl._addRelationshipTemplate(req, rtype.getType(), this);
+                        } else if (availableRelTypes != null && !availableRelTypes.isEmpty()) {
+                            LinkedHashMap<String, Object> relTypeDef = (LinkedHashMap<String, Object>) availableRelTypes.get(relationshipString);
+                            if (relTypeDef != null) {
+                                String superType = (String) relTypeDef.get("derived_from");
+                                if (superType != null) {
+                                    if (!superType.startsWith(relPrfx)) {
+                                        superType = relPrfx + superType;
+                                    }
+                                    if (rtype.getType().equals(superType)) {
+                                        explicitRelation.put(rtype, relatedTpl);
+                                        relatedTpl._addRelationshipTemplate(req, rtype.getType(), this);
+                                    }
+                                }
+                            }
+                        }
+                    }
+                }
+            }
+        }
+        return explicitRelation;
+    }
+
+    /**
+     * Creates a RelationshipTemplate for the given requirement/type pair and
+     * records it in this node's relationshipTpl list. The requirement's
+     * relationship value is deep-copied so the new template does not share
+     * mutable state with the assignment.
+     *
+     * @param requirement the requirement assignment being materialized
+     * @param rtype       fully-qualified relationship type name
+     * @param source      the node template that declares the requirement
+     */
+    @SuppressWarnings("unchecked")
+    private void _addRelationshipTemplate(RequirementAssignment requirement, String rtype, NodeTemplate source) {
+        LinkedHashMap<String, Object> req = new LinkedHashMap<>();
+        req.put("relationship", CopyUtils.copyLhmOrAl(requirement.getRelationship()));
+        req.put("type", rtype);
+        RelationshipTemplate tpl = new RelationshipTemplate(req, rtype, customDef, this, source, getParentNodeTemplate());
+        relationshipTpl.add(tpl);
+    }
+
+    /** Returns the relationship templates accumulated for this node (live list, not a copy). */
+    public ArrayList<RelationshipTemplate> getRelationshipTemplate() {
+        return relationshipTpl;
+    }
+
+    /** Records a directly related node and the relationship type linking to it. */
+    void _addNext(NodeTemplate nodetpl, RelationshipType relationship) {
+        related.put(nodetpl, relationship);
+    }
+
+    /**
+     * Returns node templates related to this one through the relationship
+     * declarations of its node type, computed lazily and cached in "related".
+     */
+    public ArrayList<NodeTemplate> getRelatedNodes() {
+        if (related.isEmpty()) {
+            for (Map.Entry<RelationshipType, NodeType> me : ((NodeType) typeDefinition).getRelationship().entrySet()) {
+                RelationshipType relation = me.getKey();
+                NodeType node = me.getValue();
+                for (String tpl : templates.keySet()) {
+                    if (tpl.equals(node.getType())) {
+                        //BUG.. python has
+                        //    self.related[NodeTemplate(tpl)] = relation
+                        // but NodeTemplate doesn't have a constructor with just name...
+                        //????
+                        // NOTE(review): constructing with null maps will NPE in the
+                        // super constructor (ntnodeTemplates.get(name)) if this branch
+                        // is ever taken — confirm whether it is reachable in practice.
+                        related.put(new NodeTemplate(tpl, null, null, null, null), relation);
+                    }
+                }
+            }
+        }
+        return new ArrayList<NodeTemplate>(related.keySet());
+    }
+
+    /**
+     * Validates this node template: capabilities, requirements, properties
+     * against the node type, interfaces, and finally each property value.
+     * Issues are reported through the thread-local validation collector
+     * rather than thrown.
+     */
+    public void validate(/*tosca_tpl=none is not used...*/) {
+        _validateCapabilities();
+        _validateRequirements();
+        _validateProperties(entityTpl, (NodeType) typeDefinition);
+        _validateInterfaces();
+        for (Property prop : getPropertiesObjects()) {
+            prop.validate();
+        }
+    }
+
+    /**
+     * Reads a raw property value for this node directly from the parsed
+     * template map, bypassing the Property object layer.
+     *
+     * @param propertyName name of the property to look up
+     * @return the raw parsed value, or null when the node entry or its
+     *         properties section or the property itself is absent
+     */
+    @SuppressWarnings("unchecked")
+    public Object getPropertyValueFromTemplatesByName(String propertyName) {
+        LinkedHashMap<String, Object> nodeEntry = (LinkedHashMap<String, Object>) templates.get(name);
+        if (nodeEntry == null) {
+            return null;
+        }
+        LinkedHashMap<String, Object> propertySection = (LinkedHashMap<String, Object>) nodeEntry.get(PROPERTIES);
+        if (propertySection == null) {
+            return null;
+        }
+        return propertySection.get(propertyName);
+    }
+
+    /**
+     * Wraps the template's optional "metadata" section in a Metadata object.
+     *
+     * @return the Metadata wrapper, or null when the section is absent
+     */
+    @SuppressWarnings("unchecked")
+    private Metadata _metaData() {
+        Object rawMetadata = entityTpl.get(METADATA);
+        return rawMetadata == null ? null : new Metadata((Map<String, Object>) rawMetadata);
+    }
+
+    /**
+     * Validates the "requirements" section of this node template against the
+     * requirement names allowed by the node type hierarchy. Reports JE209 when
+     * the section is not a list, and delegates per-requirement key/property
+     * validation to _validateRequirementsKeys / _validateRequirementsProperties.
+     */
+    @SuppressWarnings("unchecked")
+    private void _validateRequirements() {
+        // Collect the requirement names permitted by the type hierarchy;
+        // "template" is always allowed.
+        ArrayList<Object> typeRequires = ((NodeType) typeDefinition).getAllRequirements();
+        ArrayList<String> allowedReqs = new ArrayList<>();
+        allowedReqs.add("template");
+        if (typeRequires != null) {
+            for (Object to : typeRequires) {
+                LinkedHashMap<String, Object> treq = (LinkedHashMap<String, Object>) to;
+                for (Map.Entry<String, Object> me : treq.entrySet()) {
+                    allowedReqs.add(me.getKey());
+                    Object value = me.getValue();
+                    if (value instanceof LinkedHashMap) {
+                        allowedReqs.addAll(((LinkedHashMap<String, Object>) value).keySet());
+                    }
+                }
+            }
+        }
+
+        // Bug fix: inspect the runtime type BEFORE casting. The previous code
+        // cast the value to ArrayList first, so a non-list "requirements"
+        // section threw ClassCastException instead of reporting JE209, and the
+        // instanceof check below was unreachable.
+        Object requiresObj = ((NodeType) typeDefinition).getValue(REQUIREMENTS, entityTpl, false);
+        if (requiresObj != null) {
+            if (!(requiresObj instanceof ArrayList)) {
+                ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE209", String.format(
+                        "TypeMismatchError: \"requirements\" of template \"%s\" are not of type \"list\"", name)));
+            } else {
+                for (Object ro : (ArrayList<Object>) requiresObj) {
+                    LinkedHashMap<String, Object> req = (LinkedHashMap<String, Object>) ro;
+                    for (Map.Entry<String, Object> me : req.entrySet()) {
+                        String rl = me.getKey();
+                        Object vo = me.getValue();
+                        if (vo instanceof LinkedHashMap) {
+                            LinkedHashMap<String, Object> value = (LinkedHashMap<String, Object>) vo;
+                            _validateRequirementsKeys(value);
+                            _validateRequirementsProperties(value);
+                            allowedReqs.add(rl);
+                        }
+                    }
+                    _commonValidateField(req, allowedReqs, "requirements");
+                }
+            }
+        }
+    }
+
+    /**
+     * Validates the properties of one requirement entry.
+     *
+     * TO-DO(anyone): Only the "occurrences" property of the requirements is
+     * validated here. Validation of other requirement properties is done in
+     * different files; better to keep all requirement property validation here.
+     */
+    @SuppressWarnings("unchecked")
+    private void _validateRequirementsProperties(LinkedHashMap<String, Object> reqs) {
+        if (reqs.containsKey("occurrences")) {
+            _validateOccurrences((ArrayList<Object>) reqs.get("occurrences"));
+        }
+    }
+
+    /**
+     * Validates an "occurrences" value: it must be a list of exactly two
+     * integers [min, max] with 0 <= min <= max and max != 0. Reports JE210
+     * on violation.
+     */
+    private void _validateOccurrences(ArrayList<Object> occurrences) {
+        DataEntity.validateDatatype("list", occurrences, null, null, null);
+        for (Object element : occurrences) {
+            DataEntity.validateDatatype("Integer", element, null, null, null);
+        }
+        // Only read the bounds once the list is known to hold exactly two
+        // entries (preserves the original short-circuit on malformed sizes).
+        boolean invalid = true;
+        if (occurrences.size() == 2) {
+            int minOccurrences = (int) occurrences.get(0);
+            int maxOccurrences = (int) occurrences.get(1);
+            invalid = !(0 <= minOccurrences && minOccurrences <= maxOccurrences) || maxOccurrences == 0;
+        }
+        if (invalid) {
+            ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE210", String.format(
+                    "InvalidPropertyValueError: property has invalid value %s", occurrences.toString())));
+        }
+    }
+
+    /**
+     * Checks every key of a requirement entry against the known
+     * REQUIREMENTS_SECTION field names and reports JE211 for unknown keys.
+     */
+    private void _validateRequirementsKeys(LinkedHashMap<String, Object> reqs) {
+        for (String key : reqs.keySet()) {
+            boolean recognized = false;
+            for (String sectionName : REQUIREMENTS_SECTION) {
+                if (sectionName.equals(key)) {
+                    recognized = true;
+                    break;
+                }
+            }
+            if (!recognized) {
+                ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE211", String.format(
+                        "UnknownFieldError: \"requirements\" of template \"%s\" contains unknown field \"%s\"", name, key)));
+            }
+        }
+    }
+
+    /**
+     * Validates the "interfaces" section of this node template. For the
+     * standard lifecycle and configure interfaces the operation names are
+     * checked against the fixed operation lists; for interfaces declared by
+     * the node type the allowed operations are collected from the custom
+     * interface definition. Unknown interface names are reported as JE212.
+     */
+    @SuppressWarnings("unchecked")
+    private void _validateInterfaces() {
+        LinkedHashMap<String, Object> ifaces = (LinkedHashMap<String, Object>)
+                ((NodeType) typeDefinition).getValue(INTERFACES, entityTpl, false);
+        if (ifaces != null) {
+            for (Map.Entry<String, Object> me : ifaces.entrySet()) {
+                String iname = me.getKey();
+                LinkedHashMap<String, Object> value = (LinkedHashMap<String, Object>) me.getValue();
+                if (iname.equals(InterfacesDef.LIFECYCLE) || iname.equals(InterfacesDef.LIFECYCLE_SHORTNAME)) {
+                    // copy the fixed operation-name array into a list for _commonValidateField
+                    ArrayList<String> inlo = new ArrayList<>();
+                    for (int i = 0; i < InterfacesDef.INTERFACE_NODE_LIFECYCLE_OPERATIONS.length; i++) {
+                        inlo.add(InterfacesDef.INTERFACE_NODE_LIFECYCLE_OPERATIONS[i]);
+                    }
+                    _commonValidateField(value, inlo, "interfaces");
+                } else if (iname.equals(InterfacesDef.CONFIGURE) || iname.equals(InterfacesDef.CONFIGURE_SHORTNAME)) {
+                    // copy the fixed operation-name array into a list for _commonValidateField
+                    ArrayList<String> irco = new ArrayList<>();
+                    for (int i = 0; i < InterfacesDef.INTERFACE_RELATIONSHIP_CONFIGURE_OPERATIONS.length; i++) {
+                        irco.add(InterfacesDef.INTERFACE_RELATIONSHIP_CONFIGURE_OPERATIONS[i]);
+                    }
+                    _commonValidateField(value, irco, "interfaces");
+                } else if (((NodeType) typeDefinition).getInterfaces().keySet().contains(iname)) {
+                    // custom interface declared by the node type: validate
+                    // against the operations it defines
+                    _commonValidateField(value, _collectCustomIfaceOperations(iname), "interfaces");
+                } else {
+                    ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE212", String.format(
+                            "UnknownFieldError: \"interfaces\" of template \"%s\" contains unknown field %s", name, iname)));
+                }
+            }
+        }
+    }
+
+    /**
+     * Collects the operation names allowed for a custom interface declared by
+     * this node's type: the keys of the interface definition plus, when the
+     * definition names a "type", the keys of that interface type's definition
+     * (looked up first in customDef, then in TOSCA_DEF), minus the reserved
+     * interface-definition words.
+     *
+     * @param iname name of the interface as declared on the node type
+     * @return list of allowed operation names
+     */
+    @SuppressWarnings("unchecked")
+    private ArrayList<String> _collectCustomIfaceOperations(String iname) {
+        ArrayList<String> allowedOperations = new ArrayList<>();
+        LinkedHashMap<String, Object> nodetypeIfaceDef = (LinkedHashMap<String, Object>) ((NodeType)
+                typeDefinition).getInterfaces().get(iname);
+        allowedOperations.addAll(nodetypeIfaceDef.keySet());
+        String ifaceType = (String) nodetypeIfaceDef.get("type");
+        if (ifaceType != null) {
+            LinkedHashMap<String, Object> ifaceTypeDef = null;
+            if (((NodeType) typeDefinition).customDef != null) {
+                ifaceTypeDef = (LinkedHashMap<String, Object>) ((NodeType) typeDefinition).customDef.get(ifaceType);
+            }
+            if (ifaceTypeDef == null) {
+                ifaceTypeDef = (LinkedHashMap<String, Object>) EntityType.TOSCA_DEF.get(ifaceType);
+            }
+            // NOTE(review): ifaceTypeDef can still be null here if the type is
+            // in neither customDef nor TOSCA_DEF, which would NPE on the next
+            // line — confirm upstream validation guarantees its presence.
+            allowedOperations.addAll(ifaceTypeDef.keySet());
+        }
+        // copy the reserved-word array into a list so removeAll can be used
+        ArrayList<String> idrw = new ArrayList<>();
+        for (int i = 0; i < InterfacesDef.INTERFACE_DEF_RESERVED_WORDS.length; i++) {
+            idrw.add(InterfacesDef.INTERFACE_DEF_RESERVED_WORDS[i]);
+        }
+        allowedOperations.removeAll(idrw);
+        return allowedOperations;
+    }
+
+    /**
+     * Get all interface details for given node template.<br>
+     * Combines the custom interfaces declared on the node type with the
+     * standard lifecycle interface from the TOSCA definitions, and builds an
+     * InterfacesDef per operation (the reserved "type" key is skipped).
+     *
+     * @return Map that contains the list of all interfaces and their definitions.
+     * If none found, an empty map will be returned.
+     */
+    public Map<String, List<InterfacesDef>> getAllInterfaceDetailsForNodeType() {
+        Map<String, List<InterfacesDef>> interfaceMap = new LinkedHashMap<>();
+
+        // Get custom interface details
+        Map<String, Object> customInterfacesDetails = ((NodeType) typeDefinition).getInterfaces();
+        // Get native interface details from tosca definitions
+        Object nativeInterfaceDetails = TOSCA_DEF.get(InterfacesDef.LIFECYCLE);
+        Map<String, Object> allInterfaceDetails = new LinkedHashMap<>();
+        allInterfaceDetails.putAll(customInterfacesDetails);
+        if (nativeInterfaceDetails != null) {
+            allInterfaceDetails.put(InterfacesDef.LIFECYCLE, nativeInterfaceDetails);
+        }
+
+        // Process all interface details from combined collection and return an interface Map with
+        // interface names and their definitions
+        for (Map.Entry<String, Object> me : allInterfaceDetails.entrySet()) {
+            ArrayList<InterfacesDef> interfaces = new ArrayList<>();
+            String interfaceType = me.getKey();
+            Map<String, Object> interfaceValue = (Map<String, Object>) me.getValue();
+            // an explicit "type" entry overrides the map key as the interface type name
+            if (interfaceValue.containsKey("type")) {
+                interfaceType = (String) interfaceValue.get("type");
+            }
+
+            for (Map.Entry<String, Object> ve : interfaceValue.entrySet()) {
+                // Filter type as this is a reserved key and not an operation
+                if (!ve.getKey().equals("type")) {
+                    InterfacesDef iface = new InterfacesDef(typeDefinition, interfaceType, this, ve.getKey(), ve.getValue());
+                    interfaces.add(iface);
+                }
+            }
+            interfaceMap.put(interfaceType, interfaces);
+        }
+        return interfaceMap;
+    }
+
+    /**
+     * Checks every top-level key of this node template's definition against
+     * the known SECTIONS and SPECIAL_SECTIONS names, reporting JE213 for any
+     * unknown field.
+     */
+    private void _validateFields(LinkedHashMap<String, Object> nodetemplate) {
+        for (String fieldName : nodetemplate.keySet()) {
+            boolean recognized = false;
+            for (String section : SECTIONS) {
+                if (section.equals(fieldName)) {
+                    recognized = true;
+                    break;
+                }
+            }
+            if (!recognized) {
+                for (String section : SPECIAL_SECTIONS) {
+                    if (section.equals(fieldName)) {
+                        recognized = true;
+                        break;
+                    }
+                }
+            }
+            if (!recognized) {
+                ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE213", String.format(
+                        "UnknownFieldError: Node template \"%s\" has unknown field \"%s\"", name, fieldName)));
+            }
+        }
+    }
+
+ // getter/setter
+
+ // multilevel nesting
+    /** Returns the substitution mappings of the nested topology this node is mapped to, or null. */
+    public SubstitutionMappings getSubMappingToscaTemplate() {
+        return subMappingToscaTemplate;
+    }
+
+    /** Attaches the substitution mappings of a nested topology to this node. */
+    public void setSubMappingToscaTemplate(SubstitutionMappings sm) {
+        subMappingToscaTemplate = sm;
+    }
+
+    /** Returns the "metadata" section wrapper captured at construction, or null when absent. */
+    public Metadata getMetaData() {
+        return metadata;
+    }
+
+    /** Replaces this node template's metadata wrapper. */
+    public void setMetaData(Metadata metadata) {
+        this.metadata = metadata;
+    }
+
+    /** Uses the template name as the string form. */
+    @Override
+    public String toString() {
+        return getName();
+    }
+
+    /** Returns the topology template this node originated from, or null. */
+    public TopologyTemplate getOriginComponentTemplate() {
+        return originComponentTemplate;
+    }
+
+    /** Records the topology template this node originated from. */
+    public void setOriginComponentTemplate(TopologyTemplate originComponentTemplate) {
+        this.originComponentTemplate = originComponentTemplate;
+    }
+
+}
+
+/*python
+
+from toscaparser.common.exception import ValidationIssueCollector
+from toscaparser.common.exception import InvalidPropertyValueError
+from toscaparser.common.exception import MissingRequiredFieldError
+from toscaparser.common.exception import TypeMismatchError
+from toscaparser.common.exception import UnknownFieldError
+from toscaparser.common.exception import ValidationError
+from toscaparser.dataentity import DataEntity
+from toscaparser.elements.interfaces import CONFIGURE
+from toscaparser.elements.interfaces import CONFIGURE_SHORTNAME
+from toscaparser.elements.interfaces import INTERFACE_DEF_RESERVED_WORDS
+from toscaparser.elements.interfaces import InterfacesDef
+from toscaparser.elements.interfaces import LIFECYCLE
+from toscaparser.elements.interfaces import LIFECYCLE_SHORTNAME
+from toscaparser.elements.relationshiptype import RelationshipType
+from toscaparser.entity_template import EntityTemplate
+from toscaparser.relationship_template import RelationshipTemplate
+from toscaparser.utils.gettextutils import _
+
+log = logging.getLogger('tosca')
+
+
+class NodeTemplate(EntityTemplate):
+ '''Node template from a Tosca profile.'''
+ def __init__(self, name, node_templates, custom_def=None,
+ available_rel_tpls=None, available_rel_types=None):
+ super(NodeTemplate, self).__init__(name, node_templates[name],
+ 'node_type',
+ custom_def)
+ self.templates = node_templates
+ self._validate_fields(node_templates[name])
+ self.custom_def = custom_def
+ self.related = {}
+ self.relationship_tpl = []
+ self.available_rel_tpls = available_rel_tpls
+ self.available_rel_types = available_rel_types
+ self._relationships = {}
+ self.sub_mapping_tosca_template = None
+
+ @property
+ def relationships(self):
+ if not self._relationships:
+ requires = self.requirements
+ if requires and isinstance(requires, list):
+ for r in requires:
+ for r1, value in r.items():
+ explicit = self._get_explicit_relationship(r, value)
+ if explicit:
+ for key, value in explicit.items():
+ self._relationships[key] = value
+ return self._relationships
+
+ def _get_explicit_relationship(self, req, value):
+ """Handle explicit relationship
+
+ For example,
+ - req:
+ node: DBMS
+ relationship: tosca.relationships.HostedOn
+ """
+ explicit_relation = {}
+ node = value.get('node') if isinstance(value, dict) else value
+
+ if node:
+ # TO-DO(spzala) implement look up once Glance meta data is available
+ # to find a matching TOSCA node using the TOSCA types
+ msg = _('Lookup by TOSCA types is not supported. '
+ 'Requirement for "%s" can not be full-filled.') % self.name
+ if (node in list(self.type_definition.TOSCA_DEF.keys())
+ or node in self.custom_def):
+ ValidationIssueCollector.appendException(NotImplementedError(msg))
+ return
+
+ if node not in self.templates:
+ ValidationIssueCollector.appendException(
+ KeyError(_('Node template "%s" was not found.') % node))
+ return
+
+ related_tpl = NodeTemplate(node, self.templates, self.custom_def)
+ relationship = value.get('relationship') \
+ if isinstance(value, dict) else None
+ # check if it's type has relationship defined
+ if not relationship:
+ parent_reqs = self.type_definition.get_all_requirements()
+ if parent_reqs is None:
+ ValidationIssueCollector.appendException(
+ ValidationError(message='parent_req is ' +
+ str(parent_reqs)))
+ else:
+ for key in req.keys():
+ for req_dict in parent_reqs:
+ if key in req_dict.keys():
+ relationship = (req_dict.get(key).
+ get('relationship'))
+ break
+ if relationship:
+ found_relationship_tpl = False
+ # apply available relationship templates if found
+ if self.available_rel_tpls:
+ for tpl in self.available_rel_tpls:
+ if tpl.name == relationship:
+ rtype = RelationshipType(tpl.type, None,
+ self.custom_def)
+ explicit_relation[rtype] = related_tpl
+ tpl.target = related_tpl
+ tpl.source = self
+ self.relationship_tpl.append(tpl)
+ found_relationship_tpl = True
+ # create relationship template object.
+ rel_prfx = self.type_definition.RELATIONSHIP_PREFIX
+ if not found_relationship_tpl:
+ if isinstance(relationship, dict):
+ relationship = relationship.get('type')
+ if relationship:
+ if self.available_rel_types and \
+ relationship in self.available_rel_types.keys():
+ pass
+ elif not relationship.startswith(rel_prfx):
+ relationship = rel_prfx + relationship
+ else:
+ ValidationIssueCollector.appendException(
+ MissingRequiredFieldError(
+ what=_('"relationship" used in template '
+ '"%s"') % related_tpl.name,
+ required=self.TYPE))
+ for rtype in self.type_definition.relationship.keys():
+ if rtype.type == relationship:
+ explicit_relation[rtype] = related_tpl
+ related_tpl._add_relationship_template(req,
+ rtype.type,
+ self)
+ elif self.available_rel_types:
+ if relationship in self.available_rel_types.keys():
+ rel_type_def = self.available_rel_types.\
+ get(relationship)
+ if 'derived_from' in rel_type_def:
+ super_type = \
+ rel_type_def.get('derived_from')
+ if not super_type.startswith(rel_prfx):
+ super_type = rel_prfx + super_type
+ if rtype.type == super_type:
+ explicit_relation[rtype] = related_tpl
+ related_tpl.\
+ _add_relationship_template(
+ req, rtype.type, self)
+ return explicit_relation
+
+ def _add_relationship_template(self, requirement, rtype, source):
+ req = requirement.copy()
+ req['type'] = rtype
+ tpl = RelationshipTemplate(req, rtype, self.custom_def, self, source)
+ self.relationship_tpl.append(tpl)
+
+ def get_relationship_template(self):
+ return self.relationship_tpl
+
+ def _add_next(self, nodetpl, relationship):
+ self.related[nodetpl] = relationship
+
+ @property
+ def related_nodes(self):
+ if not self.related:
+ for relation, node in self.type_definition.relationship.items():
+ for tpl in self.templates:
+ if tpl == node.type:
+ self.related[NodeTemplate(tpl)] = relation
+ return self.related.keys()
+
+ def validate(self, tosca_tpl=None):
+ self._validate_capabilities()
+ self._validate_requirements()
+ self._validate_properties(self.entity_tpl, self.type_definition)
+ self._validate_interfaces()
+ for prop in self.get_properties_objects():
+ prop.validate()
+
+ def _validate_requirements(self):
+ type_requires = self.type_definition.get_all_requirements()
+ allowed_reqs = ["template"]
+ if type_requires:
+ for treq in type_requires:
+ for key, value in treq.items():
+ allowed_reqs.append(key)
+ if isinstance(value, dict):
+ for key in value:
+ allowed_reqs.append(key)
+
+ requires = self.type_definition.get_value(self.REQUIREMENTS,
+ self.entity_tpl)
+ if requires:
+ if not isinstance(requires, list):
+ ValidationIssueCollector.appendException(
+ TypeMismatchError(
+ what='"requirements" of template "%s"' % self.name,
+ type='list'))
+ else:
+ for req in requires:
+ for r1, value in req.items():
+ if isinstance(value, dict):
+ self._validate_requirements_keys(value)
+ self._validate_requirements_properties(value)
+ allowed_reqs.append(r1)
+ self._common_validate_field(req, allowed_reqs,
+ 'requirements')
+
+ def _validate_requirements_properties(self, requirements):
+ # TO-DO(anyone): Only occurrences property of the requirements is
+ # validated here. Validation of other requirement properties are being
+ # validated in different files. Better to keep all the requirements
+ # properties validation here.
+ for key, value in requirements.items():
+ if key == 'occurrences':
+ self._validate_occurrences(value)
+ break
+
+ def _validate_occurrences(self, occurrences):
+ DataEntity.validate_datatype('list', occurrences)
+ for value in occurrences:
+ DataEntity.validate_datatype('integer', value)
+ if len(occurrences) != 2 or not (0 <= occurrences[0] <= occurrences[1]) \
+ or occurrences[1] == 0:
+ ValidationIssueCollector.appendException(
+ InvalidPropertyValueError(what=(occurrences)))
+
+ def _validate_requirements_keys(self, requirement):
+ for key in requirement.keys():
+ if key not in self.REQUIREMENTS_SECTION:
+ ValidationIssueCollector.appendException(
+ UnknownFieldError(
+ what='"requirements" of template "%s"' % self.name,
+ field=key))
+
+ def _validate_interfaces(self):
+ ifaces = self.type_definition.get_value(self.INTERFACES,
+ self.entity_tpl)
+ if ifaces:
+ for name, value in ifaces.items():
+ if name in (LIFECYCLE, LIFECYCLE_SHORTNAME):
+ self._common_validate_field(
+ value, InterfacesDef.
+ interfaces_node_lifecycle_operations,
+ 'interfaces')
+ elif name in (CONFIGURE, CONFIGURE_SHORTNAME):
+ self._common_validate_field(
+ value, InterfacesDef.
+ interfaces_relationship_configure_operations,
+ 'interfaces')
+ elif name in self.type_definition.interfaces.keys():
+ self._common_validate_field(
+ value,
+ self._collect_custom_iface_operations(name),
+ 'interfaces')
+ else:
+ ValidationIssueCollector.appendException(
+ UnknownFieldError(
+ what='"interfaces" of template "%s"' %
+ self.name, field=name))
+
+ def _collect_custom_iface_operations(self, name):
+ allowed_operations = []
+ nodetype_iface_def = self.type_definition.interfaces[name]
+ allowed_operations.extend(nodetype_iface_def.keys())
+ if 'type' in nodetype_iface_def:
+ iface_type = nodetype_iface_def['type']
+ if iface_type in self.type_definition.custom_def:
+ iface_type_def = self.type_definition.custom_def[iface_type]
+ else:
+ iface_type_def = self.type_definition.TOSCA_DEF[iface_type]
+ allowed_operations.extend(iface_type_def.keys())
+ allowed_operations = [op for op in allowed_operations if
+ op not in INTERFACE_DEF_RESERVED_WORDS]
+ return allowed_operations
+
+ def _validate_fields(self, nodetemplate):
+ for name in nodetemplate.keys():
+ if name not in self.SECTIONS and name not in self.SPECIAL_SECTIONS:
+ ValidationIssueCollector.appendException(
+ UnknownFieldError(what='Node template "%s"' % self.name,
+ field=name))*/
diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/Policy.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/Policy.java
new file mode 100644
index 0000000..ca8ac55
--- /dev/null
+++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/Policy.java
@@ -0,0 +1,232 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.sdc.toscaparser.api;
+
+import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue;
+
+import java.util.ArrayList;
+import java.util.LinkedHashMap;
+import java.util.Map;
+
+import org.onap.sdc.toscaparser.api.elements.Metadata;
+import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder;
+import org.onap.sdc.toscaparser.api.utils.ValidateUtils;
+
+public class Policy extends EntityTemplate {
+
+
+ static final String TYPE = "type";
+ static final String METADATA = "metadata";
+ static final String DESCRIPTION = "description";
+ static final String PROPERTIES = "properties";
+ static final String TARGETS = "targets";
+ private static final String TRIGGERS = "triggers";
+ private static final String SECTIONS[] = {
+ TYPE, METADATA, DESCRIPTION, PROPERTIES, TARGETS, TRIGGERS};
+
+ Metadata metaDataObject;
+ LinkedHashMap<String, Object> metaData = null;
+ ArrayList<Object> targetsList; // *** a list of NodeTemplate OR a list of Group ***
+ String targetsType;
+ ArrayList<Object> triggers;
+ LinkedHashMap<String, Object> properties;
+
+ public Policy(String _name,
+ LinkedHashMap<String, Object> _policy,
+ ArrayList<Object> targetObjects,
+ String _targetsType,
+ LinkedHashMap<String, Object> _customDef) {
+ this(_name, _policy, targetObjects, _targetsType, _customDef, null);
+ }
+
+ public Policy(String _name,
+ LinkedHashMap<String, Object> _policy,
+// ArrayList<NodeTemplate> targetObjects,
+ ArrayList<Object> targetObjects,
+ String _targetsType,
+ LinkedHashMap<String, Object> _customDef, NodeTemplate parentNodeTemplate) {
+ super(_name, _policy, "policy_type", _customDef, parentNodeTemplate);
+
+ if (_policy.get(METADATA) != null) {
+ metaData = (LinkedHashMap<String, Object>) _policy.get(METADATA);
+ ValidateUtils.validateMap(metaData);
+ metaDataObject = new Metadata(metaData);
+ }
+
+ targetsList = targetObjects;
+ targetsType = _targetsType;
+ triggers = _triggers((LinkedHashMap<String, Object>) _policy.get(TRIGGERS));
+ properties = null;
+ if (_policy.get("properties") != null) {
+ properties = (LinkedHashMap<String, Object>) _policy.get("properties");
+ }
+ _validateKeys();
+ }
+
+ public ArrayList<String> getTargets() {
+ return (ArrayList<String>) entityTpl.get("targets");
+ }
+
+ public ArrayList<String> getDescription() {
+ return (ArrayList<String>) entityTpl.get("description");
+ }
+
+ public ArrayList<String> getmetadata() {
+ return (ArrayList<String>) entityTpl.get("metadata");
+ }
+
+ public String getTargetsType() {
+ return targetsType;
+ }
+
+ public Metadata getMetaDataObj() {
+ return metaDataObject;
+ }
+
+ public LinkedHashMap<String, Object> getMetaData() {
+ return metaData;
+ }
+
+ // public ArrayList<NodeTemplate> getTargetsList() {
+ public ArrayList<Object> getTargetsList() {
+ return targetsList;
+ }
+
+ // entityTemplate already has a different getProperties...
+ // this is to access the local properties variable
+ public LinkedHashMap<String, Object> getPolicyProperties() {
+ return properties;
+ }
+
+ private ArrayList<Object> _triggers(LinkedHashMap<String, Object> triggers) {
+ ArrayList<Object> triggerObjs = new ArrayList<>();
+ if (triggers != null) {
+ for (Map.Entry<String, Object> me : triggers.entrySet()) {
+ String tname = me.getKey();
+ LinkedHashMap<String, Object> ttriggerTpl =
+ (LinkedHashMap<String, Object>) me.getValue();
+ Triggers triggersObj = new Triggers(tname, ttriggerTpl);
+ triggerObjs.add(triggersObj);
+ }
+ }
+ return triggerObjs;
+ }
+
+ private void _validateKeys() {
+ for (String key : entityTpl.keySet()) {
+ boolean bFound = false;
+ for (int i = 0; i < SECTIONS.length; i++) {
+ if (key.equals(SECTIONS[i])) {
+ bFound = true;
+ break;
+ }
+ }
+ if (!bFound) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE219", String.format(
+ "UnknownFieldError: Policy \"%s\" contains unknown field \"%s\"",
+ name, key)));
+ }
+ }
+ }
+
+ @Override
+ public String toString() {
+ return "Policy{" +
+ "metaData=" + metaData +
+ ", targetsList=" + targetsList +
+ ", targetsType='" + targetsType + '\'' +
+ ", triggers=" + triggers +
+ ", properties=" + properties +
+ '}';
+ }
+
+ public int compareTo(Policy other) {
+ if (this.equals(other))
+ return 0;
+ return this.getName().compareTo(other.getName()) == 0 ? this.getType().compareTo(other.getType()) : this.getName().compareTo(other.getName());
+ }
+}
+
+/*python
+
+from toscaparser.common.exception import ValidationIssueCollector
+from toscaparser.common.exception import UnknownFieldError
+from toscaparser.entity_template import EntityTemplate
+from toscaparser.triggers import Triggers
+from toscaparser.utils import validateutils
+
+
+SECTIONS = (TYPE, METADATA, DESCRIPTION, PROPERTIES, TARGETS, TRIGGERS) = \
+ ('type', 'metadata', 'description',
+ 'properties', 'targets', 'triggers')
+
+log = logging.getLogger('tosca')
+
+
+class Policy(EntityTemplate):
+ '''Policies defined in Topology template.'''
+ def __init__(self, name, policy, targets, targets_type, custom_def=None):
+ super(Policy, self).__init__(name,
+ policy,
+ 'policy_type',
+ custom_def)
+ self.meta_data = None
+ if self.METADATA in policy:
+ self.meta_data = policy.get(self.METADATA)
+ validateutils.validate_map(self.meta_data)
+ self.targets_list = targets
+ self.targets_type = targets_type
+ self.triggers = self._triggers(policy.get(TRIGGERS))
+ self._validate_keys()
+
+ @property
+ def targets(self):
+ return self.entity_tpl.get('targets')
+
+ @property
+ def description(self):
+ return self.entity_tpl.get('description')
+
+ @property
+ def metadata(self):
+ return self.entity_tpl.get('metadata')
+
+ def get_targets_type(self):
+ return self.targets_type
+
+ def get_targets_list(self):
+ return self.targets_list
+
+ def _triggers(self, triggers):
+ triggerObjs = []
+ if triggers:
+ for name, trigger_tpl in triggers.items():
+ triggersObj = Triggers(name, trigger_tpl)
+ triggerObjs.append(triggersObj)
+ return triggerObjs
+
+ def _validate_keys(self):
+ for key in self.entity_tpl.keys():
+ if key not in SECTIONS:
+ ValidationIssueCollector.appendException(
+ UnknownFieldError(what='Policy "%s"' % self.name,
+ field=key))
+*/
diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/Property.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/Property.java
new file mode 100644
index 0000000..e20bd2f
--- /dev/null
+++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/Property.java
@@ -0,0 +1,401 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.sdc.toscaparser.api;
+
+import com.google.common.collect.Lists;
+import org.onap.sdc.toscaparser.api.elements.constraints.Constraint;
+import org.onap.sdc.toscaparser.api.elements.constraints.Schema;
+import org.onap.sdc.toscaparser.api.functions.Function;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+import java.util.stream.Collectors;
+
+public class Property {
+ // TOSCA built-in Property type
+ private static final Logger LOGGER = LoggerFactory.getLogger(Property.class.getName());
+
+ private static final String TYPE = "type";
+ private static final String REQUIRED = "required";
+ private static final String DESCRIPTION = "description";
+ private static final String DEFAULT = "default";
+ private static final String CONSTRAINTS = "constraints";
+ private static String entrySchema = "entry_schema";
+ private static String dataType = "datatypes";
+
+ private static final String[] PROPERTY_KEYS = {
+ TYPE, REQUIRED, DESCRIPTION, DEFAULT, CONSTRAINTS};
+
+ private static final String ENTRYTYPE = "type";
+ private static final String ENTRYPROPERTIES = "properties";
+ private static final String PATH_DELIMITER = "#";
+ private static final String[] ENTRY_SCHEMA_KEYS = {
+ ENTRYTYPE, ENTRYPROPERTIES};
+
+ private String name;
+ private Object value;
+ private Schema schema;
+ private LinkedHashMap<String, Object> customDef;
+
+ public Property(Map.Entry<String, Object> propertyEntry) {
+ name = propertyEntry.getKey();
+ value = propertyEntry.getValue();
+ }
+
+ public Property(String propname,
+ Object propvalue,
+ LinkedHashMap<String, Object> propschemaDict,
+ LinkedHashMap<String, Object> propcustomDef) {
+
+ name = propname;
+ value = propvalue;
+ customDef = propcustomDef;
+ schema = new Schema(propname, propschemaDict);
+ }
+
+ public String getType() {
+ return schema.getType();
+ }
+
+ public boolean isRequired() {
+ return schema.isRequired();
+ }
+
+ public String getDescription() {
+ return schema.getDescription();
+ }
+
+ public Object getDefault() {
+ return schema.getDefault();
+ }
+
+ public ArrayList<Constraint> getConstraints() {
+ return schema.getConstraints();
+ }
+
+ public LinkedHashMap<String, Object> getEntrySchema() {
+ return schema.getEntrySchema();
+ }
+
+
+ public String getName() {
+ return name;
+ }
+
+ public Object getValue() {
+ return value;
+ }
+
+ // setter
+ public Object setValue(Object vob) {
+ value = vob;
+ return value;
+ }
+
+ public void validate() {
+ // Validate if not a reference property
+ if (!Function.isFunction(value)) {
+ if (getType().equals(Schema.STRING)) {
+ value = value.toString();
+ }
+ value = DataEntity.validateDatatype(getType(), value,
+ getEntrySchema(),
+ customDef,
+ name);
+ validateConstraints();
+ }
+ }
+
+ private void validateConstraints() {
+ if (getConstraints() != null) {
+ for (Constraint constraint : getConstraints()) {
+ constraint.validate(value);
+ }
+ }
+ }
+
+ @Override
+ public String toString() {
+ return "Property{"
+ + "name='" + name + '\''
+ + ", value=" + value
+ + ", schema=" + schema
+ + ", customDef=" + customDef
+ + '}';
+ }
+
+ /**
+ * Retrieves property value as list of strings if<br>
+ * - the value is simple<br>
+ * - the value is list of simple values<br>
+ * - the provided path refers to a simple property inside a data type<br>
+ *
+ * @param propertyPath valid name of property for search.<br>
+ * If a name refers to a simple field inside a datatype, the property name should be defined with # delimiter.<br>
+ * @return List of property values. If not found, empty list will be returned.<br>
+ * If property value is a list either of simple fields or of simple fields inside a datatype, all values from the list should be returned
+ */
+ public List<String> getLeafPropertyValue(String propertyPath) {
+ List<String> propertyValueList = Collections.emptyList();
+
+ if (LOGGER.isDebugEnabled()) {
+ LOGGER.debug("getLeafPropertyValue=> A new request: propertyPath: {}, value: {}", propertyPath, getValue());
+ }
+ if (propertyPath == null || getValue() == null
+ //if entry_schema disappears, it is datatype,
+ // otherwise it is map of simple types - should be ignored
+ || isValueMapOfSimpleTypes()) {
+ LOGGER.error("It is a wrong request - ignoring! propertyPath: {}, value: {}", propertyPath, getValue());
+ return propertyValueList;
+ }
+ String[] path = propertyPath.split(PATH_DELIMITER);
+
+ if (Schema.isRequestedTypeSimple(getPropertyTypeByPath(path))) {
+ //the internal property type in the path is either simple or list of simple types
+ if (isValueInsideDataType()) {
+ if (LOGGER.isDebugEnabled()) {
+ LOGGER.debug("The requested is an internal simple property inside of a data type");
+ }
+ //requested value is an internal simple property inside of a data type
+ propertyValueList = getSimplePropertyValueForComplexType(path);
+ } else {
+ if (LOGGER.isDebugEnabled()) {
+ LOGGER.debug("The requested property has simple type or list of simple types");
+ }
+ //the requested property is simple type or list of simple types
+ propertyValueList = getSimplePropertyValueForSimpleType();
+ }
+ }
+ return propertyValueList;
+ }
+
+ private boolean isValueMapOfSimpleTypes() {
+ if (getValue() instanceof Map && getEntrySchema() != null) {
+ LOGGER.warn("This property value is a map of simple types");
+ return true;
+ }
+ return false;
+ }
+
+ private boolean isValueInsideDataType() {
+ //value is either a list of values for data type
+ //or data type
+ return (Schema.LIST.equals(getType()) && isDataTypeInEntrySchema())
+ || (getEntrySchema() == null && getType().contains(dataType));
+ }
+
+ private Object getSimpleValueFromComplexObject(Object current, String[] path) {
+ if (current == null) {
+ return null;
+ }
+ int index = 0;
+
+ if (path.length > index) {
+ for (int i = index; i < path.length; i++) {
+ if (current instanceof Map) {
+ current = ((Map<String, Object>) current).get(path[i]);
+ } else if (current instanceof List) {
+ current = ((List) current).get(0);
+ i--;
+ } else {
+ return null;
+ }
+ }
+ }
+ if (current != null) {
+ return current;
+ }
+ return null;
+ }
+
+ private List<String> getSimplePropertyValueForSimpleType() {
+ if (getValue() instanceof List || getValue() instanceof Map) {
+ return getSimplePropertyValueForComplexType(null);
+ }
+ return Lists.newArrayList(String.valueOf(value));
+ }
+
+ private List<String> getSimplePropertyValueForComplexType(String[] path) {
+ if (getValue() instanceof List) {
+ return ((List<Object>) getValue()).stream()
+ .map(v -> {
+ if (path != null) {
+ return getSimpleValueFromComplexObject(v, path);
+ } else {
+ return v;
+ }
+ })
+ //it might be null when get_input can't be resolved
+ // e.g.:
+ // - get_input has two parameters: 1. list and 2. index in this list
+ //and list has no value
+ // - neither value no default is defined for get_input
+ .filter(Objects::nonNull)
+ .map(String::valueOf)
+ .collect(Collectors.toList());
+ }
+ //it is data type
+ List<String> valueList = Lists.newArrayList();
+ String valueString = String.valueOf(getSimpleValueFromComplexObject(getValue(), path));
+ if (Objects.nonNull(valueString)) {
+ valueList.add(valueString);
+ }
+ return valueList;
+ }
+
+ private String getPropertyTypeByPath(String[] path) {
+ String propertyType = calculatePropertyType();
+
+ if (path.length > 0 && !path[0].isEmpty()) {
+ return getInternalPropertyType(propertyType, path, 0);
+ }
+ return propertyType;
+ }
+
+ private String calculatePropertyType() {
+ String propertyType = getType();
+ if (Schema.LIST.equals(propertyType)) {
+ //if it is list, return entry schema type
+ return (String) getEntrySchema().get(ENTRYTYPE);
+ }
+ return propertyType;
+ }
+
+ private String calculatePropertyType(LinkedHashMap<String, Object> property) {
+ String type = (String) property.get(TYPE);
+ if (Schema.LIST.equals(type)) {
+ //it might be a data type
+ return getEntrySchemaType(property);
+ }
+ return type;
+ }
+
+ private String getInternalPropertyType(String dataTypeName, String[] path, int index) {
+ if (path.length > index) {
+ LinkedHashMap<String, Object> complexProperty = (LinkedHashMap<String, Object>) customDef.get(dataTypeName);
+ if (complexProperty != null) {
+ LinkedHashMap<String, Object> dataTypeProperties = (LinkedHashMap<String, Object>) complexProperty.get(ENTRYPROPERTIES);
+ return getPropertyTypeFromCustomDefDeeply(path, index, dataTypeProperties);
+ }
+ }
+ //stop searching - seems as wrong flow: the path is finished but the value is not found yet
+ return null;
+ }
+
+ private String getEntrySchemaType(LinkedHashMap<String, Object> property) {
+ LinkedHashMap<String, Object> entrySchema = (LinkedHashMap<String, Object>) property.get(Property.entrySchema);
+ if (entrySchema != null) {
+ return (String) entrySchema.get(TYPE);
+ }
+ return null;
+ }
+
+ private String getPropertyTypeFromCustomDefDeeply(String[] path, int index, LinkedHashMap<String, Object> properties) {
+ if (properties != null) {
+ LinkedHashMap<String, Object> foundProperty = (LinkedHashMap<String, Object>) (properties).get(path[index]);
+ if (foundProperty != null) {
+ String propertyType = calculatePropertyType(foundProperty);
+ if (propertyType == null || index == path.length - 1) {
+ return propertyType;
+ }
+ return getInternalPropertyType(propertyType, path, index + 1);
+ }
+ }
+ return null;
+ }
+
+ private boolean isDataTypeInEntrySchema() {
+ String entrySchemaType = (String) getEntrySchema().get(ENTRYTYPE);
+ return entrySchemaType != null && entrySchemaType.contains(dataType);
+ }
+
+
+}
+
+/*python
+
+class Property(object):
+ '''TOSCA built-in Property type.'''
+
+ PROPERTY_KEYS = (
+ TYPE, REQUIRED, DESCRIPTION, DEFAULT, CONSTRAINTS
+ ) = (
+ 'type', 'required', 'description', 'default', 'constraints'
+ )
+
+ ENTRY_SCHEMA_KEYS = (
+ ENTRYTYPE, ENTRYPROPERTIES
+ ) = (
+ 'type', 'properties'
+ )
+
+ def __init__(self, property_name, value, schema_dict, custom_def=None):
+ self.name = property_name
+ self.value = value
+ self.custom_def = custom_def
+ self.schema = Schema(property_name, schema_dict)
+
+ @property
+ def type(self):
+ return self.schema.type
+
+ @property
+ def required(self):
+ return self.schema.required
+
+ @property
+ def description(self):
+ return self.schema.description
+
+ @property
+ def default(self):
+ return self.schema.default
+
+ @property
+ def constraints(self):
+ return self.schema.constraints
+
+ @property
+ def entry_schema(self):
+ return self.schema.entry_schema
+
+ def validate(self):
+ '''Validate if not a reference property.'''
+ if not is_function(self.value):
+ if self.type == Schema.STRING:
+ self.value = str(self.value)
+ self.value = DataEntity.validate_datatype(self.type, self.value,
+ self.entry_schema,
+ self.custom_def,
+ self.name)
+ self._validate_constraints()
+
+ def _validate_constraints(self):
+ if self.constraints:
+ for constraint in self.constraints:
+ constraint.validate(self.value)
+*/
diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/RelationshipTemplate.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/RelationshipTemplate.java
new file mode 100644
index 0000000..d1a1383
--- /dev/null
+++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/RelationshipTemplate.java
@@ -0,0 +1,227 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.sdc.toscaparser.api;
+
+import java.util.ArrayList;
+import java.util.LinkedHashMap;
+import java.util.Map;
+
+import org.onap.sdc.toscaparser.api.elements.PropertyDef;
+import org.onap.sdc.toscaparser.api.elements.StatefulEntityType;
+import org.onap.sdc.toscaparser.api.elements.EntityType;
+
+public class RelationshipTemplate extends EntityTemplate {
+
+ private static final String DERIVED_FROM = "derived_from";
+ private static final String PROPERTIES = "properties";
+ private static final String REQUIREMENTS = "requirements";
+ private static final String INTERFACES = "interfaces";
+ private static final String CAPABILITIES = "capabilities";
+ private static final String TYPE = "type";
+ @SuppressWarnings("unused")
+ private static final String SECTIONS[] = {
+ DERIVED_FROM, PROPERTIES, REQUIREMENTS, INTERFACES, CAPABILITIES, TYPE};
+
+ private String name;
+ private NodeTemplate target;
+ private NodeTemplate source;
+ private ArrayList<Property> _properties;
+
+ public RelationshipTemplate(LinkedHashMap<String, Object> rtrelationshipTemplate,
+ String rtname,
+ LinkedHashMap<String, Object> rtcustomDef,
+ NodeTemplate rttarget,
+ NodeTemplate rtsource) {
+ this(rtrelationshipTemplate, rtname, rtcustomDef, rttarget, rtsource, null);
+ }
+
+ public RelationshipTemplate(LinkedHashMap<String, Object> rtrelationshipTemplate,
+ String rtname,
+ LinkedHashMap<String, Object> rtcustomDef,
+ NodeTemplate rttarget,
+ NodeTemplate rtsource, NodeTemplate parentNodeTemplate) {
+ super(rtname, rtrelationshipTemplate, "relationship_type", rtcustomDef, parentNodeTemplate);
+
+ name = rtname;
+ target = rttarget;
+ source = rtsource;
+ _properties = null;
+ }
+
+ public ArrayList<Property> getPropertiesObjects() {
+ // Return properties objects for this template
+ if (_properties == null) {
+ _properties = _createRelationshipProperties();
+ }
+ return _properties;
+ }
+
+ @SuppressWarnings({"unchecked", "unused"})
+ public ArrayList<Property> _createRelationshipProperties() {
+ ArrayList<Property> props = new ArrayList<Property>();
+ LinkedHashMap<String, Object> properties = new LinkedHashMap<String, Object>();
+ LinkedHashMap<String, Object> relationship = (LinkedHashMap<String, Object>) entityTpl.get("relationship");
+
+ if (relationship == null) {
+ for (Object val : entityTpl.values()) {
+ if (val instanceof LinkedHashMap) {
+ relationship = (LinkedHashMap<String, Object>) ((LinkedHashMap<String, Object>) val).get("relationship");
+ break;
+ }
+ }
+ }
+
+ if (relationship != null) {
+ properties = (LinkedHashMap<String, Object>) ((EntityType) typeDefinition).getValue(PROPERTIES, relationship, false);
+ }
+ if (properties == null) {
+ properties = new LinkedHashMap<String, Object>();
+ }
+ if (properties == null) {
+ properties = (LinkedHashMap<String, Object>) entityTpl.get(PROPERTIES);
+ }
+ if (properties == null) {
+ properties = new LinkedHashMap<String, Object>();
+ }
+
+ if (properties != null) {
+ for (Map.Entry<String, Object> me : properties.entrySet()) {
+ String pname = me.getKey();
+ Object pvalue = me.getValue();
+ LinkedHashMap<String, PropertyDef> propsDef = ((StatefulEntityType) typeDefinition).getPropertiesDef();
+ if (propsDef != null && propsDef.get(pname) != null) {
+ if (properties.get(pname) != null) {
+ pvalue = properties.get(name);
+ }
+ PropertyDef pd = (PropertyDef) propsDef.get(pname);
+ Property prop = new Property(pname, pvalue, pd.getSchema(), customDef);
+ props.add(prop);
+ }
+ }
+ }
+ ArrayList<PropertyDef> pds = ((StatefulEntityType) typeDefinition).getPropertiesDefObjects();
+ for (PropertyDef p : pds) {
+ if (p.getDefault() != null && properties.get(p.getName()) == null) {
+ Property prop = new Property(p.getName(), (LinkedHashMap<String, Object>) p.getDefault(), p.getSchema(), customDef);
+ props.add(prop);
+ }
+ }
+ return props;
+ }
+
+ public void validate() {
+ _validateProperties(entityTpl, (StatefulEntityType) typeDefinition);
+ }
+
+ // getters/setters
+ public NodeTemplate getTarget() {
+ return target;
+ }
+
+ public NodeTemplate getSource() {
+ return source;
+ }
+
+ public void setSource(NodeTemplate nt) {
+ source = nt;
+ }
+
+ public void setTarget(NodeTemplate nt) {
+ target = nt;
+ }
+
+ @Override
+ public String toString() {
+ return "RelationshipTemplate{" +
+ "name='" + name + '\'' +
+ ", target=" + target.getName() +
+ ", source=" + source.getName() +
+ ", _properties=" + _properties +
+ '}';
+ }
+
+}
+
+/*python
+
+from toscaparser.entity_template import EntityTemplate
+from toscaparser.properties import Property
+
+SECTIONS = (DERIVED_FROM, PROPERTIES, REQUIREMENTS,
+ INTERFACES, CAPABILITIES, TYPE) = \
+ ('derived_from', 'properties', 'requirements', 'interfaces',
+ 'capabilities', 'type')
+
+log = logging.getLogger('tosca')
+
+
+class RelationshipTemplate(EntityTemplate):
+ '''Relationship template.'''
+ def __init__(self, relationship_template, name, custom_def=None,
+ target=None, source=None):
+ super(RelationshipTemplate, self).__init__(name,
+ relationship_template,
+ 'relationship_type',
+ custom_def)
+ self.name = name.lower()
+ self.target = target
+ self.source = source
+
+ def get_properties_objects(self):
+ '''Return properties objects for this template.'''
+ if self._properties is None:
+ self._properties = self._create_relationship_properties()
+ return self._properties
+
+ def _create_relationship_properties(self):
+ props = []
+ properties = {}
+ relationship = self.entity_tpl.get('relationship')
+
+ if not relationship:
+ for value in self.entity_tpl.values():
+ if isinstance(value, dict):
+ relationship = value.get('relationship')
+ break
+
+ if relationship:
+ properties = self.type_definition.get_value(self.PROPERTIES,
+ relationship) or {}
+ if not properties:
+ properties = self.entity_tpl.get(self.PROPERTIES) or {}
+
+ if properties:
+ for name, value in properties.items():
+ props_def = self.type_definition.get_properties_def()
+ if props_def and name in props_def:
+ if name in properties.keys():
+ value = properties.get(name)
+ prop = Property(name, value,
+ props_def[name].schema, self.custom_def)
+ props.append(prop)
+ for p in self.type_definition.get_properties_def_objects():
+ if p.default is not None and p.name not in properties.keys():
+ prop = Property(p.name, p.default, p.schema, self.custom_def)
+ props.append(prop)
+ return props
+
+ def validate(self):
+ self._validate_properties(self.entity_tpl, self.type_definition)*/
diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/Repository.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/Repository.java
new file mode 100644
index 0000000..ee5e5bc
--- /dev/null
+++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/Repository.java
@@ -0,0 +1,137 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.sdc.toscaparser.api;
+
+import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue;
+import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder;
+import org.onap.sdc.toscaparser.api.utils.UrlUtils;
+
+import java.util.LinkedHashMap;
+
+public class Repository {
+
+ private static final String DESCRIPTION = "description";
+ private static final String URL = "url";
+ private static final String CREDENTIAL = "credential";
+ private static final String SECTIONS[] = {DESCRIPTION, URL, CREDENTIAL};
+
+ private String name;
+ private Object reposit;
+ private String url;
+
+ @SuppressWarnings("unchecked")
+ public Repository(String repName, Object repValue) {
+ name = repName;
+ reposit = repValue;
+ if (reposit instanceof LinkedHashMap) {
+ url = (String) ((LinkedHashMap<String, Object>) reposit).get("url");
+ if (url == null) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE229", String.format(
+ "MissingRequiredFieldError: Repository \"%s\" is missing required field \"url\"",
+ name)));
+ }
+ }
+ loadAndValidate(name, reposit);
+ }
+
+ @SuppressWarnings("unchecked")
+ private void loadAndValidate(String val, Object repositDef) {
+ String keyname = val;
+ if (repositDef instanceof LinkedHashMap) {
+ for (String key : ((LinkedHashMap<String, Object>) reposit).keySet()) {
+ boolean bFound = false;
+ for (String sect : SECTIONS) {
+ if (key.equals(sect)) {
+ bFound = true;
+ break;
+ }
+ }
+ if (!bFound) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE230", String.format(
+ "UnknownFieldError: repositories \"%s\" contains unknown field \"%s\"",
+ keyname, key)));
+ }
+ }
+
+ String repositUrl = (String) ((LinkedHashMap<String, Object>) repositDef).get("url");
+ if (repositUrl != null) {
+ boolean urlVal = UrlUtils.validateUrl(repositUrl);
+ if (!urlVal) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE231", String.format(
+ "URLException: repsositories \"%s\" Invalid Url", keyname)));
+ }
+ }
+ }
+ }
+
+ @Override
+ public String toString() {
+ return "Repository{" +
+ "name='" + name + '\'' +
+ ", reposit=" + reposit +
+ ", url='" + url + '\'' +
+ '}';
+ }
+}
+
+/*python
+
+from toscaparser.common.exception import ValidationIssueCollector
+from toscaparser.common.exception import MissingRequiredFieldError
+from toscaparser.common.exception import UnknownFieldError
+from toscaparser.common.exception import URLException
+from toscaparser.utils.gettextutils import _
+import org.openecomp.sdc.toscaparser.api.utils.urlutils
+
+SECTIONS = (DESCRIPTION, URL, CREDENTIAL) = \
+ ('description', 'url', 'credential')
+
+
+class Repository(object):
+ def __init__(self, repositories, values):
+ self.name = repositories
+ self.reposit = values
+ if isinstance(self.reposit, dict):
+ if 'url' not in self.reposit.keys():
+ ValidationIssueCollector.appendException(
+ MissingRequiredFieldError(what=_('Repository "%s"')
+ % self.name, required='url'))
+ self.url = self.reposit['url']
+ self.load_and_validate(self.name, self.reposit)
+
+ def load_and_validate(self, val, reposit_def):
+ self.keyname = val
+ if isinstance(reposit_def, dict):
+ for key in reposit_def.keys():
+ if key not in SECTIONS:
+ ValidationIssueCollector.appendException(
+ UnknownFieldError(what=_('repositories "%s"')
+ % self.keyname, field=key))
+
+ if URL in reposit_def.keys():
+ reposit_url = reposit_def.get(URL)
+ url_val = toscaparser.utils.urlutils.UrlUtils.\
+ validate_url(reposit_url)
+ if url_val is not True:
+ ValidationIssueCollector.appendException(
+ URLException(what=_('repsositories "%s" Invalid Url')
+ % self.keyname))
+*/
diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/RequirementAssignment.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/RequirementAssignment.java
new file mode 100644
index 0000000..227b2a9
--- /dev/null
+++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/RequirementAssignment.java
@@ -0,0 +1,111 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.sdc.toscaparser.api;
+
+
/**
 * One entry of a node template's "requirements" section: the requirement
 * name, the name of the target node template, and optionally the capability
 * on the target node and the relationship used to connect to it.
 */
public class RequirementAssignment {

    private String name;
    private String nodeName;
    private String capabilityName;
    private Object relationship;

    public RequirementAssignment(String reqName, String nodeName) {
        // Chain to the full constructor so the assignment logic lives in
        // exactly one place.
        this(reqName, nodeName, null, null);
    }

    public RequirementAssignment(String reqName, String nodeName, String capabilityName) {
        this(reqName, nodeName, capabilityName, null);
    }

    public RequirementAssignment(String reqName, String nodeName, String capabilityName, Object relationship) {
        this.name = reqName;
        this.nodeName = nodeName;
        this.capabilityName = capabilityName;
        this.relationship = relationship;
    }

    /**
     * Get the name for requirement assignment.
     *
     * @return the name for requirement assignment.
     */
    public String getName() {
        return name;
    }

    /**
     * Set the name for requirement
     *
     * @param name - the name for requirement to set
     */
    public void setName(String name) {
        this.name = name;
    }

    /**
     * Get the node name for requirement assignment.
     *
     * @return the node name for requirement
     */
    public String getNodeTemplateName() {
        return nodeName;
    }

    /**
     * Set the node name for requirement
     *
     * @param nodeName - the node name for requirement to set
     */
    public void setNodeTemplateName(String nodeName) {
        this.nodeName = nodeName;
    }

    /**
     * Get the capability name for requirement assignment.
     *
     * @return the capability name for requirement
     */
    public String getCapabilityName() {
        return capabilityName;
    }

    /**
     * Set the capability name for requirement assignment.
     *
     * @param capabilityName - the capability name for requirement to set
     */
    public void setCapabilityName(String capabilityName) {
        this.capabilityName = capabilityName;
    }

    /**
     * Get the relationship object for requirement
     *
     * @return the relationship object for requirement
     */
    public Object getRelationship() {
        return relationship;
    }

    /**
     * Set the relationship object for requirement (added for consistency:
     * every other field already exposes a setter).
     *
     * @param relationship - the relationship object for requirement to set
     */
    public void setRelationship(Object relationship) {
        this.relationship = relationship;
    }
}
diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/RequirementAssignments.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/RequirementAssignments.java
new file mode 100644
index 0000000..2ba6230
--- /dev/null
+++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/RequirementAssignments.java
@@ -0,0 +1,59 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.sdc.toscaparser.api;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.stream.Collectors;
+
+public class RequirementAssignments {
+
+ private List<RequirementAssignment> requirementAssignmentList;
+
+ public RequirementAssignments(List<RequirementAssignment> requirementAssignments) {
+ this.requirementAssignmentList = requirementAssignments != null ? new ArrayList<>(requirementAssignments) : new ArrayList<>();
+ }
+
+ /**
+ * Get all requirement assignments for Node Template.<br>
+ * This object can be either the original one, holding all requirement assignments for this node template,or a filtered one, holding a filtered subset.<br>
+ *
+ * @return list of requirement assignments for the node template. <br>
+ * If there are no requirement assignments, empty list is returned.
+ */
+ public List<RequirementAssignment> getAll() {
+ return new ArrayList<>(requirementAssignmentList);
+ }
+
+ /**
+ * Filter requirement assignments by requirement name.
+ *
+ * @param reqName - The name of requirement
+ * @return RequirementAssignments object, containing requirement assignments of this type.<br>
+ * If no such found, filtering will result in an empty collection.
+ */
+ public RequirementAssignments getRequirementsByName(String reqName) {
+ List<RequirementAssignment> requirementAssignments = requirementAssignmentList.stream()
+ .filter(req -> req.getName().equals(reqName)).collect(Collectors.toList());
+
+ return new RequirementAssignments(requirementAssignments);
+ }
+}
diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/SubstitutionMappings.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/SubstitutionMappings.java
new file mode 100644
index 0000000..a622a9a
--- /dev/null
+++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/SubstitutionMappings.java
@@ -0,0 +1,539 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.sdc.toscaparser.api;
+
+import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue;
+import org.onap.sdc.toscaparser.api.elements.NodeType;
+import org.onap.sdc.toscaparser.api.elements.PropertyDef;
+import org.onap.sdc.toscaparser.api.parameters.Input;
+import org.onap.sdc.toscaparser.api.parameters.Output;
+import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder;
+
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
+import java.util.List;
+
+
+public class SubstitutionMappings {
+ // SubstitutionMappings class declaration
+
+ // SubstitutionMappings exports the topology template as an
+ // implementation of a Node type.
+
+ private static final String NODE_TYPE = "node_type";
+ private static final String REQUIREMENTS = "requirements";
+ private static final String CAPABILITIES = "capabilities";
+
+ private static final String SECTIONS[] = {NODE_TYPE, REQUIREMENTS, CAPABILITIES};
+
+ private static final String OPTIONAL_OUTPUTS[] = {"tosca_id", "tosca_name", "state"};
+
+ private LinkedHashMap<String, Object> subMappingDef;
+ private ArrayList<NodeTemplate> nodetemplates;
+ private ArrayList<Input> inputs;
+ private ArrayList<Output> outputs;
+ private ArrayList<Group> groups;
+ private NodeTemplate subMappedNodeTemplate;
+ private LinkedHashMap<String, Object> customDefs;
+ private LinkedHashMap<String, Object> _capabilities;
+ private LinkedHashMap<String, Object> _requirements;
+
+ public SubstitutionMappings(LinkedHashMap<String, Object> smsubMappingDef,
+ ArrayList<NodeTemplate> smnodetemplates,
+ ArrayList<Input> sminputs,
+ ArrayList<Output> smoutputs,
+ ArrayList<Group> smgroups,
+ NodeTemplate smsubMappedNodeTemplate,
+ LinkedHashMap<String, Object> smcustomDefs) {
+
+ subMappingDef = smsubMappingDef;
+ nodetemplates = smnodetemplates;
+ inputs = sminputs != null ? sminputs : new ArrayList<Input>();
+ outputs = smoutputs != null ? smoutputs : new ArrayList<Output>();
+ groups = smgroups != null ? smgroups : new ArrayList<Group>();
+ subMappedNodeTemplate = smsubMappedNodeTemplate;
+ customDefs = smcustomDefs != null ? smcustomDefs : new LinkedHashMap<String, Object>();
+ _validate();
+
+ _capabilities = null;
+ _requirements = null;
+ }
+
+ public String getType() {
+ if (subMappingDef != null) {
+ return (String) subMappingDef.get(NODE_TYPE);
+ }
+ return null;
+ }
+
+ public ArrayList<NodeTemplate> getNodeTemplates() {
+ return nodetemplates;
+ }
+
+ /*
+ @classmethod
+ def get_node_type(cls, sub_mapping_def):
+ if isinstance(sub_mapping_def, dict):
+ return sub_mapping_def.get(cls.NODE_TYPE)
+ */
+
+ public static String stGetNodeType(LinkedHashMap<String, Object> _subMappingDef) {
+ if (_subMappingDef instanceof LinkedHashMap) {
+ return (String) _subMappingDef.get(NODE_TYPE);
+ }
+ return null;
+ }
+
+ public String getNodeType() {
+ return (String) subMappingDef.get(NODE_TYPE);
+ }
+
+ public ArrayList<Input> getInputs() {
+ return inputs;
+ }
+
+ public ArrayList<Group> getGroups() {
+ return groups;
+ }
+
+ public LinkedHashMap<String, Object> getCapabilities() {
+ return (LinkedHashMap<String, Object>) subMappingDef.get(CAPABILITIES);
+ }
+
+ public LinkedHashMap<String, Object> getRequirements() {
+ return (LinkedHashMap<String, Object>) subMappingDef.get(REQUIREMENTS);
+ }
+
+ public NodeType getNodeDefinition() {
+ return new NodeType(getNodeType(), customDefs);
+ }
+
+ private void _validate() {
+ // Basic validation
+ _validateKeys();
+ _validateType();
+
+ // SubstitutionMapping class syntax validation
+ _validateInputs();
+ _validateCapabilities();
+ _validateRequirements();
+ _validateOutputs();
+ }
+
+ private void _validateKeys() {
+ // validate the keys of substitution mappings
+ for (String key : subMappingDef.keySet()) {
+ boolean bFound = false;
+ for (String s : SECTIONS) {
+ if (s.equals(key)) {
+ bFound = true;
+ break;
+ }
+ }
+ if (!bFound) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE232", String.format(
+ "UnknownFieldError: SubstitutionMappings contain unknown field \"%s\"",
+ key)));
+ }
+ }
+ }
+
+ private void _validateType() {
+ // validate the node_type of substitution mappings
+ String nodeType = (String) subMappingDef.get(NODE_TYPE);
+ if (nodeType == null) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE233", String.format(
+ "MissingRequiredFieldError: SubstitutionMappings used in topology_template is missing required field \"%s\"",
+ NODE_TYPE)));
+ }
+ Object nodeTypeDef = customDefs.get(nodeType);
+ if (nodeTypeDef == null) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE234", String.format(
+ "InvalidNodeTypeError: \"%s\" is invalid", nodeType)));
+ }
+ }
+
+ private void _validateInputs() {
+ // validate the inputs of substitution mappings.
+
+ // The inputs defined by the topology template have to match the
+ // properties of the node type or the substituted node. If there are
+ // more inputs than the substituted node has properties, default values
+ //must be defined for those inputs.
+
+ HashSet<String> allInputs = new HashSet<>();
+ for (Input inp : inputs) {
+ allInputs.add(inp.getName());
+ }
+ HashSet<String> requiredProperties = new HashSet<>();
+ for (PropertyDef pd : getNodeDefinition().getPropertiesDefObjects()) {
+ if (pd.isRequired() && pd.getDefault() == null) {
+ requiredProperties.add(pd.getName());
+ }
+ }
+ // Must provide inputs for required properties of node type.
+ for (String property : requiredProperties) {
+ // Check property which is 'required' and has no 'default' value
+ if (!allInputs.contains(property)) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE235", String.format(
+ "MissingRequiredInputError: SubstitutionMappings with node_type \"%s\" is missing required input \"%s\"",
+ getNodeType(), property)));
+ }
+ }
+ // If the optional properties of node type need to be customized by
+ // substituted node, it also is necessary to define inputs for them,
+ // otherwise they are not mandatory to be defined.
+ HashSet<String> customizedParameters = new HashSet<>();
+ if (subMappedNodeTemplate != null) {
+ customizedParameters.addAll(subMappedNodeTemplate.getProperties().keySet());
+ }
+ HashSet<String> allProperties = new HashSet<String>(
+ getNodeDefinition().getPropertiesDef().keySet());
+ HashSet<String> diffset = customizedParameters;
+ diffset.removeAll(allInputs);
+ for (String parameter : diffset) {
+ if (allProperties.contains(parameter)) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE236", String.format(
+ "MissingRequiredInputError: SubstitutionMappings with node_type \"%s\" is missing required input \"%s\"",
+ getNodeType(), parameter)));
+ }
+ }
+ // Additional inputs are not in the properties of node type must
+ // provide default values. Currently the scenario may not happen
+ // because of parameters validation in nodetemplate, here is a
+ // guarantee.
+ for (Input inp : inputs) {
+ diffset = allInputs;
+ diffset.removeAll(allProperties);
+ if (diffset.contains(inp.getName()) && inp.getDefault() == null) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE237", String.format(
+ "MissingRequiredInputError: SubstitutionMappings with node_type \"%s\" is missing rquired input \"%s\"",
+ getNodeType(), inp.getName())));
+ }
+ }
+ }
+
+ private void _validateCapabilities() {
+ // validate the capabilities of substitution mappings
+
+ // The capabilities must be in node template which be mapped.
+ LinkedHashMap<String, Object> tplsCapabilities =
+ (LinkedHashMap<String, Object>) subMappingDef.get(CAPABILITIES);
+ List<CapabilityAssignment> nodeCapabilities = null;
+ if (subMappedNodeTemplate != null) {
+ nodeCapabilities = subMappedNodeTemplate.getCapabilities().getAll();
+ }
+ if (nodeCapabilities != null) {
+ for (CapabilityAssignment cap : nodeCapabilities) {
+ if (tplsCapabilities != null && tplsCapabilities.get(cap.getName()) == null) {
+ ; //pass
+ // ValidationIssueCollector.appendException(
+ // UnknownFieldError(what='SubstitutionMappings',
+ // field=cap))
+ }
+ }
+ }
+ }
+
+ private void _validateRequirements() {
+ // validate the requirements of substitution mappings
+ //*****************************************************
+ //TO-DO - Different from Python code!! one is a bug...
+ //*****************************************************
+ // The requirements must be in node template which be mapped.
+ LinkedHashMap<String, Object> tplsRequirements =
+ (LinkedHashMap<String, Object>) subMappingDef.get(REQUIREMENTS);
+ List<RequirementAssignment> nodeRequirements = null;
+ if (subMappedNodeTemplate != null) {
+ nodeRequirements = subMappedNodeTemplate.getRequirements().getAll();
+ }
+ if (nodeRequirements != null) {
+ for (RequirementAssignment ro : nodeRequirements) {
+ String cap = ro.getName();
+ if (tplsRequirements != null && tplsRequirements.get(cap) == null) {
+ ; //pass
+ // ValidationIssueCollector.appendException(
+ // UnknownFieldError(what='SubstitutionMappings',
+ // field=cap))
+ }
+ }
+ }
+ }
+
+ private void _validateOutputs() {
+ // validate the outputs of substitution mappings.
+
+ // The outputs defined by the topology template have to match the
+ // attributes of the node type or the substituted node template,
+ // and the observable attributes of the substituted node template
+ // have to be defined as attributes of the node type or outputs in
+ // the topology template.
+
+ // The outputs defined by the topology template have to match the
+ // attributes of the node type according to the specification, but
+ // it's reasonable that there are more inputs than the node type
+ // has properties, the specification will be amended?
+
+ for (Output output : outputs) {
+ Object ado = getNodeDefinition().getAttributesDef();
+ if (ado != null && ((LinkedHashMap<String, Object>) ado).get(output.getName()) == null) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE238", String.format(
+ "UnknownOutputError: Unknown output \"%s\" in SubstitutionMappings with node_type \"%s\"",
+ output.getName(), getNodeType())));
+ }
+ }
+ }
+
+ @Override
+ public String toString() {
+ return "SubstitutionMappings{" +
+// "subMappingDef=" + subMappingDef +
+// ", nodetemplates=" + nodetemplates +
+// ", inputs=" + inputs +
+// ", outputs=" + outputs +
+// ", groups=" + groups +
+ ", subMappedNodeTemplate=" + (subMappedNodeTemplate == null ? "" : subMappedNodeTemplate.getName()) +
+// ", customDefs=" + customDefs +
+// ", _capabilities=" + _capabilities +
+// ", _requirements=" + _requirements +
+ '}';
+ }
+
+ @Deprecated
+ public String toLimitedString() {
+ return "SubstitutionMappings{" +
+ "subMappingDef=" + subMappingDef +
+ ", nodetemplates=" + nodetemplates +
+ ", inputs=" + inputs +
+ ", outputs=" + outputs +
+ ", groups=" + groups +
+ ", subMappedNodeTemplate=" + (subMappedNodeTemplate == null ? "" : subMappedNodeTemplate.getName()) +
+ ", customDefs=" + customDefs +
+ ", _capabilities=" + _capabilities +
+ ", _requirements=" + _requirements +
+ '}';
+ }
+}
+
+
+/*python
+
+from toscaparser.common.exception import ValidationIssueCollector
+from toscaparser.common.exception import InvalidNodeTypeError
+from toscaparser.common.exception import MissingDefaultValueError
+from toscaparser.common.exception import MissingRequiredFieldError
+from toscaparser.common.exception import MissingRequiredInputError
+from toscaparser.common.exception import UnknownFieldError
+from toscaparser.common.exception import UnknownOutputError
+from toscaparser.elements.nodetype import NodeType
+from toscaparser.utils.gettextutils import _
+
+log = logging.getLogger('tosca')
+
+
+class SubstitutionMappings(object):
+ '''SubstitutionMappings class declaration
+
+ SubstitutionMappings exports the topology template as an
+ implementation of a Node type.
+ '''
+
+ SECTIONS = (NODE_TYPE, REQUIREMENTS, CAPABILITIES) = \
+ ('node_type', 'requirements', 'capabilities')
+
+ OPTIONAL_OUTPUTS = ['tosca_id', 'tosca_name', 'state']
+
+ def __init__(self, sub_mapping_def, nodetemplates, inputs, outputs,
+ sub_mapped_node_template, custom_defs):
+ self.nodetemplates = nodetemplates
+ self.sub_mapping_def = sub_mapping_def
+ self.inputs = inputs or []
+ self.outputs = outputs or []
+ self.sub_mapped_node_template = sub_mapped_node_template
+ self.custom_defs = custom_defs or {}
+ self._validate()
+
+ self._capabilities = None
+ self._requirements = None
+
+ @property
+ def type(self):
+ if self.sub_mapping_def:
+ return self.sub_mapping_def.get(self.NODE_TYPE)
+
+ @classmethod
+ def get_node_type(cls, sub_mapping_def):
+ if isinstance(sub_mapping_def, dict):
+ return sub_mapping_def.get(cls.NODE_TYPE)
+
+ @property
+ def node_type(self):
+ return self.sub_mapping_def.get(self.NODE_TYPE)
+
+ @property
+ def capabilities(self):
+ return self.sub_mapping_def.get(self.CAPABILITIES)
+
+ @property
+ def requirements(self):
+ return self.sub_mapping_def.get(self.REQUIREMENTS)
+
+ @property
+ def node_definition(self):
+ return NodeType(self.node_type, self.custom_defs)
+
+ def _validate(self):
+ # Basic validation
+ self._validate_keys()
+ self._validate_type()
+
+ # SubstitutionMapping class syntax validation
+ self._validate_inputs()
+ self._validate_capabilities()
+ self._validate_requirements()
+ self._validate_outputs()
+
+ def _validate_keys(self):
+ """validate the keys of substitution mappings."""
+ for key in self.sub_mapping_def.keys():
+ if key not in self.SECTIONS:
+ ValidationIssueCollector.appendException(
+ UnknownFieldError(what=_('SubstitutionMappings'),
+ field=key))
+
+ def _validate_type(self):
+ """validate the node_type of substitution mappings."""
+ node_type = self.sub_mapping_def.get(self.NODE_TYPE)
+ if not node_type:
+ ValidationIssueCollector.appendException(
+ MissingRequiredFieldError(
+ what=_('SubstitutionMappings used in topology_template'),
+ required=self.NODE_TYPE))
+
+ node_type_def = self.custom_defs.get(node_type)
+ if not node_type_def:
+ ValidationIssueCollector.appendException(
+ InvalidNodeTypeError(what=node_type))
+
+ def _validate_inputs(self):
+ """validate the inputs of substitution mappings.
+
+ The inputs defined by the topology template have to match the
+ properties of the node type or the substituted node. If there are
+ more inputs than the substituted node has properties, default values
+ must be defined for those inputs.
+ """
+
+ all_inputs = set([input.name for input in self.inputs])
+ required_properties = set([p.name for p in
+ self.node_definition.
+ get_properties_def_objects()
+ if p.required and p.default is None])
+ # Must provide inputs for required properties of node type.
+ for property in required_properties:
+ # Check property which is 'required' and has no 'default' value
+ if property not in all_inputs:
+ ValidationIssueCollector.appendException(
+ MissingRequiredInputError(
+ what=_('SubstitutionMappings with node_type ')
+ + self.node_type,
+ input_name=property))
+
+ # If the optional properties of node type need to be customized by
+ # substituted node, it also is necessary to define inputs for them,
+ # otherwise they are not mandatory to be defined.
+ customized_parameters = set(self.sub_mapped_node_template
+ .get_properties().keys()
+ if self.sub_mapped_node_template else [])
+ all_properties = set(self.node_definition.get_properties_def())
+ for parameter in customized_parameters - all_inputs:
+ if parameter in all_properties:
+ ValidationIssueCollector.appendException(
+ MissingRequiredInputError(
+ what=_('SubstitutionMappings with node_type ')
+ + self.node_type,
+ input_name=parameter))
+
+ # Additional inputs are not in the properties of node type must
+ # provide default values. Currently the scenario may not happen
+ # because of parameters validation in nodetemplate, here is a
+ # guarantee.
+ for input in self.inputs:
+ if input.name in all_inputs - all_properties \
+ and input.default is None:
+ ValidationIssueCollector.appendException(
+ MissingDefaultValueError(
+ what=_('SubstitutionMappings with node_type ')
+ + self.node_type,
+ input_name=input.name))
+
+ def _validate_capabilities(self):
+ """validate the capabilities of substitution mappings."""
+
+ # The capabilites must be in node template wchich be mapped.
+ tpls_capabilities = self.sub_mapping_def.get(self.CAPABILITIES)
+ node_capabiliteys = self.sub_mapped_node_template.get_capabilities() \
+ if self.sub_mapped_node_template else None
+ for cap in node_capabiliteys.keys() if node_capabiliteys else []:
+ if (tpls_capabilities and
+ cap not in list(tpls_capabilities.keys())):
+ pass
+ # ValidationIssueCollector.appendException(
+ # UnknownFieldError(what='SubstitutionMappings',
+ # field=cap))
+
+ def _validate_requirements(self):
+ """validate the requirements of substitution mappings."""
+
+ # The requirements must be in node template wchich be mapped.
+ tpls_requirements = self.sub_mapping_def.get(self.REQUIREMENTS)
+ node_requirements = self.sub_mapped_node_template.requirements \
+ if self.sub_mapped_node_template else None
+ for req in node_requirements if node_requirements else []:
+ if (tpls_requirements and
+ req not in list(tpls_requirements.keys())):
+ pass
+ # ValidationIssueCollector.appendException(
+ # UnknownFieldError(what='SubstitutionMappings',
+ # field=req))
+
+ def _validate_outputs(self):
+ """validate the outputs of substitution mappings.
+
+ The outputs defined by the topology template have to match the
+ attributes of the node type or the substituted node template,
+ and the observable attributes of the substituted node template
+ have to be defined as attributes of the node type or outputs in
+ the topology template.
+ """
+
+ # The outputs defined by the topology template have to match the
+ # attributes of the node type according to the specification, but
+ # it's reasonable that there are more inputs than the node type
+ # has properties, the specification will be amended?
+ for output in self.outputs:
+ if output.name not in self.node_definition.get_attributes_def():
+ ValidationIssueCollector.appendException(
+ UnknownOutputError(
+ where=_('SubstitutionMappings with node_type ')
+ + self.node_type,
+ output_name=output.name))*/
diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/TopologyTemplate.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/TopologyTemplate.java
new file mode 100644
index 0000000..efc6948
--- /dev/null
+++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/TopologyTemplate.java
@@ -0,0 +1,866 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.sdc.toscaparser.api;
+
+import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue;
+import org.onap.sdc.toscaparser.api.elements.InterfacesDef;
+import org.onap.sdc.toscaparser.api.elements.NodeType;
+import org.onap.sdc.toscaparser.api.elements.RelationshipType;
+import org.onap.sdc.toscaparser.api.functions.Function;
+import org.onap.sdc.toscaparser.api.functions.GetAttribute;
+import org.onap.sdc.toscaparser.api.functions.GetInput;
+import org.onap.sdc.toscaparser.api.parameters.Input;
+import org.onap.sdc.toscaparser.api.parameters.Output;
+import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder;
+
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
+import java.util.Map;
+
+public class TopologyTemplate {
+
+ private static final String DESCRIPTION = "description";
+ private static final String INPUTS = "inputs";
+ private static final String NODE_TEMPLATES = "node_templates";
+ private static final String RELATIONSHIP_TEMPLATES = "relationship_templates";
+ private static final String OUTPUTS = "outputs";
+ private static final String GROUPS = "groups";
+ private static final String SUBSTITUTION_MAPPINGS = "substitution_mappings";
+ private static final String POLICIES = "policies";
+ private static final String METADATA = "metadata";
+
+ private static String[] SECTIONS = {
+ DESCRIPTION, INPUTS, NODE_TEMPLATES, RELATIONSHIP_TEMPLATES,
+ OUTPUTS, GROUPS, SUBSTITUTION_MAPPINGS, POLICIES, METADATA
+ };
+
+ private LinkedHashMap<String, Object> tpl;
+ LinkedHashMap<String, Object> metaData;
+ private ArrayList<Input> inputs;
+ private ArrayList<Output> outputs;
+ private ArrayList<RelationshipTemplate> relationshipTemplates;
+ private ArrayList<NodeTemplate> nodeTemplates;
+ private LinkedHashMap<String, Object> customDefs;
+ private LinkedHashMap<String, Object> relTypes;//TYPE
+ private NodeTemplate subMappedNodeTemplate;
+ private ArrayList<Group> groups;
+ private ArrayList<Policy> policies;
+ private LinkedHashMap<String, Object> parsedParams = null;//TYPE
+ private String description;
+ private ToscaGraph graph;
+ private SubstitutionMappings substitutionMappings;
+ private boolean resolveGetInput;
+
+ public TopologyTemplate(
+ LinkedHashMap<String, Object> _template,
+ LinkedHashMap<String, Object> _customDefs,
+ LinkedHashMap<String, Object> _relTypes,//TYPE
+ LinkedHashMap<String, Object> _parsedParams,
+ NodeTemplate _subMappedNodeTemplate,
+ boolean _resolveGetInput) {
+
+ tpl = _template;
+ if (tpl != null) {
+ subMappedNodeTemplate = _subMappedNodeTemplate;
+ metaData = _metaData();
+ customDefs = _customDefs;
+ relTypes = _relTypes;
+ parsedParams = _parsedParams;
+ resolveGetInput = _resolveGetInput;
+ _validateField();
+ description = _tplDescription();
+ inputs = _inputs();
+ relationshipTemplates = _relationshipTemplates();
+ //todo: pass subMappedNodeTemplate to ET constractor
+ nodeTemplates = _nodeTemplates();
+ outputs = _outputs();
+ if (nodeTemplates != null) {
+ graph = new ToscaGraph(nodeTemplates);
+ }
+ groups = _groups();
+ policies = _policies();
+ _processIntrinsicFunctions();
+ substitutionMappings = _substitutionMappings();
+ }
+ }
+
+ @SuppressWarnings("unchecked")
+ private ArrayList<Input> _inputs() {
+ ArrayList<Input> alInputs = new ArrayList<>();
+ for (String name : _tplInputs().keySet()) {
+ Object attrs = _tplInputs().get(name);
+ Input input = new Input(name, (LinkedHashMap<String, Object>) attrs, customDefs);
+ if (parsedParams != null && parsedParams.get(name) != null) {
+ input.validate(parsedParams.get(name));
+ } else {
+ Object _default = input.getDefault();
+ if (_default != null) {
+ input.validate(_default);
+ }
+ }
+ if ((parsedParams != null && parsedParams.get(input.getName()) == null || parsedParams == null)
+ && input.isRequired() && input.getDefault() == null) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE003",
+ String.format("MissingRequiredFieldError: The required input \"%s\" was not provided"
+ , input.getName()))
+ );
+ }
+ alInputs.add(input);
+ }
+ return alInputs;
+
+ }
+
+ private LinkedHashMap<String, Object> _metaData() {
+ if (tpl.get(METADATA) != null) {
+ return (LinkedHashMap<String, Object>) tpl.get(METADATA);
+ } else {
+ return new LinkedHashMap<String, Object>();
+ }
+
+ }
+
+ private ArrayList<NodeTemplate> _nodeTemplates() {
+ ArrayList<NodeTemplate> alNodeTemplates = new ArrayList<>();
+ LinkedHashMap<String, Object> tpls = _tplNodeTemplates();
+ if (tpls != null) {
+ for (String name : tpls.keySet()) {
+ NodeTemplate tpl = new NodeTemplate(name,
+ tpls,
+ customDefs,
+ relationshipTemplates,
+ relTypes,
+ subMappedNodeTemplate);
+ if (tpl.getTypeDefinition() != null) {
+ boolean b = NodeType.TOSCA_DEF.get(tpl.getType()) != null;
+ if (b || (tpl.getCustomDef() != null && !tpl.getCustomDef().isEmpty())) {
+ tpl.validate();
+ alNodeTemplates.add(tpl);
+ }
+ }
+ }
+ }
+ return alNodeTemplates;
+ }
+
+ @SuppressWarnings("unchecked")
+ private ArrayList<RelationshipTemplate> _relationshipTemplates() {
+ ArrayList<RelationshipTemplate> alRelationshipTemplates = new ArrayList<>();
+ LinkedHashMap<String, Object> tpls = _tplRelationshipTemplates();
+ if (tpls != null) {
+ for (String name : tpls.keySet()) {
+ RelationshipTemplate tpl = new RelationshipTemplate(
+ (LinkedHashMap<String, Object>) tpls.get(name), name, customDefs, null, null, subMappedNodeTemplate);
+
+ alRelationshipTemplates.add(tpl);
+ }
+ }
+ return alRelationshipTemplates;
+ }
+
+ private ArrayList<Output> _outputs() {
+ ArrayList<Output> alOutputs = new ArrayList<>();
+ for (Map.Entry<String, Object> me : _tplOutputs().entrySet()) {
+ String oname = me.getKey();
+ LinkedHashMap<String, Object> oattrs = (LinkedHashMap<String, Object>) me.getValue();
+ Output o = new Output(oname, oattrs);
+ o.validate();
+ alOutputs.add(o);
+ }
+ return alOutputs;
+ }
+
+ private SubstitutionMappings _substitutionMappings() {
+ LinkedHashMap<String, Object> tplSubstitutionMapping = (LinkedHashMap<String, Object>) _tplSubstitutionMappings();
+
+ //*** the commenting-out below and the weaker condition are in the Python source
+ // #if tpl_substitution_mapping and self.sub_mapped_node_template:
+ if (tplSubstitutionMapping != null && tplSubstitutionMapping.size() > 0) {
+ return new SubstitutionMappings(tplSubstitutionMapping,
+ nodeTemplates,
+ inputs,
+ outputs,
+ groups,
+ subMappedNodeTemplate,
+ customDefs);
+ }
+ return null;
+
+ }
+
+ @SuppressWarnings("unchecked")
+ private ArrayList<Policy> _policies() {
+ ArrayList<Policy> alPolicies = new ArrayList<>();
+ for (Map.Entry<String, Object> me : _tplPolicies().entrySet()) {
+ String policyName = me.getKey();
+ LinkedHashMap<String, Object> policyTpl = (LinkedHashMap<String, Object>) me.getValue();
+ ArrayList<String> targetList = (ArrayList<String>) policyTpl.get("targets");
+ ArrayList<NodeTemplate> targetNodes = new ArrayList<>();
+ ArrayList<Object> targetObjects = new ArrayList<>();
+ ArrayList<Group> targetGroups = new ArrayList<>();
+ String targetsType = "groups";
+ if (targetList != null && targetList.size() >= 1) {
+ targetGroups = _getPolicyGroups(targetList);
+ if (targetGroups == null || targetGroups.isEmpty()) {
+ targetsType = "node_templates";
+ targetNodes = _getGroupMembers(targetList);
+ for (NodeTemplate nt : targetNodes) {
+ targetObjects.add(nt);
+ }
+ } else {
+ for (Group gr : targetGroups) {
+ targetObjects.add(gr);
+ }
+ }
+ }
+ Policy policyObj = new Policy(policyName,
+ policyTpl,
+ targetObjects,
+ targetsType,
+ customDefs,
+ subMappedNodeTemplate);
+ alPolicies.add(policyObj);
+ }
+ return alPolicies;
+ }
+
+ private ArrayList<Group> _groups() {
+ ArrayList<Group> groups = new ArrayList<>();
+ ArrayList<NodeTemplate> memberNodes = null;
+ for (Map.Entry<String, Object> me : _tplGroups().entrySet()) {
+ String groupName = me.getKey();
+ LinkedHashMap<String, Object> groupTpl = (LinkedHashMap<String, Object>) me.getValue();
+ ArrayList<String> memberNames = (ArrayList<String>) groupTpl.get("members");
+ if (memberNames != null) {
+ DataEntity.validateDatatype("list", memberNames, null, null, null);
+ if (memberNames.size() < 1 ||
+ (new HashSet<String>(memberNames)).size() != memberNames.size()) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE005", String.format(
+ "InvalidGroupTargetException: Member nodes \"%s\" should be >= 1 and not repeated",
+ memberNames.toString())));
+ } else {
+ memberNodes = _getGroupMembers(memberNames);
+ }
+ }
+ Group group = new Group(groupName,
+ groupTpl,
+ memberNodes,
+ customDefs, subMappedNodeTemplate);
+ groups.add(group);
+ }
+ return groups;
+ }
+
+ private ArrayList<NodeTemplate> _getGroupMembers(ArrayList<String> memberNames) {
+ ArrayList<NodeTemplate> memberNodes = new ArrayList<>();
+ _validateGroupMembers(memberNames);
+ for (String member : memberNames) {
+ for (NodeTemplate node : nodeTemplates) {
+ if (member.equals(node.getName())) {
+ memberNodes.add(node);
+ }
+ }
+ }
+ return memberNodes;
+ }
+
+ private ArrayList<Group> _getPolicyGroups(ArrayList<String> memberNames) {
+ ArrayList<Group> memberGroups = new ArrayList<>();
+ for (String member : memberNames) {
+ for (Group group : groups) {
+ if (member.equals(group.getName())) {
+ memberGroups.add(group);
+ }
+ }
+ }
+ return memberGroups;
+ }
+
+ private void _validateGroupMembers(ArrayList<String> members) {
+ ArrayList<String> nodeNames = new ArrayList<>();
+ for (NodeTemplate node : nodeTemplates) {
+ nodeNames.add(node.getName());
+ }
+ for (String member : members) {
+ if (!nodeNames.contains(member)) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE239", String.format(
+ "InvalidGroupTargetException: Target member \"%s\" is not found in \"nodeTemplates\"", member)));
+ }
+ }
+ }
+
+ // topology template can act like node template
+ // it is exposed by substitution_mappings.
+
+ public String nodetype() {
+ return substitutionMappings.getNodeType();
+ }
+
+ public LinkedHashMap<String, Object> capabilities() {
+ return substitutionMappings.getCapabilities();
+ }
+
+ public LinkedHashMap<String, Object> requirements() {
+ return substitutionMappings.getRequirements();
+ }
+
+ private String _tplDescription() {
+ return (String) tpl.get(DESCRIPTION);
+ //if description:
+ // return description.rstrip()
+ }
+
+ @SuppressWarnings("unchecked")
+ private LinkedHashMap<String, Object> _tplInputs() {
+ if (tpl.get(INPUTS) != null) {
+ return (LinkedHashMap<String, Object>) tpl.get(INPUTS);
+ }
+ return new LinkedHashMap<String, Object>();
+ }
+
+ @SuppressWarnings("unchecked")
+ private LinkedHashMap<String, Object> _tplNodeTemplates() {
+ return (LinkedHashMap<String, Object>) tpl.get(NODE_TEMPLATES);
+ }
+
+ @SuppressWarnings("unchecked")
+ private LinkedHashMap<String, Object> _tplRelationshipTemplates() {
+ if (tpl.get(RELATIONSHIP_TEMPLATES) != null) {
+ return (LinkedHashMap<String, Object>) tpl.get(RELATIONSHIP_TEMPLATES);
+ }
+ return new LinkedHashMap<String, Object>();
+ }
+
+ @SuppressWarnings("unchecked")
+ private LinkedHashMap<String, Object> _tplOutputs() {
+ if (tpl.get(OUTPUTS) != null) {
+ return (LinkedHashMap<String, Object>) tpl.get(OUTPUTS);
+ }
+ return new LinkedHashMap<String, Object>();
+ }
+
+ @SuppressWarnings("unchecked")
+ private LinkedHashMap<String, Object> _tplSubstitutionMappings() {
+ if (tpl.get(SUBSTITUTION_MAPPINGS) != null) {
+ return (LinkedHashMap<String, Object>) tpl.get(SUBSTITUTION_MAPPINGS);
+ }
+ return new LinkedHashMap<String, Object>();
+ }
+
+ @SuppressWarnings("unchecked")
+ private LinkedHashMap<String, Object> _tplGroups() {
+ if (tpl.get(GROUPS) != null) {
+ return (LinkedHashMap<String, Object>) tpl.get(GROUPS);
+ }
+ return new LinkedHashMap<String, Object>();
+ }
+
+ @SuppressWarnings("unchecked")
+ private LinkedHashMap<String, Object> _tplPolicies() {
+ if (tpl.get(POLICIES) != null) {
+ return (LinkedHashMap<String, Object>) tpl.get(POLICIES);
+ }
+ return new LinkedHashMap<>();
+ }
+
+ private void _validateField() {
+ for (String name : tpl.keySet()) {
+ boolean bFound = false;
+ for (String section : SECTIONS) {
+ if (name.equals(section)) {
+ bFound = true;
+ break;
+ }
+ }
+ if (!bFound) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE240", String.format(
+ "UnknownFieldError: TopologyTemplate contains unknown field \"%s\"", name)));
+ }
+ }
+ }
+
+ @SuppressWarnings("unchecked")
+ private void _processIntrinsicFunctions() {
+ // Process intrinsic functions
+
+ // Current implementation processes functions within node template
+ // properties, requirements, interfaces inputs and template outputs.
+
+ if (nodeTemplates != null) {
+ for (NodeTemplate nt : nodeTemplates) {
+ for (Property prop : nt.getPropertiesObjects()) {
+ prop.setValue(Function.getFunction(this, nt, prop.getValue(), resolveGetInput));
+ }
+ for (InterfacesDef ifd : nt.getInterfaces()) {
+ LinkedHashMap<String, Object> ifin = ifd.getInputs();
+ if (ifin != null) {
+ for (Map.Entry<String, Object> me : ifin.entrySet()) {
+ String name = me.getKey();
+ Object value = Function.getFunction(this, nt, me.getValue(), resolveGetInput);
+ ifd.setInput(name, value);
+ }
+ }
+ }
+ if (nt.getRequirements() != null) {
+ for (RequirementAssignment req : nt.getRequirements().getAll()) {
+ LinkedHashMap<String, Object> rel;
+ Object t = req.getRelationship();
+ // it can be a string or a LHM...
+ if (t instanceof LinkedHashMap) {
+ rel = (LinkedHashMap<String, Object>) t;
+ } else {
+ // we set it to null to fail the next test
+ // and avoid the get("proprties")
+ rel = null;
+ }
+
+ if (rel != null && rel.get("properties") != null) {
+ LinkedHashMap<String, Object> relprops =
+ (LinkedHashMap<String, Object>) rel.get("properties");
+ for (String key : relprops.keySet()) {
+ Object value = relprops.get(key);
+ Object func = Function.getFunction(this, req, value, resolveGetInput);
+ relprops.put(key, func);
+ }
+ }
+ }
+ }
+ if (nt.getCapabilitiesObjects() != null) {
+ for (CapabilityAssignment cap : nt.getCapabilitiesObjects()) {
+ if (cap.getPropertiesObjects() != null) {
+ for (Property prop : cap.getPropertiesObjects()) {
+ Object propvalue = Function.getFunction(this, nt, prop.getValue(), resolveGetInput);
+ if (propvalue instanceof GetInput) {
+ propvalue = ((GetInput) propvalue).result();
+ for (String p : cap.getProperties().keySet()) {
+ //Object v = cap.getProperties().get(p);
+ if (p.equals(prop.getName())) {
+ cap.setProperty(p, propvalue);
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ for (RelationshipType rel : nt.getRelationships().keySet()) {
+ NodeTemplate node = nt.getRelationships().get(rel);
+ ArrayList<RelationshipTemplate> relTpls = node.getRelationshipTemplate();
+ if (relTpls != null) {
+ for (RelationshipTemplate relTpl : relTpls) {
+ // TT 5
+ for (InterfacesDef iface : relTpl.getInterfaces()) {
+ if (iface.getInputs() != null) {
+ for (String name : iface.getInputs().keySet()) {
+ Object value = iface.getInputs().get(name);
+ Object func = Function.getFunction(
+ this,
+ relTpl,
+ value,
+ resolveGetInput);
+ iface.setInput(name, func);
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ for (Output output : outputs) {
+ Object func = Function.getFunction(this, outputs, output.getValue(), resolveGetInput);
+ if (func instanceof GetAttribute) {
+ output.setAttr(Output.VALUE, func);
+ }
+ }
+ }
+
+ public static String getSubMappingNodeType(LinkedHashMap<String, Object> topologyTpl) {
+ if (topologyTpl != null && topologyTpl instanceof LinkedHashMap) {
+ Object submapTpl = topologyTpl.get(SUBSTITUTION_MAPPINGS);
+ return SubstitutionMappings.stGetNodeType((LinkedHashMap<String, Object>) submapTpl);
+ }
+ return null;
+ }
+
+ // getters
+
+ public LinkedHashMap<String, Object> getTpl() {
+ return tpl;
+ }
+
+ public LinkedHashMap<String, Object> getMetadata() {
+ return metaData;
+ }
+
+ public ArrayList<Input> getInputs() {
+ return inputs;
+ }
+
+ public ArrayList<Output> getOutputs() {
+ return outputs;
+ }
+
+ public ArrayList<Policy> getPolicies() {
+ return policies;
+ }
+
+ public ArrayList<RelationshipTemplate> getRelationshipTemplates() {
+ return relationshipTemplates;
+ }
+
+ public ArrayList<NodeTemplate> getNodeTemplates() {
+ return nodeTemplates;
+ }
+
+ public ArrayList<Group> getGroups() {
+ return groups;
+ }
+
+ public SubstitutionMappings getSubstitutionMappings() {
+ return substitutionMappings;
+ }
+
+ public LinkedHashMap<String, Object> getParsedParams() {
+ return parsedParams;
+ }
+
+ public boolean getResolveGetInput() {
+ return resolveGetInput;
+ }
+
+ public LinkedHashMap<String, Object> getCustomDefs() {
+ return customDefs;
+ }
+}
+
+/*python
+
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+
+import logging
+
+from toscaparser.common import exception
+from toscaparser.dataentity import DataEntity
+from toscaparser import functions
+from toscaparser.groups import Group
+from toscaparser.nodetemplate import NodeTemplate
+from toscaparser.parameters import Input
+from toscaparser.parameters import Output
+from toscaparser.policy import Policy
+from toscaparser.relationship_template import RelationshipTemplate
+from toscaparser.substitution_mappings import SubstitutionMappings
+from toscaparser.tpl_relationship_graph import ToscaGraph
+from toscaparser.utils.gettextutils import _
+
+
+# Topology template key names
+SECTIONS = (DESCRIPTION, INPUTS, NODE_TEMPLATES,
+ RELATIONSHIP_TEMPLATES, OUTPUTS, GROUPS,
+ SUBSTITUION_MAPPINGS, POLICIES) = \
+ ('description', 'inputs', 'node_templates',
+ 'relationship_templates', 'outputs', 'groups',
+ 'substitution_mappings', 'policies')
+
+log = logging.getLogger("tosca.model")
+
+
+class TopologyTemplate(object):
+
+ '''Load the template data.'''
+ def __init__(self, template, custom_defs,
+ rel_types=None, parsed_params=None,
+ sub_mapped_node_template=None):
+ self.tpl = template
+ self.sub_mapped_node_template = sub_mapped_node_template
+ if self.tpl:
+ self.custom_defs = custom_defs
+ self.rel_types = rel_types
+ self.parsed_params = parsed_params
+ self._validate_field()
+ self.description = self._tpl_description()
+ self.inputs = self._inputs()
+ self.relationship_templates = self._relationship_templates()
+ self.nodetemplates = self._nodetemplates()
+ self.outputs = self._outputs()
+ if hasattr(self, 'nodetemplates'):
+ self.graph = ToscaGraph(self.nodetemplates)
+ self.groups = self._groups()
+ self.policies = self._policies()
+ self._process_intrinsic_functions()
+ self.substitution_mappings = self._substitution_mappings()
+
+ def _inputs(self):
+ inputs = []
+ for name, attrs in self._tpl_inputs().items():
+ input = Input(name, attrs)
+ if self.parsed_params and name in self.parsed_params:
+ input.validate(self.parsed_params[name])
+ else:
+ default = input.default
+ if default:
+ input.validate(default)
+ if (self.parsed_params and input.name not in self.parsed_params
+ or self.parsed_params is None) and input.required \
+ and input.default is None:
+ log.warning(_('The required parameter %s '
+ 'is not provided') % input.name)
+
+ inputs.append(input)
+ return inputs
+
+ def _nodetemplates(self):
+ nodetemplates = []
+ tpls = self._tpl_nodetemplates()
+ if tpls:
+ for name in tpls:
+ tpl = NodeTemplate(name, tpls, self.custom_defs,
+ self.relationship_templates,
+ self.rel_types)
+ if (tpl.type_definition and
+ (tpl.type in tpl.type_definition.TOSCA_DEF or
+ (tpl.type not in tpl.type_definition.TOSCA_DEF and
+ bool(tpl.custom_def)))):
+ tpl.validate(self)
+ nodetemplates.append(tpl)
+ return nodetemplates
+
+ def _relationship_templates(self):
+ rel_templates = []
+ tpls = self._tpl_relationship_templates()
+ for name in tpls:
+ tpl = RelationshipTemplate(tpls[name], name, self.custom_defs)
+ rel_templates.append(tpl)
+ return rel_templates
+
+ def _outputs(self):
+ outputs = []
+ for name, attrs in self._tpl_outputs().items():
+ output = Output(name, attrs)
+ output.validate()
+ outputs.append(output)
+ return outputs
+
+ def _substitution_mappings(self):
+ tpl_substitution_mapping = self._tpl_substitution_mappings()
+ # if tpl_substitution_mapping and self.sub_mapped_node_template:
+ if tpl_substitution_mapping:
+ return SubstitutionMappings(tpl_substitution_mapping,
+ self.nodetemplates,
+ self.inputs,
+ self.outputs,
+ self.sub_mapped_node_template,
+ self.custom_defs)
+
+ def _policies(self):
+ policies = []
+ for policy in self._tpl_policies():
+ for policy_name, policy_tpl in policy.items():
+ target_list = policy_tpl.get('targets')
+ if target_list and len(target_list) >= 1:
+ target_objects = []
+ targets_type = "groups"
+ target_objects = self._get_policy_groups(target_list)
+ if not target_objects:
+ targets_type = "node_templates"
+ target_objects = self._get_group_members(target_list)
+ policyObj = Policy(policy_name, policy_tpl,
+ target_objects, targets_type,
+ self.custom_defs)
+ policies.append(policyObj)
+ return policies
+
+ def _groups(self):
+ groups = []
+ member_nodes = None
+ for group_name, group_tpl in self._tpl_groups().items():
+ member_names = group_tpl.get('members')
+ if member_names is not None:
+ DataEntity.validate_datatype('list', member_names)
+ if len(member_names) < 1 or \
+ len(member_names) != len(set(member_names)):
+ exception.ValidationIssueCollector.appendException(
+ exception.InvalidGroupTargetException(
+ message=_('Member nodes "%s" should be >= 1 '
+ 'and not repeated') % member_names))
+ else:
+ member_nodes = self._get_group_members(member_names)
+ group = Group(group_name, group_tpl,
+ member_nodes,
+ self.custom_defs)
+ groups.append(group)
+ return groups
+
+ def _get_group_members(self, member_names):
+ member_nodes = []
+ self._validate_group_members(member_names)
+ for member in member_names:
+ for node in self.nodetemplates:
+ if node.name == member:
+ member_nodes.append(node)
+ return member_nodes
+
+ def _get_policy_groups(self, member_names):
+ member_groups = []
+ for member in member_names:
+ for group in self.groups:
+ if group.name == member:
+ member_groups.append(group)
+ return member_groups
+
+ def _validate_group_members(self, members):
+ node_names = []
+ for node in self.nodetemplates:
+ node_names.append(node.name)
+ for member in members:
+ if member not in node_names:
+ exception.ValidationIssueCollector.appendException(
+ exception.InvalidGroupTargetException(
+ message=_('Target member "%s" is not found in '
+ 'node_templates') % member))
+
+ # topology template can act like node template
+ # it is exposed by substitution_mappings.
+ def nodetype(self):
+ return self.substitution_mappings.node_type \
+ if self.substitution_mappings else None
+
+ def capabilities(self):
+ return self.substitution_mappings.capabilities \
+ if self.substitution_mappings else None
+
+ def requirements(self):
+ return self.substitution_mappings.requirements \
+ if self.substitution_mappings else None
+
+ def _tpl_description(self):
+ description = self.tpl.get(DESCRIPTION)
+ if description:
+ return description.rstrip()
+
+ def _tpl_inputs(self):
+ return self.tpl.get(INPUTS) or {}
+
+ def _tpl_nodetemplates(self):
+ return self.tpl.get(NODE_TEMPLATES)
+
+ def _tpl_relationship_templates(self):
+ return self.tpl.get(RELATIONSHIP_TEMPLATES) or {}
+
+ def _tpl_outputs(self):
+ return self.tpl.get(OUTPUTS) or {}
+
+ def _tpl_substitution_mappings(self):
+ return self.tpl.get(SUBSTITUION_MAPPINGS) or {}
+
+ def _tpl_groups(self):
+ return self.tpl.get(GROUPS) or {}
+
+ def _tpl_policies(self):
+ return self.tpl.get(POLICIES) or {}
+
+ def _validate_field(self):
+ for name in self.tpl:
+ if name not in SECTIONS:
+ exception.ValidationIssueCollector.appendException(
+ exception.UnknownFieldError(what='Template', field=name))
+
+ def _process_intrinsic_functions(self):
+ """Process intrinsic functions
+
+ Current implementation processes functions within node template
+ properties, requirements, interfaces inputs and template outputs.
+ """
+ if hasattr(self, 'nodetemplates'):
+ for node_template in self.nodetemplates:
+ for prop in node_template.get_properties_objects():
+ prop.value = functions.get_function(self,
+ node_template,
+ prop.value)
+ for interface in node_template.interfaces:
+ if interface.inputs:
+ for name, value in interface.inputs.items():
+ interface.inputs[name] = functions.get_function(
+ self,
+ node_template,
+ value)
+ if node_template.requirements and \
+ isinstance(node_template.requirements, list):
+ for req in node_template.requirements:
+ rel = req
+ for req_name, req_item in req.items():
+ if isinstance(req_item, dict):
+ rel = req_item.get('relationship')
+ break
+ if rel and 'properties' in rel:
+ for key, value in rel['properties'].items():
+ rel['properties'][key] = \
+ functions.get_function(self,
+ req,
+ value)
+ if node_template.get_capabilities_objects():
+ for cap in node_template.get_capabilities_objects():
+ if cap.get_properties_objects():
+ for prop in cap.get_properties_objects():
+ propvalue = functions.get_function(
+ self,
+ node_template,
+ prop.value)
+ if isinstance(propvalue, functions.GetInput):
+ propvalue = propvalue.result()
+ for p, v in cap._properties.items():
+ if p == prop.name:
+ cap._properties[p] = propvalue
+ for rel, node in node_template.relationships.items():
+ rel_tpls = node.relationship_tpl
+ if rel_tpls:
+ for rel_tpl in rel_tpls:
+ for interface in rel_tpl.interfaces:
+ if interface.inputs:
+ for name, value in \
+ interface.inputs.items():
+ interface.inputs[name] = \
+ functions.get_function(self,
+ rel_tpl,
+ value)
+ for output in self.outputs:
+ func = functions.get_function(self, self.outputs, output.value)
+ if isinstance(func, functions.GetAttribute):
+ output.attrs[output.VALUE] = func
+
+ @classmethod
+ def get_sub_mapping_node_type(cls, topology_tpl):
+ if topology_tpl and isinstance(topology_tpl, dict):
+ submap_tpl = topology_tpl.get(SUBSTITUION_MAPPINGS)
+ return SubstitutionMappings.get_node_type(submap_tpl)
+*/
diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/ToscaGraph.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/ToscaGraph.java
new file mode 100644
index 0000000..1706cdc
--- /dev/null
+++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/ToscaGraph.java
@@ -0,0 +1,129 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.sdc.toscaparser.api;
+
+import org.onap.sdc.toscaparser.api.elements.RelationshipType;
+
+import java.util.ArrayList;
+import java.util.LinkedHashMap;
+
+//import java.util.Iterator;
+
+public class ToscaGraph {
+ // Graph of Tosca Node Templates
+
+ private ArrayList<NodeTemplate> nodeTemplates;
+ private LinkedHashMap<String, NodeTemplate> vertices;
+
+ public ToscaGraph(ArrayList<NodeTemplate> inodeTemplates) {
+ nodeTemplates = inodeTemplates;
+ vertices = new LinkedHashMap<String, NodeTemplate>();
+ create();
+ }
+
+ private void createVertex(NodeTemplate node) {
+ if (vertices.get(node.getName()) == null) {
+ vertices.put(node.getName(), node);
+ }
+ }
+
+ private void createEdge(NodeTemplate node1,
+ NodeTemplate node2,
+ RelationshipType relation) {
+ if (vertices.get(node1.getName()) == null) {
+ createVertex(node1);
+ vertices.get(node1.name)._addNext(node2, relation);
+ }
+ }
+
+ public NodeTemplate vertex(String name) {
+ if (vertices.get(name) != null) {
+ return vertices.get(name);
+ }
+ return null;
+ }
+
+// public Iterator getIter() {
+// return vertices.values().iterator();
+// }
+
+ private void create() {
+ for (NodeTemplate node : nodeTemplates) {
+ LinkedHashMap<RelationshipType, NodeTemplate> relation = node.getRelationships();
+ if (relation != null) {
+ for (RelationshipType rel : relation.keySet()) {
+ NodeTemplate nodeTpls = relation.get(rel);
+ for (NodeTemplate tpl : nodeTemplates) {
+ if (tpl.getName().equals(nodeTpls.getName())) {
+ createEdge(node, tpl, rel);
+ }
+ }
+ }
+ }
+ createVertex(node);
+ }
+ }
+
+ @Override
+ public String toString() {
+ return "ToscaGraph{"
+ + "nodeTemplates=" + nodeTemplates
+ + ", vertices=" + vertices
+ + '}';
+ }
+}
+
+/*python
+
+class ToscaGraph(object):
+ '''Graph of Tosca Node Templates.'''
+ def __init__(self, nodetemplates):
+ self.nodetemplates = nodetemplates
+ self.vertices = {}
+ self._create()
+
+ def _create_vertex(self, node):
+ if node not in self.vertices:
+ self.vertices[node.name] = node
+
+ def _create_edge(self, node1, node2, relationship):
+ if node1 not in self.vertices:
+ self._create_vertex(node1)
+ self.vertices[node1.name]._add_next(node2,
+ relationship)
+
+ def vertex(self, node):
+ if node in self.vertices:
+ return self.vertices[node]
+
+ def __iter__(self):
+ return iter(self.vertices.values())
+
+ def _create(self):
+ for node in self.nodetemplates:
+ relation = node.relationships
+ if relation:
+ for rel, nodetpls in relation.items():
+ for tpl in self.nodetemplates:
+ if tpl.name == nodetpls.name:
+ self._create_edge(node, tpl, rel)
+ self._create_vertex(node)
+*/
diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/ToscaTemplate.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/ToscaTemplate.java
new file mode 100644
index 0000000..ddb8ddb
--- /dev/null
+++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/ToscaTemplate.java
@@ -0,0 +1,1267 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (c) 2017 AT&T Intellectual Property.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ * Modifications copyright (c) 2019 Fujitsu Limited.
+ * ================================================================================
+ */
+package org.onap.sdc.toscaparser.api;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.InputStream;
+import java.nio.file.Files;
+import java.nio.file.Paths;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.function.Predicate;
+
+import org.onap.sdc.toscaparser.api.common.JToscaException;
+import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue;
+import org.onap.sdc.toscaparser.api.common.ValidationIssueCollector;
+import org.onap.sdc.toscaparser.api.elements.EntityType;
+import org.onap.sdc.toscaparser.api.elements.DataType;
+import org.onap.sdc.toscaparser.api.elements.Metadata;
+import org.onap.sdc.toscaparser.api.extensions.ExtTools;
+import org.onap.sdc.toscaparser.api.parameters.Input;
+import org.onap.sdc.toscaparser.api.parameters.Output;
+import org.onap.sdc.toscaparser.api.prereq.CSAR;
+import org.onap.sdc.toscaparser.api.utils.JToscaErrorCodes;
+import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.yaml.snakeyaml.Yaml;
+
+public class ToscaTemplate extends Object {
+
+ public static final int MAX_LEVELS = 20;
+ private static Logger log = LoggerFactory.getLogger(ToscaTemplate.class.getName());
+
+ // TOSCA template key names
+ private static final String DEFINITION_VERSION = "tosca_definitions_version";
+ private static final String DEFAULT_NAMESPACE = "tosca_default_namespace";
+ private static final String TEMPLATE_NAME = "template_name";
+ private static final String TOPOLOGY_TEMPLATE = "topology_template";
+ private static final String TEMPLATE_AUTHOR = "template_author";
+ private static final String TEMPLATE_VERSION = "template_version";
+ private static final String DESCRIPTION = "description";
+ private static final String IMPORTS = "imports";
+ private static final String DSL_DEFINITIONS = "dsl_definitions";
+ private static final String NODE_TYPES = "node_types";
+ private static final String RELATIONSHIP_TYPES = "relationship_types";
+ private static final String RELATIONSHIP_TEMPLATES = "relationship_templates";
+ private static final String CAPABILITY_TYPES = "capability_types";
+ private static final String ARTIFACT_TYPES = "artifact_types";
+ private static final String DATA_TYPES = "data_types";
+ private static final String INTERFACE_TYPES = "interface_types";
+ private static final String POLICY_TYPES = "policy_types";
+ private static final String GROUP_TYPES = "group_types";
+ private static final String REPOSITORIES = "repositories";
+
+ private static String SECTIONS[] = {
+ DEFINITION_VERSION, DEFAULT_NAMESPACE, TEMPLATE_NAME,
+ TOPOLOGY_TEMPLATE, TEMPLATE_AUTHOR, TEMPLATE_VERSION,
+ DESCRIPTION, IMPORTS, DSL_DEFINITIONS, NODE_TYPES,
+ RELATIONSHIP_TYPES, RELATIONSHIP_TEMPLATES,
+ CAPABILITY_TYPES, ARTIFACT_TYPES, DATA_TYPES,
+ INTERFACE_TYPES, POLICY_TYPES, GROUP_TYPES, REPOSITORIES
+ };
+
+ // Sections that are specific to individual template definitions
+ private static final String METADATA = "metadata";
+ private static ArrayList<String> SPECIAL_SECTIONS;
+
+ private ExtTools exttools = new ExtTools();
+
+ private ArrayList<String> VALID_TEMPLATE_VERSIONS;
+ private LinkedHashMap<String, ArrayList<String>> ADDITIONAL_SECTIONS;
+
+ private boolean isFile;
+ private String path;
+ private String inputPath;
+ private String rootPath;
+ private LinkedHashMap<String, Object> parsedParams;
+ private boolean resolveGetInput;
+ private LinkedHashMap<String, Object> tpl;
+ private String version;
+ private ArrayList<Object> imports;
+ private LinkedHashMap<String, Object> relationshipTypes;
+ private Metadata metaData;
+ private String description;
+ private TopologyTemplate topologyTemplate;
+ private ArrayList<Repository> repositories;
+ private ArrayList<Input> inputs;
+ private ArrayList<RelationshipTemplate> relationshipTemplates;
+ private ArrayList<NodeTemplate> nodeTemplates;
+ private ArrayList<Output> outputs;
+ private ArrayList<Policy> policies;
+ private ArrayList<Group> groups;
+ private ConcurrentHashMap<String, Object> nestedToscaTplsWithTopology;
+ private ArrayList<TopologyTemplate> nestedToscaTemplatesWithTopology;
+ private ToscaGraph graph;
+ private String csarTempDir;
+ private int nestingLoopCounter;
+ private LinkedHashMap<String, LinkedHashMap<String, Object>> metaProperties;
+ private Set<String> processedImports;
+ private LinkedHashMap<String, Object> customDefsFinal = new LinkedHashMap<>();
+ private HashSet<DataType> dataTypes;
+
+    /**
+     * Creates a ToscaTemplate with get_input resolution enabled.
+     *
+     * @param _path         path to a YAML/CSAR template; may be null when yamlDictTpl is given
+     * @param _parsedParams parsed input parameters
+     * @param aFile         true if _path denotes a file
+     * @param yamlDictTpl   pre-parsed template map, used only when _path is empty
+     * @throws JToscaException if parsing must abort (see init)
+     */
+    public ToscaTemplate(String _path,
+                         LinkedHashMap<String, Object> _parsedParams,
+                         boolean aFile,
+                         LinkedHashMap<String, Object> yamlDictTpl) throws JToscaException {
+        init(_path, _parsedParams, aFile, yamlDictTpl, true);
+    }
+
+    /**
+     * Creates a ToscaTemplate with explicit control over get_input resolution.
+     *
+     * @param resolveGetInput whether get_input functions should be resolved during parsing
+     * @throws JToscaException if parsing must abort (see init)
+     */
+    public ToscaTemplate(String _path,
+                         LinkedHashMap<String, Object> _parsedParams,
+                         boolean aFile,
+                         LinkedHashMap<String, Object> yamlDictTpl, boolean resolveGetInput) throws JToscaException {
+        init(_path, _parsedParams, aFile, yamlDictTpl, resolveGetInput);
+    }
+
+    /**
+     * Shared initialization for all constructors: loads the template YAML
+     * (from a file path, a CSAR archive, or an already-parsed map), validates
+     * its top-level fields and builds the topology template, inputs, outputs,
+     * node templates, policies, groups and the relationship graph.
+     *
+     * @param _path            path to a YAML/CSAR template; may be null or empty
+     * @param _parsedParams    parsed input parameters forwarded to the topology template
+     * @param aFile            true if _path denotes a local file
+     * @param yamlDictTpl      pre-parsed template map, used only when _path is empty
+     * @param _resolveGetInput whether get_input functions should be resolved
+     * @throws JToscaException when no usable input is available (via _abort)
+     */
+    @SuppressWarnings("unchecked")
+    private void init(String _path,
+                      LinkedHashMap<String, Object> _parsedParams,
+                      boolean aFile,
+                      LinkedHashMap<String, Object> yamlDictTpl, boolean _resolveGetInput) throws JToscaException {
+
+        // Fresh collector per parse; issues are reported in verifyTemplate().
+        ThreadLocalsHolder.setCollector(new ValidationIssueCollector());
+
+        VALID_TEMPLATE_VERSIONS = new ArrayList<>();
+        VALID_TEMPLATE_VERSIONS.add("tosca_simple_yaml_1_0");
+        VALID_TEMPLATE_VERSIONS.add("tosca_simple_yaml_1_1");
+        VALID_TEMPLATE_VERSIONS.addAll(exttools.getVersions());
+        ADDITIONAL_SECTIONS = new LinkedHashMap<>();
+        SPECIAL_SECTIONS = new ArrayList<>();
+        SPECIAL_SECTIONS.add(METADATA);
+        ADDITIONAL_SECTIONS.put("tosca_simple_yaml_1_0", SPECIAL_SECTIONS);
+        ADDITIONAL_SECTIONS.put("tosca_simple_yaml_1_1", SPECIAL_SECTIONS);
+        ADDITIONAL_SECTIONS.putAll(exttools.getSections());
+
+        isFile = aFile;
+        inputPath = null;
+        path = null;
+        tpl = null;
+        csarTempDir = null;
+        nestedToscaTplsWithTopology = new ConcurrentHashMap<>();
+        nestedToscaTemplatesWithTopology = new ArrayList<TopologyTemplate>();
+        resolveGetInput = _resolveGetInput;
+        metaProperties = new LinkedHashMap<>();
+
+        if (_path != null && !_path.isEmpty()) {
+            // save the original input path
+            inputPath = _path;
+            // get the actual path (will change with CSAR)
+            path = _getPath(_path);
+            // load the YAML template
+            if (path != null && !path.isEmpty()) {
+                // Fix: removed stray ';' inside the try-with-resources header.
+                try (InputStream input = new FileInputStream(new File(path))) {
+                    log.debug("ToscaTemplate Loading YAML file {}", path);
+                    Yaml yaml = new Yaml();
+                    Object data = yaml.load(input);
+                    this.tpl = (LinkedHashMap<String, Object>) data;
+                } catch (FileNotFoundException e) {
+                    log.error("ToscaTemplate - Exception loading yaml: {}", e.getMessage());
+                    log.error("Exception", e);
+                    ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE275",
+                            "ToscaTemplate - Exception loading yaml: -> " + e.getMessage()));
+                    return;
+                } catch (Exception e) {
+                    // Fix: the message previously had no '{}' placeholder, so
+                    // e.getMessage() was silently dropped by SLF4J.
+                    log.error("ToscaTemplate - Error loading yaml, aborting -> {}", e.getMessage());
+                    log.error("Exception", e);
+                    ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE275",
+                            "ToscaTemplate - Error loading yaml, aborting -> " + e.getMessage()));
+                    return;
+                }
+
+                if (yamlDictTpl != null) {
+                    // The path wins when both inputs are supplied.
+                    log.debug("ToscaTemplate - Both path and yaml_dict_tpl arguments were provided. Using path and ignoring yaml_dict_tpl");
+                }
+            } else {
+                // no input to process...
+                _abort();
+            }
+        } else {
+            if (yamlDictTpl != null) {
+                tpl = yamlDictTpl;
+            } else {
+                ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE244",
+                        "ValueError: No path or yaml_dict_tpl was provided. There is nothing to parse"));
+                log.debug("ToscaTemplate ValueError: No path or yaml_dict_tpl was provided. There is nothing to parse");
+
+            }
+        }
+
+        if (tpl != null) {
+            parsedParams = _parsedParams;
+            _validateField();
+            this.rootPath = path;
+            this.processedImports = new HashSet<String>();
+            this.imports = _tplImports();
+            this.version = _tplVersion();
+            this.metaData = _tplMetaData();
+            this.relationshipTypes = _tplRelationshipTypes();
+            this.description = _tplDescription();
+            this.dataTypes = getTopologyDataTypes();
+            this.topologyTemplate = _topologyTemplate();
+            this.repositories = _tplRepositories();
+            if (topologyTemplate.getTpl() != null) {
+                this.inputs = _inputs();
+                this.relationshipTemplates = _relationshipTemplates();
+                this.nodeTemplates = _nodeTemplates();
+                this.outputs = _outputs();
+                this.policies = _policies();
+                this.groups = _groups();
+                // Recursively resolve substitution mappings for nested templates.
+                _handleNestedToscaTemplatesWithTopology(topologyTemplate);
+                graph = new ToscaGraph(nodeTemplates);
+            }
+        }
+
+        // Remove the temporary directory created when decompressing a CSAR.
+        if (csarTempDir != null) {
+            CSAR.deleteDir(new File(csarTempDir));
+            csarTempDir = null;
+        }
+
+        verifyTemplate();
+
+    }
+
+    /**
+     * Reports all collected validation issues, then aborts parsing.
+     *
+     * @throws JToscaException always, carrying the PATH_NOT_VALID error code
+     */
+    private void _abort() throws JToscaException {
+        // print out all exceptions caught
+        verifyTemplate();
+        throw new JToscaException("jtosca aborting", JToscaErrorCodes.PATH_NOT_VALID.getValue());
+    }
+
+    /**
+     * Builds the top-level TopologyTemplate from the "topology_template"
+     * section, the consolidated custom type definitions from all imports,
+     * and the parsed parameters. The null argument is the substituted node
+     * template (none at the top level).
+     */
+    private TopologyTemplate _topologyTemplate() {
+        return new TopologyTemplate(
+                _tplTopologyTemplate(),
+                _getAllCustomDefs(imports),
+                relationshipTypes,
+                parsedParams,
+                null,
+                resolveGetInput);
+    }
+
+    // Delegates to the topology template's parsed inputs.
+    private ArrayList<Input> _inputs() {
+        return topologyTemplate.getInputs();
+    }
+
+    // Delegates to the topology template's parsed node templates.
+    private ArrayList<NodeTemplate> _nodeTemplates() {
+        return topologyTemplate.getNodeTemplates();
+    }
+
+    // Delegates to the topology template's parsed relationship templates.
+    private ArrayList<RelationshipTemplate> _relationshipTemplates() {
+        return topologyTemplate.getRelationshipTemplates();
+    }
+
+    // Delegates to the topology template's parsed outputs.
+    private ArrayList<Output> _outputs() {
+        return topologyTemplate.getOutputs();
+    }
+
+    // Reads "tosca_definitions_version" from the raw template map; may be null.
+    private String _tplVersion() {
+        return (String) tpl.get(DEFINITION_VERSION);
+    }
+
+    /**
+     * Reads the "metadata" section; returns null when it is absent or not a map.
+     */
+    @SuppressWarnings("unchecked")
+    private Metadata _tplMetaData() {
+        Object mdo = tpl.get(METADATA);
+        if (mdo instanceof LinkedHashMap) {
+            return new Metadata((Map<String, Object>) mdo);
+        } else {
+            return null;
+        }
+    }
+
+    // Reads the top-level "description"; may be null.
+    private String _tplDescription() {
+        return (String) tpl.get(DESCRIPTION);
+    }
+
+    // Reads the raw "imports" section; may be null when the template has no imports.
+    @SuppressWarnings("unchecked")
+    private ArrayList<Object> _tplImports() {
+        return (ArrayList<Object>) tpl.get(IMPORTS);
+    }
+
+    /**
+     * Parses the top-level "repositories" section into Repository objects.
+     *
+     * @return list of repositories; empty (never null) when the section is absent
+     */
+    @SuppressWarnings("unchecked")
+    private ArrayList<Repository> _tplRepositories() {
+        // Renamed the local (was "repositories") so it no longer shadows the
+        // instance field of the same name.
+        LinkedHashMap<String, Object> repositoryDefs =
+                (LinkedHashMap<String, Object>) tpl.get(REPOSITORIES);
+        ArrayList<Repository> repositoryList = new ArrayList<>();
+        if (repositoryDefs != null) {
+            for (Map.Entry<String, Object> me : repositoryDefs.entrySet()) {
+                repositoryList.add(new Repository(me.getKey(), me.getValue()));
+            }
+        }
+        return repositoryList;
+    }
+
+    // Collects relationship type definitions from imports and this template.
+    private LinkedHashMap<String, Object> _tplRelationshipTypes() {
+        return (LinkedHashMap<String, Object>) _getCustomTypes(RELATIONSHIP_TYPES, null);
+    }
+
+    // Reads the raw "topology_template" section; may be null.
+    @SuppressWarnings("unchecked")
+    private LinkedHashMap<String, Object> _tplTopologyTemplate() {
+        return (LinkedHashMap<String, Object>) tpl.get(TOPOLOGY_TEMPLATE);
+    }
+
+    // Delegates to the topology template's parsed policies.
+    private ArrayList<Policy> _policies() {
+        return topologyTemplate.getPolicies();
+    }
+
+    // Delegates to the topology template's parsed groups.
+    private ArrayList<Group> _groups() {
+        return topologyTemplate.getGroups();
+    }
+
+    /**
+     * Reads the top-level "data_types" section, registers its definitions in
+     * customDefsFinal, and wraps each entry in a DataType object.
+     *
+     * @return set of datatypes; empty when the section is absent
+     */
+    @SuppressWarnings("unchecked")
+    private HashSet<DataType> getTopologyDataTypes() {
+        LinkedHashMap<String, Object> value =
+                (LinkedHashMap<String, Object>) tpl.get(DATA_TYPES);
+        HashSet<DataType> datatypes = new HashSet<>();
+        if (value != null) {
+            customDefsFinal.putAll(value);
+            for (Map.Entry<String, Object> me : value.entrySet()) {
+                // NOTE(review): the whole "data_types" map is passed as the
+                // second argument (custom defs context), not me.getValue() --
+                // presumably intentional so each type can see its siblings; confirm.
+                DataType datatype = new DataType(me.getKey(), value);
+                datatypes.add(datatype);
+            }
+        }
+
+
+        return datatypes;
+    }
+
+    /**
+     * This method is used to get consolidated custom definitions from all imports.
+     * It is logically divided in two parts to handle imports in map and list formats.
+     * Before processing the imports it sorts them so that current-directory imports
+     * are processed first, then others. Once sorted, it processes each import one
+     * by one, recursively following nested "imports" sections.
+     * To avoid cyclic dependency among imports, this method uses a set
+     * (processedImports) to track imports already handled and filters out
+     * imports that occur more than once.
+     *
+     * @param alImports all imports which need to be processed
+     * @return the linked hash map containing all import definitions
+     */
+
+    @SuppressWarnings("unchecked")
+    private LinkedHashMap<String, Object> _getAllCustomDefs(Object alImports) {
+
+
+        String types[] = {
+                IMPORTS, NODE_TYPES, CAPABILITY_TYPES, RELATIONSHIP_TYPES,
+                DATA_TYPES, INTERFACE_TYPES, POLICY_TYPES, GROUP_TYPES
+        };
+
+        List<Map<String, Object>> imports = (List<Map<String, Object>>) alImports;
+        if (imports != null && !imports.isEmpty()) {
+            if (imports.get(0) instanceof LinkedHashMap) {
+                // Map-format imports: process one at a time so that recursion and
+                // duplicate filtering can be applied per import.
+                imports = sortImports(imports);
+
+                for (Map<String, Object> map : imports) {
+                    List<Map<String, Object>> singleImportList = new ArrayList<>();
+                    singleImportList.add(map);
+
+                    Map<String, String> importNameDetails = getValidFileNameForImportReference(singleImportList);
+                    singleImportList = filterImportsForRecursion(singleImportList, importNameDetails);
+
+                    if (!singleImportList.get(0).isEmpty()) {
+                        LinkedHashMap<String, Object> customDefs = _getCustomTypes(types, new ArrayList<>(singleImportList));
+                        // Mark this import as handled to break potential cycles.
+                        processedImports.add(importNameDetails.get("importFileName"));
+
+                        if (customDefs != null) {
+                            customDefsFinal.putAll(customDefs);
+
+                            if (customDefs.get(IMPORTS) != null) {
+                                // Re-anchor 'path' relative to the current import
+                                // before descending into its own imports.
+                                resetPathForRecursiveImports(importNameDetails.get("importRelativeName"));
+                                LinkedHashMap<String, Object> importDefs = _getAllCustomDefs(customDefs.get(IMPORTS));
+                                customDefsFinal.putAll(importDefs);
+                            }
+                        }
+                    }
+                }
+            } else {
+                // List-format imports: handled in one shot.
+                LinkedHashMap<String, Object> customDefs = _getCustomTypes(types, new ArrayList<>(imports));
+                if (customDefs != null) {
+                    customDefsFinal.putAll(customDefs);
+
+                    if (customDefs.get(IMPORTS) != null) {
+                        LinkedHashMap<String, Object> importDefs = _getAllCustomDefs(customDefs.get(IMPORTS));
+                        customDefsFinal.putAll(importDefs);
+                    }
+                }
+            }
+        }
+
+        // As imports are not custom_types, remove from the dict
+        customDefsFinal.remove(IMPORTS);
+
+        return customDefsFinal;
+    }
+
+    /**
+     * This method is used to sort the imports in order so that same-directory
+     * imports will be processed first, then subdirectory imports ("/"), then
+     * parent-directory imports ("../").
+     *
+     * @param customImports the custom imports (mutated in place and returned)
+     * @return the sorted list of imports
+     */
+    private List<Map<String, Object>> sortImports(List<Map<String, Object>> customImports) {
+        List<Map<String, Object>> finalList1 = new ArrayList<>();
+        List<Map<String, Object>> finalList2 = new ArrayList<>();
+        Iterator<Map<String, Object>> itr = customImports.iterator();
+        while (itr.hasNext()) {
+            Map<String, Object> innerMap = itr.next();
+            // NOTE(review): classification relies on the map's toString()
+            // containing "/" or "../" anywhere -- presumably the import's file
+            // path; a key or value containing "/" for another reason would
+            // also match. Confirm against the import map structure.
+            if (innerMap.toString().contains("../")) {
+                finalList2.add(innerMap);
+                itr.remove();
+            } else if (innerMap.toString().contains("/")) {
+                finalList1.add(innerMap);
+                itr.remove();
+            }
+        }
+
+        // Remaining (same-directory) imports stay first; append the rest in order.
+        customImports.addAll(finalList1);
+        customImports.addAll(finalList2);
+        return customImports;
+    }
+
+    /**
+     * This method is used to reset the 'path' field after processing of the
+     * current import file is done. This is required because imports inside
+     * files are expressed relative to the importing file.
+     *
+     * @param currImportRelativeName the current import relative name
+     */
+    private void resetPathForRecursiveImports(String currImportRelativeName) {
+        path = getPath(path, currImportRelativeName);
+    }
+
+    /**
+     * Recursively resolves importFileName against the parent directories of
+     * 'path' until an existing file is found, handling nested import
+     * hierarchies inside CSARs.
+     *
+     * @param path           the path to resolve against
+     * @param importFileName the import file name (may contain "../" segments,
+     *                       which are stripped before resolution)
+     * @return an existing, forward-slash-normalized absolute path for the import
+     */
+    private String getPath(String path, String importFileName) {
+        String tempFullPath = (Paths.get(path).toAbsolutePath().getParent()
+                .toString() + File.separator + importFileName.replace("../", "")).replace('\\', '/');
+        String tempPartialPath = (Paths.get(path).toAbsolutePath().getParent().toString()).replace('\\', '/');
+        // Braces added around both branches; logic unchanged.
+        if (Files.exists(Paths.get(tempFullPath))) {
+            return tempFullPath;
+        }
+        // NOTE(review): if the import never exists, this recursion climbs to the
+        // filesystem root where getParent() returns null and throws a
+        // NullPointerException -- confirm callers guarantee the file exists.
+        return getPath(tempPartialPath, importFileName);
+    }
+
+    /**
+     * This method is used to get the full path name for the file which needs to
+     * be processed. It helps when imported files live in different directories
+     * and are referenced via relative paths.
+     *
+     * @param customImports the custom imports
+     * @return map with keys "importFileName" (resolved absolute path) and
+     *         "importRelativeName" (the path as written in the template)
+     */
+    private Map<String, String> getValidFileNameForImportReference(List<Map<String, Object>> customImports) {
+        String importFileName;
+        Map<String, String> retMap = new HashMap<>();
+        for (Map<String, Object> map1 : customImports) {
+            for (Map.Entry<String, Object> entry : map1.entrySet()) {
+                Map innerMostMap = (Map) entry.getValue();
+                Iterator<Map.Entry<String, String>> it = innerMostMap.entrySet().iterator();
+                while (it.hasNext()) {
+                    Map.Entry<String, String> val = it.next();
+                    // Paths containing "/" are resolved against the root template;
+                    // bare file names against the currently processed file.
+                    if (val.getValue().contains("/")) {
+                        importFileName = (Paths.get(rootPath).toAbsolutePath().getParent().toString() + File
+                                .separator + val.getValue().replace("../", "")).replace('\\', '/');
+                    } else {
+                        importFileName = (Paths.get(path).toAbsolutePath().getParent().toString() + File
+                                .separator + val.getValue().replace("../", "")).replace('\\', '/');
+                    }
+                    // NOTE(review): with multiple entries only the last one
+                    // survives in retMap -- callers pass single-import lists.
+                    retMap.put("importFileName", importFileName);
+                    retMap.put("importRelativeName", val.getValue());
+                }
+            }
+        }
+        return retMap;
+    }
+
+    /**
+     * This method is used to filter out imports which were already processed in
+     * a previous step. It handles cyclic dependencies in imports, which could
+     * otherwise cause a StackOverflowError.
+     *
+     * @param customImports     the custom imports (mutated in place)
+     * @param importNameDetails the import name details
+     * @return the list containing filtered imports
+     */
+    private List<Map<String, Object>> filterImportsForRecursion(List<Map<String, Object>>
+                                                                        customImports, Map<String,
+            String> importNameDetails) {
+        for (Map<String, Object> map1 : customImports) {
+            for (Map.Entry<String, Object> entry : map1.entrySet()) {
+                Map innerMostMap = (Map) entry.getValue();
+                Iterator<Map.Entry<String, String>> it = innerMostMap.entrySet().iterator();
+                while (it.hasNext()) {
+                    it.next();
+                    // Drop entries whose resolved file name was already processed.
+                    if (processedImports.contains(importNameDetails.get("importFileName"))) {
+                        it.remove();
+                    }
+                }
+            }
+        }
+
+        // Remove Empty elements
+        Iterator<Map<String, Object>> itr = customImports.iterator();
+        while (itr.hasNext()) {
+            Map innerMap = itr.next();
+            Predicate<Map> predicate = p -> p.values().isEmpty();
+            innerMap.values().removeIf(predicate);
+        }
+
+        return customImports;
+    }
+
+    /**
+     * Loads custom type definitions of the requested kinds, both from the
+     * files referenced in the "imports" section (via ImportsLoader) and from
+     * the current template itself.
+     *
+     * @param typeDefinitions either a String[] of section names or a single
+     *                        section-name String
+     * @param alImports       imports to load from; when null, this template's
+     *                        own "imports" section is used
+     * @return merged custom definitions, or null when ImportsLoader yields none
+     */
+    @SuppressWarnings("unchecked")
+    private LinkedHashMap<String, Object> _getCustomTypes(Object typeDefinitions, ArrayList<Object> alImports) {
+
+        // Handle custom types defined in imported template files
+        // This method loads the custom type definitions referenced in "imports"
+        // section of the TOSCA YAML template.
+
+        LinkedHashMap<String, Object> customDefs = new LinkedHashMap<String, Object>();
+        ArrayList<String> typeDefs = new ArrayList<String>();
+        if (typeDefinitions instanceof String[]) {
+            for (String s : (String[]) typeDefinitions) {
+                typeDefs.add(s);
+            }
+        } else {
+            typeDefs.add((String) typeDefinitions);
+        }
+
+        if (alImports == null) {
+            alImports = _tplImports();
+        }
+
+        if (alImports != null) {
+            ImportsLoader customService = new ImportsLoader(alImports, path, typeDefs, tpl);
+            ArrayList<LinkedHashMap<String, Object>> nestedToscaTpls = customService.getNestedToscaTpls();
+            // Side effect: remember nested templates that carry a topology_template.
+            _updateNestedToscaTplsWithTopology(nestedToscaTpls);
+
+            customDefs = customService.getCustomDefs();
+            if (customDefs == null) {
+                return null;
+            }
+        }
+
+        //Handle custom types defined in current template file
+        for (String td : typeDefs) {
+            if (!td.equals(IMPORTS)) {
+                LinkedHashMap<String, Object> innerCustomTypes = (LinkedHashMap<String, Object>) tpl.get(td);
+                if (innerCustomTypes != null) {
+                    // Local definitions override imported ones with the same key.
+                    customDefs.putAll(innerCustomTypes);
+                }
+            }
+        }
+        return customDefs;
+    }
+
+    /**
+     * Records every nested template that declares its own topology_template,
+     * keyed by file name, so it can later be matched against substitutable
+     * node templates. Already-known file names are not overwritten.
+     *
+     * @param nestedToscaTpls single-entry maps of file name to parsed template
+     */
+    private void _updateNestedToscaTplsWithTopology(ArrayList<LinkedHashMap<String, Object>> nestedToscaTpls) {
+        for (LinkedHashMap<String, Object> nestedTpl : nestedToscaTpls) {
+            // Each map carries exactly one fileName -> template pair.
+            for (Map.Entry<String, Object> entry : nestedTpl.entrySet()) {
+                @SuppressWarnings("unchecked")
+                LinkedHashMap<String, Object> toscaTpl = (LinkedHashMap<String, Object>) entry.getValue();
+                boolean hasTopology = toscaTpl.get(TOPOLOGY_TEMPLATE) != null;
+                boolean alreadyKnown = nestedToscaTplsWithTopology.get(entry.getKey()) != null;
+                if (hasTopology && !alreadyKnown) {
+                    nestedToscaTplsWithTopology.putAll(nestedTpl);
+                }
+            }
+        }
+    }
+
+    // multi level nesting - RECURSIVE
+    /**
+     * Walks the given topology's node templates and, for each node that is
+     * substituted by a recorded nested template, builds the nested topology,
+     * attaches its substitution mappings to the node, and recurses into it.
+     *
+     * @param tt the topology template whose nodes are checked for substitution
+     */
+    @SuppressWarnings("unchecked")
+    private void _handleNestedToscaTemplatesWithTopology(TopologyTemplate tt) {
+        // nestingLoopCounter is a field, so this bounds the TOTAL number of
+        // nested topologies processed across the whole recursion, not the depth.
+        if (++nestingLoopCounter > MAX_LEVELS) {
+            log.error("ToscaTemplate - _handleNestedToscaTemplatesWithTopology - Nested Topologies Loop: too many levels, aborting");
+            return;
+        }
+        // Reset Processed Imports for nested templates
+        this.processedImports = new HashSet<>();
+        for (Map.Entry<String, Object> me : nestedToscaTplsWithTopology.entrySet()) {
+            LinkedHashMap<String, Object> toscaTpl =
+                    (LinkedHashMap<String, Object>) me.getValue();
+            for (NodeTemplate nt : tt.getNodeTemplates()) {
+                if (_isSubMappedNode(nt, toscaTpl)) {
+                    // NOTE(review): this overwrites the instance field
+                    // parsedParams for the remainder of parsing -- confirm intended.
+                    parsedParams = _getParamsForNestedTemplate(nt);
+                    ArrayList<Object> alim = (ArrayList<Object>) toscaTpl.get(IMPORTS);
+                    LinkedHashMap<String, Object> topologyTpl =
+                            (LinkedHashMap<String, Object>) toscaTpl.get(TOPOLOGY_TEMPLATE);
+                    TopologyTemplate topologyWithSubMapping =
+                            new TopologyTemplate(topologyTpl,
+                                    _getAllCustomDefs(alim),
+                                    relationshipTypes,
+                                    parsedParams,
+                                    nt,
+                                    resolveGetInput);
+                    nt.setOriginComponentTemplate(topologyWithSubMapping);
+                    if (topologyWithSubMapping.getSubstitutionMappings() != null) {
+                        // Record nested topology templates in top level template
+                        //nestedToscaTemplatesWithTopology.add(topologyWithSubMapping);
+                        // Set substitution mapping object for mapped node
+                        nt.setSubMappingToscaTemplate(
+                                topologyWithSubMapping.getSubstitutionMappings());
+                        // Recurse: the nested topology may itself substitute nodes.
+                        _handleNestedToscaTemplatesWithTopology(topologyWithSubMapping);
+                    }
+                }
+            }
+        }
+    }
+
+// private void _handleNestedToscaTemplatesWithTopology() {
+// for(Map.Entry<String,Object> me: nestedToscaTplsWithTopology.entrySet()) {
+// String fname = me.getKey();
+// LinkedHashMap<String,Object> toscaTpl =
+// (LinkedHashMap<String,Object>)me.getValue();
+// for(NodeTemplate nt: nodeTemplates) {
+// if(_isSubMappedNode(nt,toscaTpl)) {
+// parsedParams = _getParamsForNestedTemplate(nt);
+// ArrayList<Object> alim = (ArrayList<Object>)toscaTpl.get(IMPORTS);
+// LinkedHashMap<String,Object> topologyTpl =
+// (LinkedHashMap<String,Object>)toscaTpl.get(TOPOLOGY_TEMPLATE);
+// TopologyTemplate topologyWithSubMapping =
+// new TopologyTemplate(topologyTpl,
+// //_getAllCustomDefs(null),
+// _getAllCustomDefs(alim),
+// relationshipTypes,
+// parsedParams,
+// nt);
+// if(topologyWithSubMapping.getSubstitutionMappings() != null) {
+// // Record nested topology templates in top level template
+// nestedToscaTemplatesWithTopology.add(topologyWithSubMapping);
+// // Set substitution mapping object for mapped node
+// nt.setSubMappingToscaTemplate(
+// topologyWithSubMapping.getSubstitutionMappings());
+// }
+// }
+// }
+// }
+// }
+
+    /**
+     * Validates the template's top-level structure: the mandatory
+     * tosca_definitions_version field, plus every top-level key against the
+     * standard SECTIONS and the version-specific ADDITIONAL_SECTIONS.
+     */
+    private void _validateField() {
+        String sVersion = _tplVersion();
+        if (sVersion == null) {
+            ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE245", String.format(
+                    "MissingRequiredField: Template is missing required field \"%s\"", DEFINITION_VERSION)));
+        } else {
+            _validateVersion(sVersion);
+            this.version = sVersion;
+        }
+
+        for (String sKey : tpl.keySet()) {
+            boolean known = false;
+            // First check the standard TOSCA sections.
+            for (String section : SECTIONS) {
+                if (section.equals(sKey)) {
+                    known = true;
+                    break;
+                }
+            }
+            // Then the extra sections allowed for this template version.
+            if (!known) {
+                ArrayList<String> extraSections = ADDITIONAL_SECTIONS.get(version);
+                known = extraSections != null && extraSections.contains(sKey);
+            }
+            if (!known) {
+                ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE246", String.format(
+                        "UnknownFieldError: Template contains unknown field \"%s\"",
+                        sKey)));
+            }
+        }
+    }
+
+    /**
+     * Checks the template version against the supported versions and, for
+     * extension versions, loads their additional type definitions.
+     *
+     * @param sVersion value of tosca_definitions_version
+     */
+    private void _validateVersion(String sVersion) {
+        // ArrayList.contains() replaces the manual scan; equality semantics
+        // (String.equals) are identical.
+        if (!VALID_TEMPLATE_VERSIONS.contains(sVersion)) {
+            ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE247", String.format(
+                    "InvalidTemplateVersion: \"%s\" is invalid. Valid versions are %s",
+                    sVersion, VALID_TEMPLATE_VERSIONS.toString())));
+        } else if ((!sVersion.equals("tosca_simple_yaml_1_0") && !sVersion.equals("tosca_simple_yaml_1_1"))) {
+            // Non-base versions come from ExtTools; pull in their definitions.
+            EntityType.updateDefinitions(sVersion);
+
+        }
+    }
+
+    /**
+     * Resolves the user-supplied path to the YAML template to parse: plain
+     * .yaml/.yml paths are returned as-is; .zip/.csar archives are validated
+     * and decompressed, and the extracted main template path is returned.
+     *
+     * @param _path user-supplied template path
+     * @return path of the YAML template to load, or null on failure (an issue
+     *         is recorded for unrecognized extensions)
+     * @throws JToscaException declared for signature compatibility
+     */
+    private String _getPath(String _path) throws JToscaException {
+        if (_path.toLowerCase().endsWith(".yaml") || _path.toLowerCase().endsWith(".yml")) {
+            return _path;
+        } else if (_path.toLowerCase().endsWith(".zip") || _path.toLowerCase().endsWith(".csar")) {
+            // a CSAR archive
+            CSAR csar = new CSAR(_path, isFile);
+            if (csar.validate()) {
+                try {
+                    csar.decompress();
+                    metaProperties = csar.getMetaProperties();
+                } catch (IOException e) {
+                    log.error("ToscaTemplate - _getPath - IOException trying to decompress {}", _path);
+                    return null;
+                }
+                isFile = true; // the file has been decompressed locally
+                csar.cleanup();
+                // Remember the temp dir so init() can delete it after parsing.
+                csarTempDir = csar.getTempDir();
+                return csar.getTempDir() + File.separator + csar.getMainTemplate();
+            }
+            // NOTE(review): when validate() fails we fall through and return
+            // null -- presumably validate() records its own issues; confirm.
+        } else {
+            ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE248", "ValueError: " + _path + " is not a valid file"));
+            return null;
+        }
+        return null;
+    }
+
+    /**
+     * Reports all validation issues collected during parsing at trace level.
+     *
+     * NOTE(review): despite the throws clause, this method currently only logs
+     * and never throws -- confirm whether critical issues should abort.
+     *
+     * @throws JToscaException declared but not thrown by the current implementation
+     */
+    private void verifyTemplate() throws JToscaException {
+        //Criticals
+        int validationIssuesCaught = ThreadLocalsHolder.getCollector().validationIssuesCaught();
+        if (validationIssuesCaught > 0) {
+            List<String> validationIssueStrings = ThreadLocalsHolder.getCollector().getValidationIssueReport();
+            log.trace("####################################################################################################");
+            log.trace("ToscaTemplate - verifyTemplate - {} Parsing Critical{} occurred...", validationIssuesCaught, (validationIssuesCaught > 1 ? "s" : ""));
+            for (String s : validationIssueStrings) {
+                log.trace("{}. CSAR name - {}", s, inputPath);
+            }
+            log.trace("####################################################################################################");
+        }
+
+    }
+
+    // Resolved path of the parsed YAML template (may differ from the input path for CSARs).
+    public String getPath() {
+        return path;
+    }
+
+    // Value of tosca_definitions_version.
+    public String getVersion() {
+        return version;
+    }
+
+    // Top-level template description; may be null.
+    public String getDescription() {
+        return description;
+    }
+
+    // The parsed topology_template section.
+    public TopologyTemplate getTopologyTemplate() {
+        return topologyTemplate;
+    }
+
+    // The parsed metadata section; null when absent.
+    public Metadata getMetaData() {
+        return metaData;
+    }
+
+    /**
+     * Returns the template inputs with any previously parsed annotations cleared.
+     *
+     * @return list of inputs, or null when the template defined none
+     */
+    public ArrayList<Input> getInputs() {
+        if (inputs == null) {
+            return null;
+        }
+        // Drop annotations possibly parsed by an earlier getInputs(true) call.
+        for (Input input : inputs) {
+            input.resetAnnotaions();
+        }
+        return inputs;
+    }
+
+    // Parsed outputs of the topology template.
+    public ArrayList<Output> getOutputs() {
+        return outputs;
+    }
+
+    // Parsed policies of the topology template.
+    public ArrayList<Policy> getPolicies() {
+        return policies;
+    }
+
+    // Parsed groups of the topology template.
+    public ArrayList<Group> getGroups() {
+        return groups;
+    }
+
+    // Parsed node templates of the topology template.
+    public ArrayList<NodeTemplate> getNodeTemplates() {
+        return nodeTemplates;
+    }
+
+    // CSAR meta properties for the given properties file name; null when unknown.
+    public LinkedHashMap<String, Object> getMetaProperties(String propertiesFile) {
+        return metaProperties.get(propertiesFile);
+    }
+
+// private boolean _isSubMappedNode(NodeTemplate nt,LinkedHashMap<String,Object> toscaTpl) {
+// // Return True if the nodetemple is substituted
+// if(nt != null && nt.getSubMappingToscaTemplate() == null &&
+// getSubMappingNodeType(toscaTpl).equals(nt.getType()) &&
+// nt.getInterfaces().size() < 1) {
+// return true;
+// }
+// return false;
+// }
+
+    /**
+     * Returns true if the node template is substituted by the given nested
+     * template: no substitution mapping assigned yet, its type equals the
+     * nested template's substitution-mapping node type, and it declares no
+     * interfaces.
+     *
+     * @param nt       candidate node template; may be null
+     * @param toscaTpl nested TOSCA template map
+     * @return true when nt should be substituted by toscaTpl
+     */
+    private boolean _isSubMappedNode(NodeTemplate nt, LinkedHashMap<String, Object> toscaTpl) {
+        if (nt == null || nt.getSubMappingToscaTemplate() != null) {
+            return false;
+        }
+        // getSubMappingNodeType() can return null (e.g. no topology_template);
+        // comparing from that side previously risked a NullPointerException.
+        String subMappingType = getSubMappingNodeType(toscaTpl);
+        return subMappingType != null
+                && subMappingType.equals(nt.getType())
+                && nt.getInterfaces().size() < 1;
+    }
+
+    /**
+     * Collects the parameters passed to a nested (substituted) template: the
+     * top-level parsed parameters plus the substituted node's property values.
+     *
+     * @param nt the substituted node template; may be null
+     * @return the merged parameter map
+     */
+    private LinkedHashMap<String, Object> _getParamsForNestedTemplate(NodeTemplate nt) {
+        // Return total params for nested_template
+        LinkedHashMap<String, Object> pparams;
+        if (parsedParams != null) {
+            // NOTE(review): this aliases the shared parsedParams map, so the
+            // put() below mutates it rather than a copy -- confirm intended.
+            pparams = parsedParams;
+        } else {
+            pparams = new LinkedHashMap<String, Object>();
+        }
+        if (nt != null) {
+            for (String pname : nt.getProperties().keySet()) {
+                pparams.put(pname, nt.getPropertyValue(pname));
+            }
+        }
+        return pparams;
+    }
+
+    /**
+     * Returns the substitution-mapping node type declared in the given nested
+     * template's topology_template section, or null when toscaTpl is null (or
+     * the section carries no mapping, per TopologyTemplate's handling).
+     *
+     * @param toscaTpl nested TOSCA template map; may be null
+     * @return the substitution mapping node type, or null
+     */
+    @SuppressWarnings("unchecked")
+    private String getSubMappingNodeType(LinkedHashMap<String, Object> toscaTpl) {
+        // Return substitution mappings node type
+        if (toscaTpl != null) {
+            return TopologyTemplate.getSubMappingNodeType(
+                    (LinkedHashMap<String, Object>) toscaTpl.get(TOPOLOGY_TEMPLATE));
+        }
+        return null;
+    }
+
+    /**
+     * Returns true if this template recorded nested topology templates.
+     *
+     * NOTE(review): nestedToscaTemplatesWithTopology is currently never added
+     * to (the add() call is commented out elsewhere), so this may always be
+     * false -- confirm intended.
+     */
+    public boolean hasNestedTemplates() {
+        // isEmpty() is clearer than size() >= 1; behavior is identical.
+        return nestedToscaTemplatesWithTopology != null
+                && !nestedToscaTemplatesWithTopology.isEmpty();
+    }
+
+    // Nested topology templates recorded at the top level (see hasNestedTemplates).
+    public ArrayList<TopologyTemplate> getNestedTemplates() {
+        return nestedToscaTemplatesWithTopology;
+    }
+
+    // Raw nested templates (file name -> parsed map) that declare a topology_template.
+    public ConcurrentHashMap<String, Object> getNestedTopologyTemplates() {
+        return nestedToscaTplsWithTopology;
+    }
+
+    /**
+     * Get datatypes.
+     *
+     * @return return list of datatypes.
+     */
+    public HashSet<DataType> getDataTypes() {
+        return dataTypes;
+    }
+
+ @Override
+ public String toString() {
+ return "ToscaTemplate{" +
+ "exttools=" + exttools +
+ ", VALID_TEMPLATE_VERSIONS=" + VALID_TEMPLATE_VERSIONS +
+ ", ADDITIONAL_SECTIONS=" + ADDITIONAL_SECTIONS +
+ ", isFile=" + isFile +
+ ", path='" + path + '\'' +
+ ", inputPath='" + inputPath + '\'' +
+ ", parsedParams=" + parsedParams +
+ ", tpl=" + tpl +
+ ", version='" + version + '\'' +
+ ", imports=" + imports +
+ ", relationshipTypes=" + relationshipTypes +
+ ", metaData=" + metaData +
+ ", description='" + description + '\'' +
+ ", topologyTemplate=" + topologyTemplate +
+ ", repositories=" + repositories +
+ ", inputs=" + inputs +
+ ", relationshipTemplates=" + relationshipTemplates +
+ ", nodeTemplates=" + nodeTemplates +
+ ", outputs=" + outputs +
+ ", policies=" + policies +
+ ", nestedToscaTplsWithTopology=" + nestedToscaTplsWithTopology +
+ ", nestedToscaTemplatesWithTopology=" + nestedToscaTemplatesWithTopology +
+ ", graph=" + graph +
+ ", csarTempDir='" + csarTempDir + '\'' +
+ ", nestingLoopCounter=" + nestingLoopCounter +
+ ", dataTypes=" + dataTypes +
+ '}';
+ }
+
+ public List<Input> getInputs(boolean annotationsRequired) {
+ if (inputs != null && annotationsRequired) {
+ inputs.stream().forEach(Input::parseAnnotations);
+ return inputs;
+ }
+ return getInputs();
+ }
+}
+
+/*python
+
+import logging
+import os
+
+from copy import deepcopy
+from toscaparser.common.exception import ValidationIssueCollector.collector
+from toscaparser.common.exception import InvalidTemplateVersion
+from toscaparser.common.exception import MissingRequiredFieldError
+from toscaparser.common.exception import UnknownFieldError
+from toscaparser.common.exception import ValidationError
+from toscaparser.elements.entity_type import update_definitions
+from toscaparser.extensions.exttools import ExtTools
+import org.openecomp.sdc.toscaparser.api.imports
+from toscaparser.prereq.csar import CSAR
+from toscaparser.repositories import Repository
+from toscaparser.topology_template import TopologyTemplate
+from toscaparser.tpl_relationship_graph import ToscaGraph
+from toscaparser.utils.gettextutils import _
+import org.openecomp.sdc.toscaparser.api.utils.yamlparser
+
+
+# TOSCA template key names
+SECTIONS = (DEFINITION_VERSION, DEFAULT_NAMESPACE, TEMPLATE_NAME,
+ TOPOLOGY_TEMPLATE, TEMPLATE_AUTHOR, TEMPLATE_VERSION,
+ DESCRIPTION, IMPORTS, DSL_DEFINITIONS, NODE_TYPES,
+ RELATIONSHIP_TYPES, RELATIONSHIP_TEMPLATES,
+ CAPABILITY_TYPES, ARTIFACT_TYPES, DATA_TYPES, INTERFACE_TYPES,
+ POLICY_TYPES, GROUP_TYPES, REPOSITORIES) = \
+ ('tosca_definitions_version', 'tosca_default_namespace',
+ 'template_name', 'topology_template', 'template_author',
+ 'template_version', 'description', 'imports', 'dsl_definitions',
+ 'node_types', 'relationship_types', 'relationship_templates',
+ 'capability_types', 'artifact_types', 'data_types',
+ 'interface_types', 'policy_types', 'group_types', 'repositories')
+# Sections that are specific to individual template definitions
+SPECIAL_SECTIONS = (METADATA) = ('metadata')
+
+log = logging.getLogger("tosca.model")
+
+YAML_LOADER = toscaparser.utils.yamlparser.load_yaml
+
+
+class ToscaTemplate(object):
+ exttools = ExtTools()
+
+ VALID_TEMPLATE_VERSIONS = ['tosca_simple_yaml_1_0']
+
+ VALID_TEMPLATE_VERSIONS.extend(exttools.get_versions())
+
+ ADDITIONAL_SECTIONS = {'tosca_simple_yaml_1_0': SPECIAL_SECTIONS}
+
+ ADDITIONAL_SECTIONS.update(exttools.get_sections())
+
+ '''Load the template data.'''
+ def __init__(self, path=None, parsed_params=None, a_file=True,
+ yaml_dict_tpl=None):
+
+ ValidationIssueCollector.collector.start()
+ self.a_file = a_file
+ self.input_path = None
+ self.path = None
+ self.tpl = None
+ self.nested_tosca_tpls_with_topology = {}
+ self.nested_tosca_templates_with_topology = []
+ if path:
+ self.input_path = path
+ self.path = self._get_path(path)
+ if self.path:
+ self.tpl = YAML_LOADER(self.path, self.a_file)
+ if yaml_dict_tpl:
+ msg = (_('Both path and yaml_dict_tpl arguments were '
+ 'provided. Using path and ignoring yaml_dict_tpl.'))
+ log.info(msg)
+ print(msg)
+ else:
+ if yaml_dict_tpl:
+ self.tpl = yaml_dict_tpl
+ else:
+ ValidationIssueCollector.collector.appendException(
+ ValueError(_('No path or yaml_dict_tpl was provided. '
+ 'There is nothing to parse.')))
+
+ if self.tpl:
+ self.parsed_params = parsed_params
+ self._validate_field()
+ self.version = self._tpl_version()
+ self.relationship_types = self._tpl_relationship_types()
+ self.description = self._tpl_description()
+ self.topology_template = self._topology_template()
+ self.repositories = self._tpl_repositories()
+ if self.topology_template.tpl:
+ self.inputs = self._inputs()
+ self.relationship_templates = self._relationship_templates()
+ self.nodetemplates = self._nodetemplates()
+ self.outputs = self._outputs()
+ self._handle_nested_tosca_templates_with_topology()
+ self.graph = ToscaGraph(self.nodetemplates)
+
+ ValidationIssueCollector.collector.stop()
+ self.verify_template()
+
+ def _topology_template(self):
+ return TopologyTemplate(self._tpl_topology_template(),
+ self._get_all_custom_defs(),
+ self.relationship_types,
+ self.parsed_params,
+ None)
+
+ def _inputs(self):
+ return self.topology_template.inputs
+
+ def _nodetemplates(self):
+ return self.topology_template.nodetemplates
+
+ def _relationship_templates(self):
+ return self.topology_template.relationship_templates
+
+ def _outputs(self):
+ return self.topology_template.outputs
+
+ def _tpl_version(self):
+ return self.tpl.get(DEFINITION_VERSION)
+
+ def _tpl_description(self):
+ desc = self.tpl.get(DESCRIPTION)
+ if desc:
+ return desc.rstrip()
+
+ def _tpl_imports(self):
+ return self.tpl.get(IMPORTS)
+
+ def _tpl_repositories(self):
+ repositories = self.tpl.get(REPOSITORIES)
+ reposit = []
+ if repositories:
+ for name, val in repositories.items():
+ reposits = Repository(name, val)
+ reposit.append(reposits)
+ return reposit
+
+ def _tpl_relationship_types(self):
+ return self._get_custom_types(RELATIONSHIP_TYPES)
+
+ def _tpl_relationship_templates(self):
+ topology_template = self._tpl_topology_template()
+ return topology_template.get(RELATIONSHIP_TEMPLATES)
+
+ def _tpl_topology_template(self):
+ return self.tpl.get(TOPOLOGY_TEMPLATE)
+
+ def _get_all_custom_defs(self, imports=None):
+ types = [IMPORTS, NODE_TYPES, CAPABILITY_TYPES, RELATIONSHIP_TYPES,
+ DATA_TYPES, INTERFACE_TYPES, POLICY_TYPES, GROUP_TYPES]
+ custom_defs_final = {}
+ custom_defs = self._get_custom_types(types, imports)
+ if custom_defs:
+ custom_defs_final.update(custom_defs)
+ if custom_defs.get(IMPORTS):
+ import_defs = self._get_all_custom_defs(
+ custom_defs.get(IMPORTS))
+ custom_defs_final.update(import_defs)
+
+ # As imports are not custom_types, removing from the dict
+ custom_defs_final.pop(IMPORTS, None)
+ return custom_defs_final
+
+ def _get_custom_types(self, type_definitions, imports=None):
+ """Handle custom types defined in imported template files
+
+ This method loads the custom type definitions referenced in "imports"
+ section of the TOSCA YAML template.
+ """
+ custom_defs = {}
+ type_defs = []
+ if not isinstance(type_definitions, list):
+ type_defs.append(type_definitions)
+ else:
+ type_defs = type_definitions
+
+ if not imports:
+ imports = self._tpl_imports()
+
+ if imports:
+ custom_service = toscaparser.imports.\
+ ImportsLoader(imports, self.path,
+ type_defs, self.tpl)
+
+ nested_tosca_tpls = custom_service.get_nested_tosca_tpls()
+ self._update_nested_tosca_tpls_with_topology(nested_tosca_tpls)
+
+ custom_defs = custom_service.get_custom_defs()
+ if not custom_defs:
+ return
+
+ # Handle custom types defined in current template file
+ for type_def in type_defs:
+ if type_def != IMPORTS:
+ inner_custom_types = self.tpl.get(type_def) or {}
+ if inner_custom_types:
+ custom_defs.update(inner_custom_types)
+ return custom_defs
+
+ def _update_nested_tosca_tpls_with_topology(self, nested_tosca_tpls):
+ for tpl in nested_tosca_tpls:
+ filename, tosca_tpl = list(tpl.items())[0]
+ if (tosca_tpl.get(TOPOLOGY_TEMPLATE) and
+ filename not in list(
+ self.nested_tosca_tpls_with_topology.keys())):
+ self.nested_tosca_tpls_with_topology.update(tpl)
+
+ def _handle_nested_tosca_templates_with_topology(self):
+ for fname, tosca_tpl in self.nested_tosca_tpls_with_topology.items():
+ for nodetemplate in self.nodetemplates:
+ if self._is_sub_mapped_node(nodetemplate, tosca_tpl):
+ parsed_params = self._get_params_for_nested_template(
+ nodetemplate)
+ topology_tpl = tosca_tpl.get(TOPOLOGY_TEMPLATE)
+ topology_with_sub_mapping = TopologyTemplate(
+ topology_tpl,
+ self._get_all_custom_defs(),
+ self.relationship_types,
+ parsed_params,
+ nodetemplate)
+ if topology_with_sub_mapping.substitution_mappings:
+ # Record nested topo templates in top level template
+ self.nested_tosca_templates_with_topology.\
+ append(topology_with_sub_mapping)
+ # Set substitution mapping object for mapped node
+ nodetemplate.sub_mapping_tosca_template = \
+ topology_with_sub_mapping.substitution_mappings
+
+ def _validate_field(self):
+ version = self._tpl_version()
+ if not version:
+ ValidationIssueCollector.collector.appendException(
+ MissingRequiredFieldError(what='Template',
+ required=DEFINITION_VERSION))
+ else:
+ self._validate_version(version)
+ self.version = version
+
+ for name in self.tpl:
+ if (name not in SECTIONS and
+ name not in self.ADDITIONAL_SECTIONS.get(version, ())):
+ ValidationIssueCollector.collector.appendException(
+ UnknownFieldError(what='Template', field=name))
+
+ def _validate_version(self, version):
+ if version not in self.VALID_TEMPLATE_VERSIONS:
+ ValidationIssueCollector.collector.appendException(
+ InvalidTemplateVersion(
+ what=version,
+ valid_versions=', '. join(self.VALID_TEMPLATE_VERSIONS)))
+ else:
+ if version != 'tosca_simple_yaml_1_0':
+ update_definitions(version)
+
+ def _get_path(self, path):
+ if path.lower().endswith(('.yaml','.yml')):
+ return path
+ elif path.lower().endswith(('.zip', '.csar')):
+ # a CSAR archive
+ csar = CSAR(path, self.a_file)
+ if csar.validate():
+ csar.decompress()
+ self.a_file = True # the file has been decompressed locally
+ return os.path.join(csar.temp_dir, csar.get_main_template())
+ else:
+ ValidationIssueCollector.collector.appendException(
+ ValueError(_('"%(path)s" is not a valid file.')
+ % {'path': path}))
+
+ def verify_template(self):
+ if ValidationIssueCollector.collector.exceptionsCaught():
+ if self.input_path:
+ raise ValidationError(
+ message=(_('\nThe input "%(path)s" failed validation with '
+ 'the following error(s): \n\n\t')
+ % {'path': self.input_path}) +
+ '\n\t'.join(ValidationIssueCollector.collector.getExceptionsReport()))
+ else:
+ raise ValidationError(
+ message=_('\nThe pre-parsed input failed validation with '
+ 'the following error(s): \n\n\t') +
+ '\n\t'.join(ValidationIssueCollector.collector.getExceptionsReport()))
+ else:
+ if self.input_path:
+ msg = (_('The input "%(path)s" successfully passed '
+ 'validation.') % {'path': self.input_path})
+ else:
+ msg = _('The pre-parsed input successfully passed validation.')
+
+ log.info(msg)
+
+ def _is_sub_mapped_node(self, nodetemplate, tosca_tpl):
+ """Return True if the nodetemple is substituted."""
+ if (nodetemplate and not nodetemplate.sub_mapping_tosca_template and
+ self.get_sub_mapping_node_type(tosca_tpl) == nodetemplate.type
+ and len(nodetemplate.interfaces) < 1):
+ return True
+ else:
+ return False
+
+ def _get_params_for_nested_template(self, nodetemplate):
+ """Return total params for nested_template."""
+ parsed_params = deepcopy(self.parsed_params) \
+ if self.parsed_params else {}
+ if nodetemplate:
+ for pname in nodetemplate.get_properties():
+ parsed_params.update({pname:
+ nodetemplate.get_property_value(pname)})
+ return parsed_params
+
+ def get_sub_mapping_node_type(self, tosca_tpl):
+ """Return substitution mappings node type."""
+ if tosca_tpl:
+ return TopologyTemplate.get_sub_mapping_node_type(
+ tosca_tpl.get(TOPOLOGY_TEMPLATE))
+
+ def _has_substitution_mappings(self):
+ """Return True if the template has valid substitution mappings."""
+ return self.topology_template is not None and \
+ self.topology_template.substitution_mappings is not None
+
+ def has_nested_templates(self):
+ """Return True if the tosca template has nested templates."""
+ return self.nested_tosca_templates_with_topology is not None and \
+ len(self.nested_tosca_templates_with_topology) >= 1
+*/ \ No newline at end of file
diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/Triggers.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/Triggers.java
new file mode 100644
index 0000000..c78978f
--- /dev/null
+++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/Triggers.java
@@ -0,0 +1,201 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.sdc.toscaparser.api;
+
+import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue;
+import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder;
+import org.onap.sdc.toscaparser.api.utils.ValidateUtils;
+
+import java.util.LinkedHashMap;
+
+public class Triggers extends EntityTemplate {
+
+ private static final String DESCRIPTION = "description";
+ private static final String EVENT = "event_type";
+ private static final String SCHEDULE = "schedule";
+ private static final String TARGET_FILTER = "target_filter";
+ private static final String CONDITION = "condition";
+ private static final String ACTION = "action";
+
+ private static final String[] SECTIONS = {
+ DESCRIPTION, EVENT, SCHEDULE, TARGET_FILTER, CONDITION, ACTION
+ };
+
+ private static final String METER_NAME = "meter_name";
+ private static final String CONSTRAINT = "constraint";
+ private static final String PERIOD = "period";
+ private static final String EVALUATIONS = "evaluations";
+ private static final String METHOD = "method";
+ private static final String THRESHOLD = "threshold";
+ private static final String COMPARISON_OPERATOR = "comparison_operator";
+
+ private static final String[] CONDITION_KEYNAMES = {
+ METER_NAME, CONSTRAINT, PERIOD, EVALUATIONS, METHOD, THRESHOLD, COMPARISON_OPERATOR
+ };
+
+ private String name;
+ private LinkedHashMap<String, Object> triggerTpl;
+
+ public Triggers(String name, LinkedHashMap<String, Object> triggerTpl) {
+ super(); // dummy. don't want super
+ this.name = name;
+ this.triggerTpl = triggerTpl;
+ validateKeys();
+ validateCondition();
+ validateInput();
+ }
+
+ public String getDescription() {
+ return (String) triggerTpl.get("description");
+ }
+
+ public String getEvent() {
+ return (String) triggerTpl.get("event_type");
+ }
+
+ public LinkedHashMap<String, Object> getSchedule() {
+ return (LinkedHashMap<String, Object>) triggerTpl.get("schedule");
+ }
+
+ public LinkedHashMap<String, Object> getTargetFilter() {
+ return (LinkedHashMap<String, Object>) triggerTpl.get("target_filter");
+ }
+
+ public LinkedHashMap<String, Object> getCondition() {
+ return (LinkedHashMap<String, Object>) triggerTpl.get("condition");
+ }
+
+ public LinkedHashMap<String, Object> getAction() {
+ return (LinkedHashMap<String, Object>) triggerTpl.get("action");
+ }
+
+ private void validateKeys() {
+ for (String key : triggerTpl.keySet()) {
+ boolean bFound = false;
+ for (int i = 0; i < SECTIONS.length; i++) {
+ if (key.equals(SECTIONS[i])) {
+ bFound = true;
+ break;
+ }
+ }
+ if (!bFound) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE249", String.format(
+ "UnknownFieldError: Triggers \"%s\" contains unknown field \"%s\"",
+ name, key)));
+ }
+ }
+ }
+
+ private void validateCondition() {
+ for (String key : getCondition().keySet()) {
+ boolean bFound = false;
+ for (int i = 0; i < CONDITION_KEYNAMES.length; i++) {
+ if (key.equals(CONDITION_KEYNAMES[i])) {
+ bFound = true;
+ break;
+ }
+ }
+ if (!bFound) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE250", String.format(
+ "UnknownFieldError: Triggers \"%s\" contains unknown field \"%s\"",
+ name, key)));
+ }
+ }
+ }
+
+ private void validateInput() {
+ for (String key : getCondition().keySet()) {
+ Object value = getCondition().get(key);
+ if (key.equals(PERIOD) || key.equals(EVALUATIONS)) {
+ ValidateUtils.validateInteger(value);
+ } else if (key.equals(THRESHOLD)) {
+ ValidateUtils.validateNumeric(value);
+ } else if (key.equals(METER_NAME) || key.equals(METHOD)) {
+ ValidateUtils.validateString(value);
+ }
+ }
+ }
+
+ @Override
+ public String toString() {
+ return "Triggers{"
+ + "name='" + name + '\''
+ + ", triggerTpl=" + triggerTpl
+ + '}';
+ }
+}
+
+/*python
+
+from toscaparser.common.exception import ValidationIssueCollector
+from toscaparser.common.exception import UnknownFieldError
+from toscaparser.entity_template import EntityTemplate
+
+SECTIONS = (DESCRIPTION, EVENT, SCHEDULE, TARGET_FILTER, CONDITION, ACTION) = \
+ ('description', 'event_type', 'schedule',
+ 'target_filter', 'condition', 'action')
+CONDITION_KEYNAMES = (CONTRAINT, PERIOD, EVALUATIONS, METHOD) = \
+ ('constraint', 'period', 'evaluations', 'method')
+log = logging.getLogger('tosca')
+
+
+class Triggers(EntityTemplate):
+
+ '''Triggers defined in policies of topology template'''
+
+ def __init__(self, name, trigger_tpl):
+ self.name = name
+ self.trigger_tpl = trigger_tpl
+ self._validate_keys()
+ self._validate_condition()
+
+ def get_description(self):
+ return self.trigger_tpl['description']
+
+ def get_event(self):
+ return self.trigger_tpl['event_type']
+
+ def get_schedule(self):
+ return self.trigger_tpl['schedule']
+
+ def get_target_filter(self):
+ return self.trigger_tpl['target_filter']
+
+ def get_condition(self):
+ return self.trigger_tpl['condition']
+
+ def get_action(self):
+ return self.trigger_tpl['action']
+
+ def _validate_keys(self):
+ for key in self.trigger_tpl.keys():
+ if key not in SECTIONS:
+ ValidationIssueCollector.appendException(
+ UnknownFieldError(what='Triggers "%s"' % self.name,
+ field=key))
+
+ def _validate_condition(self):
+ for key in self.get_condition():
+ if key not in CONDITION_KEYNAMES:
+ ValidationIssueCollector.appendException(
+ UnknownFieldError(what='Triggers "%s"' % self.name,
+ field=key))
+*/
diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/UnsupportedType.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/UnsupportedType.java
new file mode 100644
index 0000000..f2bb650
--- /dev/null
+++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/UnsupportedType.java
@@ -0,0 +1,101 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.sdc.toscaparser.api;
+
+import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue;
+import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder;
+
+public class UnsupportedType {
+
+ // Note: TOSCA spec version related
+
+ /*
+ The tosca.nodes.Storage.ObjectStorage and tosca.nodes.Storage.BlockStorage
+ used here as un_supported_types are part of the name changes in TOSCA spec
+ version 1.1. The original name as specified in version 1.0 are,
+ tosca.nodes.BlockStorage and tosca.nodes.ObjectStorage which are supported
+ by the tosca-parser. Since there are little overlapping in version support
+ currently in the tosca-parser, the names tosca.nodes.Storage.ObjectStorage
+ and tosca.nodes.Storage.BlockStorage are used here to demonstrate the usage
+ of un_supported_types. As tosca-parser move to provide support for version
+ 1.1 and higher, they will be removed.
+ */
+
+ private UnsupportedType() {
+ }
+
+ private static final String[] UNSUPPORTED_TYPES = {
+ "tosca.test.invalidtype",
+ "tosca.nodes.Storage.ObjectStorage",
+ "tosca.nodes.Storage.BlockStorage"};
+
+ public static boolean validateType(String entityType) {
+ for (String ust : UNSUPPORTED_TYPES) {
+ if (ust.equals(entityType)) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE251", String.format(
+ "UnsupportedTypeError: Entity type \"%s\" is not supported", entityType)));
+ return true;
+ }
+ }
+ return false;
+ }
+}
+
+/*python
+
+from toscaparser.common.exception import ValidationIssueCollector
+from toscaparser.common.exception import UnsupportedTypeError
+from toscaparser.utils.gettextutils import _
+
+log = logging.getLogger('tosca')
+
+
+class UnsupportedType(object):
+
+ """Note: TOSCA spec version related
+
+ The tosca.nodes.Storage.ObjectStorage and tosca.nodes.Storage.BlockStorage
+ used here as un_supported_types are part of the name changes in TOSCA spec
+ version 1.1. The original name as specified in version 1.0 are,
+ tosca.nodes.BlockStorage and tosca.nodes.ObjectStorage which are supported
+ by the tosca-parser. Since there are little overlapping in version support
+ currently in the tosca-parser, the names tosca.nodes.Storage.ObjectStorage
+ and tosca.nodes.Storage.BlockStorage are used here to demonstrate the usage
+ of un_supported_types. As tosca-parser move to provide support for version
+ 1.1 and higher, they will be removed.
+ """
+ un_supported_types = ['tosca.test.invalidtype',
+ 'tosca.nodes.Storage.ObjectStorage',
+ 'tosca.nodes.Storage.BlockStorage']
+
+ def __init__(self):
+ pass
+
+ @staticmethod
+ def validate_type(entitytype):
+ if entitytype in UnsupportedType.un_supported_types:
+ ValidationIssueCollector.appendException(UnsupportedTypeError(
+ what=_('%s')
+ % entitytype))
+ return True
+ else:
+ return False
+*/
diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/common/JToscaException.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/common/JToscaException.java
new file mode 100644
index 0000000..56416c6
--- /dev/null
+++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/common/JToscaException.java
@@ -0,0 +1,47 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.sdc.toscaparser.api.common;
+
/**
 * Checked exception carrying a JTosca error code alongside its message.
 *
 * Known codes:
 *   JE1001 - Meta file missing
 *   JE1002 - Invalid yaml content
 *   JE1003 - Entry-Definition not defined in meta file
 *   JE1004 - Entry-Definition file missing
 *   JE1005 - General Error
 *   JE1006 - General Error/Path not valid
 */
public class JToscaException extends Exception {

    private static final long serialVersionUID = 1L;

    // Machine-readable error code (JE....)
    private String code;

    /**
     * @param message human-readable description
     * @param code    machine-readable JE... error code
     */
    public JToscaException(String message, String code) {
        super(message);
        this.code = code;
    }

    /** @return the JE... error code */
    public String getCode() {
        return code;
    }

    /** Replaces the error code. */
    public void setCode(String code) {
        this.code = code;
    }
}
diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/common/JToscaValidationIssue.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/common/JToscaValidationIssue.java
new file mode 100644
index 0000000..cd5cbc5
--- /dev/null
+++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/common/JToscaValidationIssue.java
@@ -0,0 +1,75 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.sdc.toscaparser.api.common;
+
+import java.util.Objects;
+
+public class JToscaValidationIssue {
+
+ private String code;
+ private String message;
+
+
+ public JToscaValidationIssue(String code, String message) {
+ super();
+ this.code = code;
+ this.message = message;
+ }
+
+ public String getMessage() {
+ return message;
+ }
+
+ public void setMessage(String message) {
+ this.message = message;
+ }
+
+ public String getCode() {
+ return code;
+ }
+
+ public void setCode(String code) {
+ this.code = code;
+ }
+
+ @Override
+ public String toString() {
+ return "JToscaError [code=" + code + ", message=" + message + "]";
+ }
+
+ @Override
+ public boolean equals(final Object o) {
+ if (this == o) {
+ return true;
+ }
+ if (o == null || getClass() != o.getClass()) {
+ return false;
+ }
+ final JToscaValidationIssue that = (JToscaValidationIssue) o;
+ return Objects.equals(code, that.code) &&
+ Objects.equals(message, that.message);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(code, message);
+ }
+}
diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/common/TOSCAException.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/common/TOSCAException.java
new file mode 100644
index 0000000..c109ffd
--- /dev/null
+++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/common/TOSCAException.java
@@ -0,0 +1,58 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.sdc.toscaparser.api.common;
+
+import java.util.IllegalFormatException;
+
/**
 * Base TOSCA exception whose message is built from a subclass-provided
 * format template (msgFmt) and variadic arguments.
 *
 * Bug fixed: msgFmt is always null when this constructor body runs
 * (subclass field initializers execute only after the super constructor),
 * so the original String.format(null, ...) threw an uncaught
 * NullPointerException — the catch block only covered
 * IllegalFormatException. The format step is now skipped when no template
 * is set, leaving the default message in place. The default message typo
 * ("unkown") is also corrected.
 */
public class TOSCAException extends Exception {

    // When true, bad format templates are re-thrown instead of ignored
    private static boolean FATAL_EXCEPTION_FORMAT_ERRORS = false;

    // Human-readable message; keeps this default when no template is
    // available or formatting fails
    private String message = "An unknown exception has occurred";

    // Format template; null means "no template set"
    private String msgFmt = null;

    /**
     * Formats the message from msgFmt and the given arguments. With no
     * template, or a template that does not match the arguments, the
     * default message is kept (or the format error is re-thrown when the
     * fatal-format flag is on).
     *
     * @param strings arguments for the message format template
     */
    public TOSCAException(String... strings) {
        if (msgFmt != null) {
            try {
                message = String.format(msgFmt, (Object[]) strings);
            } catch (IllegalFormatException e) {
                // TODO log the bad template instead of silently ignoring it
                if (FATAL_EXCEPTION_FORMAT_ERRORS) {
                    throw e;
                }
            }
        }
    }

    /** @return the formatted (or default) message; python-port naming kept */
    public String __str__() {
        return message;
    }

    public static void generate_inv_schema_property_error(String name, String attr, String value, String valid_values) {
        //TODO
    }

    /** Toggles whether bad format templates are fatal. */
    public static void setFatalFormatException(boolean flag) {
        FATAL_EXCEPTION_FORMAT_ERRORS = flag;
    }

}
+
diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/common/ValidationIssueCollector.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/common/ValidationIssueCollector.java
new file mode 100644
index 0000000..71c0401
--- /dev/null
+++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/common/ValidationIssueCollector.java
@@ -0,0 +1,57 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.sdc.toscaparser.api.common;
+
+import java.util.*;
+
+// Perfectly good enough...
+
+public class ValidationIssueCollector {
+
+ private Map<String, JToscaValidationIssue> validationIssues = new HashMap<String, JToscaValidationIssue>();
+
+ public void appendValidationIssue(JToscaValidationIssue issue) {
+
+ validationIssues.put(issue.getMessage(), issue);
+
+ }
+
+ public List<String> getValidationIssueReport() {
+ List<String> report = new ArrayList<>();
+ if (!validationIssues.isEmpty()) {
+ for (JToscaValidationIssue exception : validationIssues.values()) {
+ report.add("[" + exception.getCode() + "]: " + exception.getMessage());
+ }
+ }
+
+ return report;
+ }
+
+ public Map<String, JToscaValidationIssue> getValidationIssues() {
+ return validationIssues;
+ }
+
+
+ public int validationIssuesCaught() {
+ return validationIssues.size();
+ }
+
+}
diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/ArtifactTypeDef.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/ArtifactTypeDef.java
new file mode 100644
index 0000000..9cf8c6c
--- /dev/null
+++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/ArtifactTypeDef.java
@@ -0,0 +1,121 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.sdc.toscaparser.api.elements;
+
+import java.util.LinkedHashMap;
+
+public class ArtifactTypeDef extends StatefulEntityType {
+    // TOSCA built-in artifact type definition.
+
+    private static final String ROOT_ARTIFACT_TYPE = "tosca.artifacts.Root";
+
+    private String type;
+    private LinkedHashMap<String, Object> customDef;
+    private LinkedHashMap<String, Object> properties;
+    private LinkedHashMap<String, Object> parentArtifacts;
+
+    @SuppressWarnings("unchecked")
+    public ArtifactTypeDef(String type, LinkedHashMap<String, Object> customDef) {
+        super(type, ARTIFACT_PREFIX, customDef);
+
+        this.type = type;
+        this.customDef = customDef;
+        // "defs" is populated by StatefulEntityType from built-in/custom definitions
+        properties = defs != null ? (LinkedHashMap<String, Object>) defs.get(PROPERTIES) : null;
+        parentArtifacts = getParentArtifacts();
+    }
+
+    /**
+     * Collects the definition of every ancestor artifact type by walking the
+     * "derived_from" chain up to (but excluding) tosca.artifacts.Root.
+     * Stops early when a link of the chain is missing from TOSCA_DEF or is
+     * not a map; the previous code dereferenced the lookup unconditionally,
+     * which threw a NullPointerException on a broken chain.
+     * NOTE(review): a cyclic "derived_from" chain would still loop forever.
+     */
+    @SuppressWarnings("unchecked")
+    private LinkedHashMap<String, Object> getParentArtifacts() {
+        LinkedHashMap<String, Object> artifacts = new LinkedHashMap<>();
+        String parentArtif = null;
+        if (getParentType() != null) {
+            parentArtif = getParentType().getType();
+        }
+        if (parentArtif != null && !parentArtif.isEmpty()) {
+            while (parentArtif != null && !parentArtif.equals(ROOT_ARTIFACT_TYPE)) {
+                Object ob = TOSCA_DEF.get(parentArtif);
+                if (!(ob instanceof LinkedHashMap)) {
+                    // unknown or malformed ancestor type: stop instead of NPE-ing
+                    break;
+                }
+                artifacts.put(parentArtif, ob);
+                parentArtif =
+                        (String) ((LinkedHashMap<String, Object>) ob).get("derived_from");
+            }
+        }
+        return artifacts;
+    }
+
+    /**
+     * Returns the artifact type this type is derived from, or null when the
+     * type has no definition or no "derived_from" entry.
+     */
+    public ArtifactTypeDef getParentType() {
+        if (defs == null) {
+            return null;
+        }
+        String partifactEntity = derivedFrom(defs);
+        if (partifactEntity != null) {
+            return new ArtifactTypeDef(partifactEntity, customDef);
+        }
+        return null;
+    }
+
+    /**
+     * Returns the definition of an artifact field by name, or null when the
+     * field (or the whole definition map) is absent.
+     */
+    public Object getArtifact(String name) {
+        if (defs != null) {
+            return defs.get(name);
+        }
+        return null;
+    }
+
+    public String getType() {
+        return type;
+    }
+
+}
+
+/*python
+class ArtifactTypeDef(StatefulEntityType):
+ '''TOSCA built-in artifacts type.'''
+
+ def __init__(self, atype, custom_def=None):
+ super(ArtifactTypeDef, self).__init__(atype, self.ARTIFACT_PREFIX,
+ custom_def)
+ self.type = atype
+ self.custom_def = custom_def
+ self.properties = None
+ if self.PROPERTIES in self.defs:
+ self.properties = self.defs[self.PROPERTIES]
+ self.parent_artifacts = self._get_parent_artifacts()
+
+ def _get_parent_artifacts(self):
+ artifacts = {}
+ parent_artif = self.parent_type.type if self.parent_type else None
+ if parent_artif:
+ while parent_artif != 'tosca.artifacts.Root':
+ artifacts[parent_artif] = self.TOSCA_DEF[parent_artif]
+ parent_artif = artifacts[parent_artif]['derived_from']
+ return artifacts
+
+ @property
+ def parent_type(self):
+ '''Return a artifact entity from which this entity is derived.'''
+ if not hasattr(self, 'defs'):
+ return None
+ partifact_entity = self.derived_from(self.defs)
+ if partifact_entity:
+ return ArtifactTypeDef(partifact_entity, self.custom_def)
+
+ def get_artifact(self, name):
+ '''Return the definition of an artifact field by name.'''
+ if name in self.defs:
+ return self.defs[name]
+*/
diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/AttributeDef.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/AttributeDef.java
new file mode 100644
index 0000000..e4a30f1
--- /dev/null
+++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/AttributeDef.java
@@ -0,0 +1,60 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.sdc.toscaparser.api.elements;
+
+import java.util.LinkedHashMap;
+
+public class AttributeDef {
+    // A TOSCA built-in attribute: an immutable (name, value, schema) triple.
+
+    private final String name;
+    private final Object value;
+    private final LinkedHashMap<String, Object> schema;
+
+    /**
+     * Creates an attribute definition.
+     * @param adName   attribute name
+     * @param adValue  attribute value (may be null)
+     * @param adSchema schema map describing the attribute (may be null)
+     */
+    public AttributeDef(String adName, Object adValue, LinkedHashMap<String, Object> adSchema) {
+        this.name = adName;
+        this.value = adValue;
+        this.schema = adSchema;
+    }
+
+    public String getName() {
+        return name;
+    }
+
+    public Object getValue() {
+        return value;
+    }
+
+    public LinkedHashMap<String, Object> getSchema() {
+        return schema;
+    }
+}
+
+/*python
+
+class AttributeDef(object):
+ '''TOSCA built-in Attribute type.'''
+
+ def __init__(self, name, value=None, schema=None):
+ self.name = name
+ self.value = value
+ self.schema = schema
+*/
diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/CapabilityTypeDef.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/CapabilityTypeDef.java
new file mode 100644
index 0000000..e3c24b3
--- /dev/null
+++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/CapabilityTypeDef.java
@@ -0,0 +1,240 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.sdc.toscaparser.api.elements;
+
+import java.util.ArrayList;
+import java.util.LinkedHashMap;
+import java.util.Map;
+
+public class CapabilityTypeDef extends StatefulEntityType {
+    // TOSCA built-in capability type definition.
+
+    private static final String TOSCA_TYPEURI_CAPABILITY_ROOT = "tosca.capabilities.Root";
+
+    private String name;
+    private String nodetype;
+    private LinkedHashMap<String, Object> customDef;
+    private LinkedHashMap<String, Object> properties;
+    private LinkedHashMap<String, Object> parentCapabilities;
+
+    @SuppressWarnings("unchecked")
+    public CapabilityTypeDef(String cname, String ctype, String ntype, LinkedHashMap<String, Object> ccustomDef) {
+        super(ctype, CAPABILITY_PREFIX, ccustomDef);
+
+        name = cname;
+        nodetype = ntype;
+        properties = null;
+        customDef = ccustomDef;
+        if (defs != null) {
+            properties = (LinkedHashMap<String, Object>) defs.get(PROPERTIES);
+        }
+        parentCapabilities = getParentCapabilities(customDef);
+    }
+
+    /**
+     * Returns property definition objects declared on this capability type,
+     * plus any parent-type properties not overridden by this type.
+     */
+    @SuppressWarnings("unchecked")
+    public ArrayList<PropertyDef> getPropertiesDefObjects() {
+        ArrayList<PropertyDef> propsdefs = new ArrayList<>();
+        LinkedHashMap<String, Object> parentProperties = new LinkedHashMap<>();
+        if (parentCapabilities != null) {
+            for (Map.Entry<String, Object> me : parentCapabilities.entrySet()) {
+                parentProperties.put(me.getKey(), ((LinkedHashMap<String, Object>) me.getValue()).get("properties"));
+            }
+        }
+        if (properties != null) {
+            for (Map.Entry<String, Object> me : properties.entrySet()) {
+                propsdefs.add(new PropertyDef(me.getKey(), null, (LinkedHashMap<String, Object>) me.getValue()));
+            }
+        }
+        // parentProperties is never null here (removed the redundant check)
+        for (Map.Entry<String, Object> me : parentProperties.entrySet()) {
+            LinkedHashMap<String, Object> props = (LinkedHashMap<String, Object>) me.getValue();
+            if (props != null) {
+                for (Map.Entry<String, Object> pe : props.entrySet()) {
+                    String prop = pe.getKey();
+                    LinkedHashMap<String, Object> schema = (LinkedHashMap<String, Object>) pe.getValue();
+                    // add a parent property only when not overridden by the child type
+                    if (properties == null || properties.get(prop) == null) {
+                        propsdefs.add(new PropertyDef(prop, null, schema));
+                    }
+                }
+            }
+        }
+        return propsdefs;
+    }
+
+    // Returns a name -> PropertyDef map over getPropertiesDefObjects().
+    public LinkedHashMap<String, PropertyDef> getPropertiesDef() {
+        LinkedHashMap<String, PropertyDef> pds = new LinkedHashMap<>();
+        for (PropertyDef pd : getPropertiesDefObjects()) {
+            pds.put(pd.getName(), pd);
+        }
+        return pds;
+    }
+
+    /**
+     * Returns the definition of the given property name, or null when absent.
+     * The previous implementation cast the raw property value to PropertyDef
+     * ("(PropertyDef) ...getPDValue()"), which threw a ClassCastException for
+     * any non-null value; the PropertyDef itself is returned now, matching
+     * the declared return type.
+     */
+    public PropertyDef getPropertyDefValue(String pdname) {
+        LinkedHashMap<String, PropertyDef> propsDef = getPropertiesDef();
+        if (propsDef != null) {
+            return propsDef.get(pdname);
+        }
+        return null;
+    }
+
+    /**
+     * Walks the "derived_from" chain up to (but excluding)
+     * tosca.capabilities.Root, collecting each ancestor's definition from the
+     * built-in TOSCA definitions or from customDef. Stops early when a chain
+     * link resolves to nothing; the previous code then dereferenced a null
+     * map entry and threw a NullPointerException.
+     */
+    @SuppressWarnings("unchecked")
+    private LinkedHashMap<String, Object> getParentCapabilities(LinkedHashMap<String, Object> customDef) {
+        LinkedHashMap<String, Object> capabilities = new LinkedHashMap<>();
+        CapabilityTypeDef parentCap = getParentType();
+        if (parentCap != null) {
+            String sParentCap = parentCap.getType();
+            while (sParentCap != null && !sParentCap.equals(TOSCA_TYPEURI_CAPABILITY_ROOT)) {
+                if (TOSCA_DEF.get(sParentCap) != null) {
+                    capabilities.put(sParentCap, TOSCA_DEF.get(sParentCap));
+                } else if (customDef != null && customDef.get(sParentCap) != null) {
+                    capabilities.put(sParentCap, customDef.get(sParentCap));
+                } else {
+                    // unknown type in the chain: stop instead of NPE-ing below
+                    break;
+                }
+                sParentCap = (String) ((LinkedHashMap<String, Object>) capabilities.get(sParentCap)).get("derived_from");
+            }
+        }
+        return capabilities;
+    }
+
+    /**
+     * Returns the capability type this capability is derived from, or null
+     * when there is no definition or no "derived_from" entry.
+     */
+    public CapabilityTypeDef getParentType() {
+        if (defs == null) {
+            return null;
+        }
+        String pnode = derivedFrom(defs);
+        if (pnode != null && !pnode.isEmpty()) {
+            return new CapabilityTypeDef(name, pnode, nodetype, customDef);
+        }
+        return null;
+    }
+
+    /**
+     * Returns true when this capability type, or any of its ancestor types,
+     * appears in typeNames.
+     */
+    public boolean inheritsFrom(ArrayList<String> typeNames) {
+        if (typeNames.contains(getType())) {
+            return true;
+        }
+        CapabilityTypeDef parent = getParentType();
+        return parent != null && parent.inheritsFrom(typeNames);
+    }
+
+    // getters/setters
+
+    public LinkedHashMap<String, Object> getProperties() {
+        return properties;
+    }
+
+    public String getName() {
+        return name;
+    }
+}
+
+/*python
+from toscaparser.elements.property_definition import PropertyDef
+from toscaparser.elements.statefulentitytype import StatefulEntityType
+
+
+class CapabilityTypeDef(StatefulEntityType):
+ '''TOSCA built-in capabilities type.'''
+ TOSCA_TYPEURI_CAPABILITY_ROOT = 'tosca.capabilities.Root'
+
+ def __init__(self, name, ctype, ntype, custom_def=None):
+ self.name = name
+ super(CapabilityTypeDef, self).__init__(ctype, self.CAPABILITY_PREFIX,
+ custom_def)
+ self.nodetype = ntype
+ self.properties = None
+ self.custom_def = custom_def
+ if self.PROPERTIES in self.defs:
+ self.properties = self.defs[self.PROPERTIES]
+ self.parent_capabilities = self._get_parent_capabilities(custom_def)
+
+ def get_properties_def_objects(self):
+ '''Return a list of property definition objects.'''
+ properties = []
+ parent_properties = {}
+ if self.parent_capabilities:
+ for type, value in self.parent_capabilities.items():
+ parent_properties[type] = value.get('properties')
+ if self.properties:
+ for prop, schema in self.properties.items():
+ properties.append(PropertyDef(prop, None, schema))
+ if parent_properties:
+ for parent, props in parent_properties.items():
+ for prop, schema in props.items():
+ # add parent property if not overridden by children type
+ if not self.properties or \
+ prop not in self.properties.keys():
+ properties.append(PropertyDef(prop, None, schema))
+ return properties
+
+ def get_properties_def(self):
+ '''Return a dictionary of property definition name-object pairs.'''
+ return {prop.name: prop
+ for prop in self.get_properties_def_objects()}
+
+ def get_property_def_value(self, name):
+ '''Return the definition of a given property name.'''
+ props_def = self.get_properties_def()
+ if props_def and name in props_def:
+ return props_def[name].value
+
+ def _get_parent_capabilities(self, custom_def=None):
+ capabilities = {}
+ parent_cap = self.parent_type
+ if parent_cap:
+ parent_cap = parent_cap.type
+ while parent_cap != self.TOSCA_TYPEURI_CAPABILITY_ROOT:
+ if parent_cap in self.TOSCA_DEF.keys():
+ capabilities[parent_cap] = self.TOSCA_DEF[parent_cap]
+ elif custom_def and parent_cap in custom_def.keys():
+ capabilities[parent_cap] = custom_def[parent_cap]
+ parent_cap = capabilities[parent_cap]['derived_from']
+ return capabilities
+
+ @property
+ def parent_type(self):
+ '''Return a capability this capability is derived from.'''
+ if not hasattr(self, 'defs'):
+ return None
+ pnode = self.derived_from(self.defs)
+ if pnode:
+ return CapabilityTypeDef(self.name, pnode,
+ self.nodetype, self.custom_def)
+
+ def inherits_from(self, type_names):
+ '''Check this capability is in type_names
+
+ Check if this capability or some of its parent types
+ are in the list of types: type_names
+ '''
+ if self.type in type_names:
+ return True
+ elif self.parent_type:
+ return self.parent_type.inherits_from(type_names)
+ else:
+ return False*/
diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/DataType.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/DataType.java
new file mode 100644
index 0000000..d8cf460
--- /dev/null
+++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/DataType.java
@@ -0,0 +1,136 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.sdc.toscaparser.api.elements;
+
+import java.util.ArrayList;
+import java.util.LinkedHashMap;
+
+public class DataType extends StatefulEntityType {
+    // TOSCA built-in and user-defined complex data type.
+
+    LinkedHashMap<String, Object> customDef;
+
+    public DataType(String _dataTypeName, LinkedHashMap<String, Object> _customDef) {
+        super(_dataTypeName, DATATYPE_NETWORK_PREFIX, _customDef);
+        customDef = _customDef;
+    }
+
+    /**
+     * Returns the datatype this datatype is derived from, or null when there
+     * is no definition or no "derived_from" entry.
+     */
+    public DataType getParentType() {
+        if (defs == null) {
+            return null;
+        }
+        String parentTypeName = derivedFrom(defs);
+        return parentTypeName == null ? null : new DataType(parentTypeName, customDef);
+    }
+
+    // Returns the "type" section of the datatype schema (null without defs).
+    public String getValueType() {
+        return defs == null ? null : (String) entityValue(defs, "type");
+    }
+
+    /**
+     * Returns the property definition objects declared on this type and on
+     * every ancestor type, this type's own definitions first.
+     */
+    public ArrayList<PropertyDef> getAllPropertiesObjects() {
+        ArrayList<PropertyDef> collected = getPropertiesDefObjects();
+        for (DataType ancestor = getParentType(); ancestor != null; ancestor = ancestor.getParentType()) {
+            collected.addAll(ancestor.getPropertiesDefObjects());
+        }
+        return collected;
+    }
+
+    // Returns a name -> PropertyDef map over getAllPropertiesObjects().
+    public LinkedHashMap<String, PropertyDef> getAllProperties() {
+        LinkedHashMap<String, PropertyDef> byName = new LinkedHashMap<>();
+        for (PropertyDef pd : getAllPropertiesObjects()) {
+            byName.put(pd.getName(), pd);
+        }
+        return byName;
+    }
+
+    // Returns the value of the named property, or null when it is not defined.
+    public Object getAllPropertyValue(String name) {
+        PropertyDef pd = getAllProperties().get(name);
+        return pd == null ? null : pd.getPDValue();
+    }
+
+    public LinkedHashMap<String, Object> getDefs() {
+        return defs;
+    }
+
+}
+
+/*python
+
+from toscaparser.elements.statefulentitytype import StatefulEntityType
+
+
+class DataType(StatefulEntityType):
+ '''TOSCA built-in and user defined complex data type.'''
+
+ def __init__(self, datatypename, custom_def=None):
+ super(DataType, self).__init__(datatypename,
+ self.DATATYPE_NETWORK_PREFIX,
+ custom_def)
+ self.custom_def = custom_def
+
+ @property
+ def parent_type(self):
+ '''Return a datatype this datatype is derived from.'''
+ ptype = self.derived_from(self.defs)
+ if ptype:
+ return DataType(ptype, self.custom_def)
+ return None
+
+ @property
+ def value_type(self):
+ '''Return 'type' section in the datatype schema.'''
+ return self.entity_value(self.defs, 'type')
+
+ def get_all_properties_objects(self):
+ '''Return all properties objects defined in type and parent type.'''
+ props_def = self.get_properties_def_objects()
+ ptype = self.parent_type
+ while ptype:
+ props_def.extend(ptype.get_properties_def_objects())
+ ptype = ptype.parent_type
+ return props_def
+
+ def get_all_properties(self):
+ '''Return a dictionary of all property definition name-object pairs.'''
+ return {prop.name: prop
+ for prop in self.get_all_properties_objects()}
+
+ def get_all_property_value(self, name):
+ '''Return the value of a given property name.'''
+ props_def = self.get_all_properties()
+ if props_def and name in props_def.key():
+ return props_def[name].value
+*/
diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/EntityType.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/EntityType.java
new file mode 100644
index 0000000..efc6ac9
--- /dev/null
+++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/EntityType.java
@@ -0,0 +1,436 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.sdc.toscaparser.api.elements;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.ArrayList;
+import java.util.LinkedHashMap;
+import java.util.Map;
+
+import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue;
+import org.onap.sdc.toscaparser.api.utils.CopyUtils;
+import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder;
+import org.onap.sdc.toscaparser.api.extensions.ExtTools;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.yaml.snakeyaml.Yaml;
+
+public class EntityType {
+
+ private static Logger log = LoggerFactory.getLogger(EntityType.class.getName());
+
+ private static final String TOSCA_DEFINITION_1_0_YAML = "TOSCA_definition_1_0.yaml";
+ protected static final String DERIVED_FROM = "derived_from";
+ protected static final String PROPERTIES = "properties";
+ protected static final String ATTRIBUTES = "attributes";
+ protected static final String REQUIREMENTS = "requirements";
+ protected static final String INTERFACES = "interfaces";
+ protected static final String CAPABILITIES = "capabilities";
+ protected static final String TYPE = "type";
+ protected static final String ARTIFACTS = "artifacts";
+
+ @SuppressWarnings("unused")
+ private static final String SECTIONS[] = {
+ DERIVED_FROM, PROPERTIES, ATTRIBUTES, REQUIREMENTS,
+ INTERFACES, CAPABILITIES, TYPE, ARTIFACTS
+ };
+
+ public static final String TOSCA_DEF_SECTIONS[] = {
+ "node_types", "data_types", "artifact_types",
+ "group_types", "relationship_types",
+ "capability_types", "interface_types",
+ "policy_types"};
+
+
+ // TOSCA definition file
+ //private final static String path = EntityType.class.getProtectionDomain().getCodeSource().getLocation().getPath();
+
+ //private final static String path = EntityType.class.getClassLoader().getResource("TOSCA_definition_1_0.yaml").getFile();
+ //private final static String TOSCA_DEF_FILE = EntityType.class.getClassLoader().getResourceAsStream("TOSCA_definition_1_0.yaml");
+
+ private static LinkedHashMap<String, Object> TOSCA_DEF_LOAD_AS_IS = loadTdf();
+
+ //EntityType.class.getClassLoader().getResourceAsStream("TOSCA_definition_1_0.yaml");
+
+ @SuppressWarnings("unchecked")
+ private static LinkedHashMap<String, Object> loadTdf() {
+ String toscaDefLocation = EntityType.class.getClassLoader().getResource(TOSCA_DEFINITION_1_0_YAML).getFile();
+ InputStream input = EntityType.class.getClassLoader().getResourceAsStream(TOSCA_DEFINITION_1_0_YAML);
+ if (input == null) {
+ log.error("EntityType - loadTdf - Couldn't load TOSCA_DEF_FILE {}", toscaDefLocation);
+ }
+ Yaml yaml = new Yaml();
+ Object loaded = yaml.load(input);
+ //@SuppressWarnings("unchecked")
+ return (LinkedHashMap<String, Object>) loaded;
+ }
+
+ // Map of definition with pre-loaded values of TOSCA_DEF_FILE_SECTIONS
+ public static LinkedHashMap<String, Object> TOSCA_DEF;
+
+ static {
+ TOSCA_DEF = new LinkedHashMap<String, Object>();
+ for (String section : TOSCA_DEF_SECTIONS) {
+ @SuppressWarnings("unchecked")
+ LinkedHashMap<String, Object> value = (LinkedHashMap<String, Object>) TOSCA_DEF_LOAD_AS_IS.get(section);
+ if (value != null) {
+ for (String key : value.keySet()) {
+ TOSCA_DEF.put(key, value.get(key));
+ }
+ }
+ }
+ }
+
+ public static final String DEPENDSON = "tosca.relationships.DependsOn";
+ public static final String HOSTEDON = "tosca.relationships.HostedOn";
+ public static final String CONNECTSTO = "tosca.relationships.ConnectsTo";
+ public static final String ATTACHESTO = "tosca.relationships.AttachesTo";
+ public static final String LINKSTO = "tosca.relationships.network.LinksTo";
+ public static final String BINDSTO = "tosca.relationships.network.BindsTo";
+
+ public static final String RELATIONSHIP_TYPE[] = {
+ "tosca.relationships.DependsOn",
+ "tosca.relationships.HostedOn",
+ "tosca.relationships.ConnectsTo",
+ "tosca.relationships.AttachesTo",
+ "tosca.relationships.network.LinksTo",
+ "tosca.relationships.network.BindsTo"};
+
+ public static final String NODE_PREFIX = "tosca.nodes.";
+ public static final String RELATIONSHIP_PREFIX = "tosca.relationships.";
+ public static final String CAPABILITY_PREFIX = "tosca.capabilities.";
+ public static final String INTERFACE_PREFIX = "tosca.interfaces.";
+ public static final String ARTIFACT_PREFIX = "tosca.artifacts.";
+ public static final String POLICY_PREFIX = "tosca.policies.";
+ public static final String GROUP_PREFIX = "tosca.groups.";
+ //currently the data types are defined only for network
+ // but may have changes in the future.
+ public static final String DATATYPE_PREFIX = "tosca.datatypes.";
+ public static final String DATATYPE_NETWORK_PREFIX = DATATYPE_PREFIX + "network.";
+ public static final String TOSCA = "tosca";
+
+ protected String type;
+ protected LinkedHashMap<String, Object> defs = null;
+
+ public Object getParentType() {
+ return null;
+ }
+
+ public String derivedFrom(LinkedHashMap<String, Object> defs) {
+ // Return a type this type is derived from
+ return (String) entityValue(defs, "derived_from");
+ }
+
+ public boolean isDerivedFrom(String type_str) {
+ // Check if object inherits from the given type
+ // Returns true if this object is derived from 'type_str'
+ // False otherwise.
+ if (type == null || this.type.isEmpty()) {
+ return false;
+ } else if (type == type_str) {
+ return true;
+ } else if (getParentType() != null) {
+ return ((EntityType) getParentType()).isDerivedFrom(type_str);
+ } else {
+ return false;
+ }
+ }
+
+ public Object entityValue(LinkedHashMap<String, Object> defs, String key) {
+ if (defs != null) {
+ return defs.get(key);
+ }
+ return null;
+ }
+
+ @SuppressWarnings("unchecked")
+ public Object getValue(String ndtype, LinkedHashMap<String, Object> _defs, boolean parent) {
+ Object value = null;
+ if (_defs == null) {
+ if (defs == null) {
+ return null;
+ }
+ _defs = this.defs;
+ }
+ Object defndt = _defs.get(ndtype);
+ if (defndt != null) {
+ // copy the value to avoid that next operations add items in the
+ // item definitions
+ //value = copy.copy(defs[ndtype])
+ value = CopyUtils.copyLhmOrAl(defndt);
+ }
+
+ if (parent) {
+ EntityType p = this;
+ if (p != null) {
+ while (p != null) {
+ if (p.defs != null && p.defs.get(ndtype) != null) {
+ // get the parent value
+ Object parentValue = p.defs.get(ndtype);
+ if (value != null) {
+ if (value instanceof LinkedHashMap) {
+ for (Map.Entry<String, Object> me : ((LinkedHashMap<String, Object>) parentValue).entrySet()) {
+ String k = me.getKey();
+ if (((LinkedHashMap<String, Object>) value).get(k) == null) {
+ ((LinkedHashMap<String, Object>) value).put(k, me.getValue());
+ }
+ }
+ }
+ if (value instanceof ArrayList) {
+ for (Object pValue : (ArrayList<Object>) parentValue) {
+ if (!((ArrayList<Object>) value).contains(pValue)) {
+ ((ArrayList<Object>) value).add(pValue);
+ }
+ }
+ }
+ } else {
+ // value = copy.copy(parent_value)
+ value = CopyUtils.copyLhmOrAl(parentValue);
+ }
+ }
+ p = (EntityType) p.getParentType();
+ }
+ }
+ }
+
+ return value;
+ }
+
+ @SuppressWarnings("unchecked")
+ public Object getDefinition(String ndtype) {
+ Object value = null;
+ LinkedHashMap<String, Object> _defs;
+ // no point in hasattr, because we have it, and it
+ // doesn't do anything except emit an exception anyway
+ //if not hasattr(self, 'defs'):
+ // defs = None
+ // ValidationIssueCollector.appendException(
+ // ValidationError(message="defs is " + str(defs)))
+ //else:
+ // defs = self.defs
+ _defs = this.defs;
+
+
+ if (_defs != null && _defs.get(ndtype) != null) {
+ value = _defs.get(ndtype);
+ }
+
+ Object p = getParentType();
+ if (p != null) {
+ Object inherited = ((EntityType) p).getDefinition(ndtype);
+ if (inherited != null) {
+ // inherited = dict(inherited) WTF?!?
+ if (value == null) {
+ value = inherited;
+ } else {
+ //?????
+ //inherited.update(value)
+ //value.update(inherited)
+ for (Map.Entry<String, Object> me : ((LinkedHashMap<String, Object>) inherited).entrySet()) {
+ ((LinkedHashMap<String, Object>) value).put(me.getKey(), me.getValue());
+ }
+ }
+ }
+ }
+ return value;
+ }
+
+ public static void updateDefinitions(String version) {
+ ExtTools exttools = new ExtTools();
+ String extensionDefsFile = exttools.getDefsFile(version);
+
+ try (InputStream input = EntityType.class.getClassLoader().getResourceAsStream(extensionDefsFile);) {
+ Yaml yaml = new Yaml();
+ LinkedHashMap<String, Object> nfvDefFile = (LinkedHashMap<String, Object>) yaml.load(input);
+ LinkedHashMap<String, Object> nfvDef = new LinkedHashMap<>();
+ for (String section : TOSCA_DEF_SECTIONS) {
+ if (nfvDefFile.get(section) != null) {
+ LinkedHashMap<String, Object> value =
+ (LinkedHashMap<String, Object>) nfvDefFile.get(section);
+ for (String key : value.keySet()) {
+ nfvDef.put(key, value.get(key));
+ }
+ }
+ }
+ TOSCA_DEF.putAll(nfvDef);
+ } catch (IOException e) {
+ log.error("EntityType - updateDefinitions - Failed to update definitions from defs file {}", extensionDefsFile);
+ log.error("Exception:", e);
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE280",
+ String.format("Failed to update definitions from defs file \"%s\" ", extensionDefsFile)));
+ return;
+ }
+ }
+}
+
+/*python
+
+from toscaparser.common.exception import ValidationIssueCollector
+from toscaparser.common.exception import ValidationError
+from toscaparser.extensions.exttools import ExtTools
+import org.onap.sdc.toscaparser.api.utils.yamlparser
+
+log = logging.getLogger('tosca')
+
+
+class EntityType(object):
+ '''Base class for TOSCA elements.'''
+
+ SECTIONS = (DERIVED_FROM, PROPERTIES, ATTRIBUTES, REQUIREMENTS,
+ INTERFACES, CAPABILITIES, TYPE, ARTIFACTS) = \
+ ('derived_from', 'properties', 'attributes', 'requirements',
+ 'interfaces', 'capabilities', 'type', 'artifacts')
+
+ TOSCA_DEF_SECTIONS = ['node_types', 'data_types', 'artifact_types',
+ 'group_types', 'relationship_types',
+ 'capability_types', 'interface_types',
+ 'policy_types']
+
+ '''TOSCA definition file.'''
+ TOSCA_DEF_FILE = os.path.join(
+ os.path.dirname(os.path.abspath(__file__)),
+ "TOSCA_definition_1_0.yaml")
+
+ loader = toscaparser.utils.yamlparser.load_yaml
+
+ TOSCA_DEF_LOAD_AS_IS = loader(TOSCA_DEF_FILE)
+
+ # Map of definition with pre-loaded values of TOSCA_DEF_FILE_SECTIONS
+ TOSCA_DEF = {}
+ for section in TOSCA_DEF_SECTIONS:
+ if section in TOSCA_DEF_LOAD_AS_IS.keys():
+ value = TOSCA_DEF_LOAD_AS_IS[section]
+ for key in value.keys():
+ TOSCA_DEF[key] = value[key]
+
+ RELATIONSHIP_TYPE = (DEPENDSON, HOSTEDON, CONNECTSTO, ATTACHESTO,
+ LINKSTO, BINDSTO) = \
+ ('tosca.relationships.DependsOn',
+ 'tosca.relationships.HostedOn',
+ 'tosca.relationships.ConnectsTo',
+ 'tosca.relationships.AttachesTo',
+ 'tosca.relationships.network.LinksTo',
+ 'tosca.relationships.network.BindsTo')
+
+ NODE_PREFIX = 'tosca.nodes.'
+ RELATIONSHIP_PREFIX = 'tosca.relationships.'
+ CAPABILITY_PREFIX = 'tosca.capabilities.'
+ INTERFACE_PREFIX = 'tosca.interfaces.'
+ ARTIFACT_PREFIX = 'tosca.artifacts.'
+ POLICY_PREFIX = 'tosca.policies.'
+ GROUP_PREFIX = 'tosca.groups.'
+ # currently the data types are defined only for network
+ # but may have changes in the future.
+ DATATYPE_PREFIX = 'tosca.datatypes.'
+ DATATYPE_NETWORK_PREFIX = DATATYPE_PREFIX + 'network.'
+ TOSCA = 'tosca'
+
+ def derived_from(self, defs):
+ '''Return a type this type is derived from.'''
+ return self.entity_value(defs, 'derived_from')
+
+ def is_derived_from(self, type_str):
+ '''Check if object inherits from the given type.
+
+ Returns true if this object is derived from 'type_str'.
+ False otherwise.
+ '''
+ if not self.type:
+ return False
+ elif self.type == type_str:
+ return True
+ elif self.parent_type:
+ return self.parent_type.is_derived_from(type_str)
+ else:
+ return False
+
+ def entity_value(self, defs, key):
+ if key in defs:
+ return defs[key]
+
+ def get_value(self, ndtype, defs=None, parent=None):
+ value = None
+ if defs is None:
+ if not hasattr(self, 'defs'):
+ return None
+ defs = self.defs
+ if ndtype in defs:
+ # copy the value to avoid that next operations add items in the
+ # item definitions
+ value = copy.copy(defs[ndtype])
+ if parent:
+ p = self
+ if p:
+ while p:
+ if ndtype in p.defs:
+ # get the parent value
+ parent_value = p.defs[ndtype]
+ if value:
+ if isinstance(value, dict):
+ for k, v in parent_value.items():
+ if k not in value.keys():
+ value[k] = v
+ if isinstance(value, list):
+ for p_value in parent_value:
+ if p_value not in value:
+ value.append(p_value)
+ else:
+ value = copy.copy(parent_value)
+ p = p.parent_type
+ return value
+
+ def get_definition(self, ndtype):
+ value = None
+ if not hasattr(self, 'defs'):
+ defs = None
+ ValidationIssueCollector.appendException(
+ ValidationError(message="defs is " + str(defs)))
+ else:
+ defs = self.defs
+ if defs is not None and ndtype in defs:
+ value = defs[ndtype]
+ p = self.parent_type
+ if p:
+ inherited = p.get_definition(ndtype)
+ if inherited:
+ inherited = dict(inherited)
+ if not value:
+ value = inherited
+ else:
+ inherited.update(value)
+ value.update(inherited)
+ return value
+
+
+def update_definitions(version):
+ exttools = ExtTools()
+ extension_defs_file = exttools.get_defs_file(version)
+ loader = toscaparser.utils.yamlparser.load_yaml
+ nfv_def_file = loader(extension_defs_file)
+ nfv_def = {}
+ for section in EntityType.TOSCA_DEF_SECTIONS:
+ if section in nfv_def_file.keys():
+ value = nfv_def_file[section]
+ for key in value.keys():
+ nfv_def[key] = value[key]
+ EntityType.TOSCA_DEF.update(nfv_def)
+*/
diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/GroupType.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/GroupType.java
new file mode 100644
index 0000000..db6f2b7
--- /dev/null
+++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/GroupType.java
@@ -0,0 +1,263 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.sdc.toscaparser.api.elements;
+
+import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue;
+import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder;
+
+import java.util.ArrayList;
+import java.util.LinkedHashMap;
+import java.util.Map;
+
+public class GroupType extends StatefulEntityType {
+
+ private static final String DERIVED_FROM = "derived_from";
+ private static final String VERSION = "version";
+ private static final String METADATA = "metadata";
+ private static final String DESCRIPTION = "description";
+ private static final String PROPERTIES = "properties";
+ private static final String MEMBERS = "members";
+ private static final String INTERFACES = "interfaces";
+
+ private static final String[] SECTIONS = {
+ DERIVED_FROM, VERSION, METADATA, DESCRIPTION, PROPERTIES, MEMBERS, INTERFACES};
+
+ private String groupType;
+ private LinkedHashMap<String, Object> customDef;
+ private String groupDescription;
+ private String groupVersion;
+ //private LinkedHashMap<String,Object> groupProperties;
+ //private ArrayList<String> groupMembers;
+ private LinkedHashMap<String, Object> metaData;
+
+ @SuppressWarnings("unchecked")
+ public GroupType(String groupType, LinkedHashMap<String, Object> customDef) {
+ super(groupType, GROUP_PREFIX, customDef);
+
+ this.groupType = groupType;
+ this.customDef = customDef;
+ validateFields();
+ if (defs != null) {
+ groupDescription = (String) defs.get(DESCRIPTION);
+ groupVersion = (String) defs.get(VERSION);
+ //groupProperties = (LinkedHashMap<String,Object>)defs.get(PROPERTIES);
+ //groupMembers = (ArrayList<String>)defs.get(MEMBERS);
+ Object mdo = defs.get(METADATA);
+ if (mdo instanceof LinkedHashMap) {
+ metaData = (LinkedHashMap<String, Object>) mdo;
+ } else {
+ metaData = null;
+ }
+
+ if (metaData != null) {
+ validateMetadata(metaData);
+ }
+ }
+ }
+
+ public GroupType getParentType() {
+ // Return a group statefulentity of this entity is derived from.
+ if (defs == null) {
+ return null;
+ }
+ String pgroupEntity = derivedFrom(defs);
+ if (pgroupEntity != null) {
+ return new GroupType(pgroupEntity, customDef);
+ }
+ return null;
+ }
+
+ public String getDescription() {
+ return groupDescription;
+ }
+
+ public String getVersion() {
+ return groupVersion;
+ }
+
+ @SuppressWarnings("unchecked")
+ public LinkedHashMap<String, Object> getInterfaces() {
+ Object ifo = getValue(INTERFACES, null, false);
+ if (ifo instanceof LinkedHashMap) {
+ return (LinkedHashMap<String, Object>) ifo;
+ }
+ return new LinkedHashMap<String, Object>();
+ }
+
+ private void validateFields() {
+ if (defs != null) {
+ for (String name : defs.keySet()) {
+ boolean bFound = false;
+ for (String sect : SECTIONS) {
+ if (name.equals(sect)) {
+ bFound = true;
+ break;
+ }
+ }
+ if (!bFound) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE120", String.format(
+ "UnknownFieldError: Group Type \"%s\" contains unknown field \"%s\"",
+ groupType, name)));
+ }
+ }
+ }
+ }
+
+ @SuppressWarnings("unchecked")
+ private void validateMetadata(LinkedHashMap<String, Object> metadata) {
+ String mtt = (String) metadata.get("type");
+ if (mtt != null && !mtt.equals("map") && !mtt.equals("tosca:map")) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE121", String.format(
+ "InvalidTypeError: \"%s\" defined in group for metadata is invalid",
+ mtt)));
+ }
+ for (String entrySchema : metadata.keySet()) {
+ Object estob = metadata.get(entrySchema);
+ if (estob instanceof LinkedHashMap) {
+ String est = (String) ((LinkedHashMap<String, Object>) estob).get("type");
+ if (!est.equals("string")) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE122", String.format(
+ "InvalidTypeError: \"%s\" defined in group for metadata \"%s\" is invalid",
+ est, entrySchema)));
+ }
+ }
+ }
+ }
+
+ public String getType() {
+ return groupType;
+ }
+
+ @SuppressWarnings("unchecked")
+ public ArrayList<CapabilityTypeDef> getCapabilitiesObjects() {
+ // Return a list of capability objects
+ ArrayList<CapabilityTypeDef> typecapabilities = new ArrayList<>();
+ LinkedHashMap<String, Object> caps = (LinkedHashMap<String, Object>) getValue(CAPABILITIES, null, true);
+ if (caps != null) {
+ // 'cname' is symbolic name of the capability
+ // 'cvalue' is a dict { 'type': <capability type name> }
+ for (Map.Entry<String, Object> me : caps.entrySet()) {
+ String cname = me.getKey();
+ LinkedHashMap<String, String> cvalue = (LinkedHashMap<String, String>) me.getValue();
+ String ctype = cvalue.get("type");
+ CapabilityTypeDef cap = new CapabilityTypeDef(cname, ctype, type, customDef);
+ typecapabilities.add(cap);
+ }
+ }
+ return typecapabilities;
+ }
+
+ public LinkedHashMap<String, CapabilityTypeDef> getCapabilities() {
+ // Return a dictionary of capability name-objects pairs
+ LinkedHashMap<String, CapabilityTypeDef> caps = new LinkedHashMap<>();
+ for (CapabilityTypeDef ctd : getCapabilitiesObjects()) {
+ caps.put(ctd.getName(), ctd);
+ }
+ return caps;
+ }
+
+}
+
+/*python
+
+from toscaparser.common.exception import ValidationIssueCollector
+from toscaparser.common.exception import InvalidTypeError
+from toscaparser.common.exception import UnknownFieldError
+from toscaparser.elements.statefulentitytype import StatefulEntityType
+
+
+class GroupType(StatefulEntityType):
+ '''TOSCA built-in group type.'''
+
+ SECTIONS = (DERIVED_FROM, VERSION, METADATA, DESCRIPTION, PROPERTIES,
+ MEMBERS, INTERFACES) = \
+ ("derived_from", "version", "metadata", "description",
+ "properties", "members", "interfaces")
+
+ def __init__(self, grouptype, custom_def=None):
+ super(GroupType, self).__init__(grouptype, self.GROUP_PREFIX,
+ custom_def)
+ self.custom_def = custom_def
+ self.grouptype = grouptype
+ self._validate_fields()
+ self.group_description = None
+ if self.DESCRIPTION in self.defs:
+ self.group_description = self.defs[self.DESCRIPTION]
+
+ self.group_version = None
+ if self.VERSION in self.defs:
+ self.group_version = self.defs[self.VERSION]
+
+ self.group_properties = None
+ if self.PROPERTIES in self.defs:
+ self.group_properties = self.defs[self.PROPERTIES]
+
+ self.group_members = None
+ if self.MEMBERS in self.defs:
+ self.group_members = self.defs[self.MEMBERS]
+
+ if self.METADATA in self.defs:
+ self.meta_data = self.defs[self.METADATA]
+ self._validate_metadata(self.meta_data)
+
+ @property
+ def parent_type(self):
+ '''Return a group statefulentity of this entity is derived from.'''
+ if not hasattr(self, 'defs'):
+ return None
+ pgroup_entity = self.derived_from(self.defs)
+ if pgroup_entity:
+ return GroupType(pgroup_entity, self.custom_def)
+
+ @property
+ def description(self):
+ return self.group_description
+
+ @property
+ def version(self):
+ return self.group_version
+
+ @property
+ def interfaces(self):
+ return self.get_value(self.INTERFACES)
+
+ def _validate_fields(self):
+ if self.defs:
+ for name in self.defs.keys():
+ if name not in self.SECTIONS:
+ ValidationIssueCollector.appendException(
+ UnknownFieldError(what='Group Type %s'
+ % self.grouptype, field=name))
+
+ def _validate_metadata(self, meta_data):
+ if not meta_data.get('type') in ['map', 'tosca:map']:
+ ValidationIssueCollector.appendException(
+ InvalidTypeError(what='"%s" defined in group for '
+ 'metadata' % (meta_data.get('type'))))
+ for entry_schema, entry_schema_type in meta_data.items():
+ if isinstance(entry_schema_type, dict) and not \
+ entry_schema_type.get('type') == 'string':
+ ValidationIssueCollector.appendException(
+ InvalidTypeError(what='"%s" defined in group for '
+ 'metadata "%s"'
+ % (entry_schema_type.get('type'),
+ entry_schema)))
+*/
diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/InterfacesDef.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/InterfacesDef.java
new file mode 100644
index 0000000..2862a11
--- /dev/null
+++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/InterfacesDef.java
@@ -0,0 +1,283 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.sdc.toscaparser.api.elements;
+
+import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue;
+import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder;
+import org.onap.sdc.toscaparser.api.EntityTemplate;
+
+import java.util.ArrayList;
+import java.util.LinkedHashMap;
+import java.util.Map;
+
+public class InterfacesDef extends StatefulEntityType {
+
+ public static final String LIFECYCLE = "tosca.interfaces.node.lifecycle.Standard";
+ public static final String CONFIGURE = "tosca.interfaces.relationship.Configure";
+ public static final String LIFECYCLE_SHORTNAME = "Standard";
+ public static final String CONFIGURE_SHORTNAME = "Configure";
+
+ public static final String[] SECTIONS = {
+ LIFECYCLE, CONFIGURE, LIFECYCLE_SHORTNAME, CONFIGURE_SHORTNAME
+ };
+
+ public static final String IMPLEMENTATION = "implementation";
+ public static final String DESCRIPTION = "description";
+ public static final String INPUTS = "inputs";
+
+ public static final String[] INTERFACE_DEF_RESERVED_WORDS = {
+ "type", "inputs", "derived_from", "version", "description"};
+
+ private EntityType ntype;
+ private EntityTemplate nodeTemplate;
+
+ private String operationName;
+ private Object operationDef;
+ private Object implementation;
+ private LinkedHashMap<String, Object> inputs;
+ private String description;
+
+ @SuppressWarnings("unchecked")
+ public InterfacesDef(EntityType inodeType,
+ String interfaceType,
+ EntityTemplate inodeTemplate,
+ String iname,
+ Object ivalue) {
+ // void
+ super();
+
+ ntype = inodeType;
+ nodeTemplate = inodeTemplate;
+ type = interfaceType;
+ operationName = iname;
+ operationDef = ivalue;
+ implementation = null;
+ inputs = null;
+ defs = new LinkedHashMap<>();
+
+ if (interfaceType.equals(LIFECYCLE_SHORTNAME)) {
+ interfaceType = LIFECYCLE;
+ }
+ if (interfaceType.equals(CONFIGURE_SHORTNAME)) {
+ interfaceType = CONFIGURE;
+ }
+
+ // only NodeType has getInterfaces "hasattr(ntype,interfaces)"
+ // while RelationshipType does not
+ if (ntype instanceof NodeType) {
+ if (((NodeType) ntype).getInterfaces() != null
+ && ((NodeType) ntype).getInterfaces().values().contains(interfaceType)) {
+ LinkedHashMap<String, Object> nii = (LinkedHashMap<String, Object>)
+ ((NodeType) ntype).getInterfaces().get(interfaceType);
+ interfaceType = (String) nii.get("type");
+ }
+ }
+ if (inodeType != null) {
+ if (nodeTemplate != null && nodeTemplate.getCustomDef() != null
+ && nodeTemplate.getCustomDef().containsKey(interfaceType)) {
+ defs = (LinkedHashMap<String, Object>)
+ nodeTemplate.getCustomDef().get(interfaceType);
+ } else {
+ defs = (LinkedHashMap<String, Object>) TOSCA_DEF.get(interfaceType);
+ }
+ }
+
+ if (ivalue != null) {
+ if (ivalue instanceof LinkedHashMap) {
+ for (Map.Entry<String, Object> me : ((LinkedHashMap<String, Object>) ivalue).entrySet()) {
+ if (me.getKey().equals(IMPLEMENTATION)) {
+ implementation = me.getValue();
+ } else if (me.getKey().equals(INPUTS)) {
+ inputs = (LinkedHashMap<String, Object>) me.getValue();
+ } else if (me.getKey().equals(DESCRIPTION)) {
+ description = (String) me.getValue();
+ } else {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE123", String.format(
+ "UnknownFieldError: \"interfaces\" of template \"%s\" contain unknown field \"%s\"",
+ nodeTemplate.getName(), me.getKey())));
+ }
+ }
+ }
+ }
+ }
+
+ public ArrayList<String> getLifecycleOps() {
+ if (defs != null) {
+ if (type.equals(LIFECYCLE)) {
+ return ops();
+ }
+ }
+ return null;
+ }
+
+ public ArrayList<String> getInterfaceOps() {
+ if (defs != null) {
+ ArrayList<String> ops = ops();
+ ArrayList<String> idrw = new ArrayList<>();
+ for (int i = 0; i < InterfacesDef.INTERFACE_DEF_RESERVED_WORDS.length; i++) {
+ idrw.add(InterfacesDef.INTERFACE_DEF_RESERVED_WORDS[i]);
+ }
+ ops.removeAll(idrw);
+ return ops;
+ }
+ return null;
+ }
+
+ public ArrayList<String> getConfigureOps() {
+ if (defs != null) {
+ if (type.equals(CONFIGURE)) {
+ return ops();
+ }
+ }
+ return null;
+ }
+
+ private ArrayList<String> ops() {
+ return new ArrayList<String>(defs.keySet());
+ }
+
+ // getters/setters
+
+ public LinkedHashMap<String, Object> getInputs() {
+ return inputs;
+ }
+
+ public void setInput(String name, Object value) {
+ inputs.put(name, value);
+ }
+
+ public Object getImplementation() {
+ return implementation;
+ }
+
+ public void setImplementation(Object implementation) {
+ this.implementation = implementation;
+ }
+
+ public String getDescription() {
+ return description;
+ }
+
+ public void setDescription(String description) {
+ this.description = description;
+ }
+
+ public String getOperationName() {
+ return operationName;
+ }
+
+ public void setOperationName(String operationName) {
+ this.operationName = operationName;
+ }
+}
+
+
+
+/*python
+
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from toscaparser.common.exception import ValidationIssueCollector
+from toscaparser.common.exception import UnknownFieldError
+from toscaparser.elements.statefulentitytype import StatefulEntityType
+
+SECTIONS = (LIFECYCLE, CONFIGURE, LIFECYCLE_SHORTNAME,
+ CONFIGURE_SHORTNAME) = \
+ ('tosca.interfaces.node.lifecycle.Standard',
+ 'tosca.interfaces.relationship.Configure',
+ 'Standard', 'Configure')
+
+INTERFACEVALUE = (IMPLEMENTATION, INPUTS) = ('implementation', 'inputs')
+
+INTERFACE_DEF_RESERVED_WORDS = ['type', 'inputs', 'derived_from', 'version',
+ 'description']
+
+
+class InterfacesDef(StatefulEntityType):
+ '''TOSCA built-in interfaces type.'''
+
+ def __init__(self, node_type, interfacetype,
+ node_template=None, name=None, value=None):
+ self.ntype = node_type
+ self.node_template = node_template
+ self.type = interfacetype
+ self.name = name
+ self.value = value
+ self.implementation = None
+ self.inputs = None
+ self.defs = {}
+ if interfacetype == LIFECYCLE_SHORTNAME:
+ interfacetype = LIFECYCLE
+ if interfacetype == CONFIGURE_SHORTNAME:
+ interfacetype = CONFIGURE
+ if hasattr(self.ntype, 'interfaces') \
+ and self.ntype.interfaces \
+ and interfacetype in self.ntype.interfaces:
+ interfacetype = self.ntype.interfaces[interfacetype]['type']
+ if node_type:
+ if self.node_template and self.node_template.custom_def \
+ and interfacetype in self.node_template.custom_def:
+ self.defs = self.node_template.custom_def[interfacetype]
+ else:
+ self.defs = self.TOSCA_DEF[interfacetype]
+ if value:
+ if isinstance(self.value, dict):
+ for i, j in self.value.items():
+ if i == IMPLEMENTATION:
+ self.implementation = j
+ elif i == INPUTS:
+ self.inputs = j
+ else:
+ what = ('"interfaces" of template "%s"' %
+ self.node_template.name)
+ ValidationIssueCollector.appendException(
+ UnknownFieldError(what=what, field=i))
+ else:
+ self.implementation = value
+
+ @property
+ def lifecycle_ops(self):
+ if self.defs:
+ if self.type == LIFECYCLE:
+ return self._ops()
+
+ @property
+ def configure_ops(self):
+ if self.defs:
+ if self.type == CONFIGURE:
+ return self._ops()
+
+ def _ops(self):
+ ops = []
+ for name in list(self.defs.keys()):
+ ops.append(name)
+ return ops
+*/
diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/Metadata.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/Metadata.java
new file mode 100644
index 0000000..f3de49e
--- /dev/null
+++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/Metadata.java
@@ -0,0 +1,62 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.sdc.toscaparser.api.elements;
+
+import java.util.AbstractMap;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.stream.Collectors;
+
+public class Metadata {
+
+ private final Map<String, Object> metadataMap;
+
+ public Metadata(Map<String, Object> metadataMap) {
+ this.metadataMap = metadataMap != null ? metadataMap : new HashMap<>();
+ }
+
+ public String getValue(String key) {
+
+ Object obj = this.metadataMap.get(key);
+ if (obj != null) {
+ return String.valueOf(obj);
+ }
+ return null;
+ }
+
+ /**
+ * Get all properties of a Metadata object.<br>
+ * This object represents the "metadata" section of some entity.
+ *
+ * @return all properties of this Metadata, as a key-value.
+ */
+ public Map<String, String> getAllProperties() {
+ return metadataMap.entrySet().stream().map(e -> new AbstractMap.SimpleEntry<String, String>(e.getKey(), String.valueOf(e.getValue()))).collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
+ }
+
+ @Override
+ public String toString() {
+ return "Metadata{"
+ + "metadataMap=" + metadataMap
+ + '}';
+ }
+
+}
diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/NodeType.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/NodeType.java
new file mode 100644
index 0000000..c251be9
--- /dev/null
+++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/NodeType.java
@@ -0,0 +1,549 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.sdc.toscaparser.api.elements;
+
+import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue;
+import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder;
+
+import java.util.ArrayList;
+import java.util.LinkedHashMap;
+import java.util.Map;
+
public class NodeType extends StatefulEntityType {
    // TOSCA built-in node type (java port of the python reference kept below).

    private static final String DERIVED_FROM = "derived_from";
    private static final String METADATA = "metadata";
    private static final String PROPERTIES = "properties";
    private static final String VERSION = "version";
    private static final String DESCRIPTION = "description";
    private static final String ATTRIBUTES = "attributes";
    private static final String REQUIREMENTS = "requirements";
    private static final String CAPABILITIES = "capabilities";
    private static final String INTERFACES = "interfaces";
    private static final String ARTIFACTS = "artifacts";

    // The only keys a node type definition may contain; anything else is
    // reported by _validateKeys().
    private static final String SECTIONS[] = {
            DERIVED_FROM, METADATA, PROPERTIES, VERSION, DESCRIPTION, ATTRIBUTES, REQUIREMENTS, CAPABILITIES, INTERFACES, ARTIFACTS
    };

    // Fully qualified node type name.
    private String ntype;
    // Custom type definitions from the template; may be null.
    public LinkedHashMap<String, Object> customDef;

    /**
     * Builds a node type element and validates its known fields.
     *
     * @param nttype      fully qualified node type name
     * @param ntcustomDef custom type definitions from the template; may be null
     */
    public NodeType(String nttype, LinkedHashMap<String, Object> ntcustomDef) {
        super(nttype, NODE_PREFIX, ntcustomDef);
        ntype = nttype;
        customDef = ntcustomDef;
        _validateKeys();
    }

    /**
     * Returns the NodeType this one is derived from, or null when this type
     * has no definitions or no "derived_from" entry.
     */
    public Object getParentType() {
        // Return a node this node is derived from
        if (defs == null) {
            return null;
        }
        String pnode = derivedFrom(defs);
        if (pnode != null && !pnode.isEmpty()) {
            return new NodeType(pnode, customDef);
        }
        return null;
    }

    /**
     * Returns a dictionary of relationships to other node types.
     *
     * This method returns a dictionary of named relationships that nodes
     * of the current node type (self) can have to other nodes (of specific
     * types) in a TOSCA template. A JE11 issue is reported for every
     * requirement whose relationship or target node type cannot be resolved.
     */
    @SuppressWarnings("unchecked")
    public LinkedHashMap<RelationshipType, NodeType> getRelationship() {
        LinkedHashMap<RelationshipType, NodeType> relationship = new LinkedHashMap<>();
        ArrayList<LinkedHashMap<String, Object>> requires;
        Object treq = getAllRequirements();
        if (treq != null) {
            // NOTE(sdmonov): Check if requires is a dict.
            // If it is a dict convert it to a list of dicts.
            // This is needed because currently the code below supports only
            // lists as requirements definition. The following check will
            // make sure if a map (dict) was provided it will be converted to
            // a list before proceeding to the parsing.
            if (treq instanceof LinkedHashMap) {
                requires = new ArrayList<>();
                for (Map.Entry<String, Object> me : ((LinkedHashMap<String, Object>) treq).entrySet()) {
                    LinkedHashMap<String, Object> tl = new LinkedHashMap<>();
                    tl.put(me.getKey(), me.getValue());
                    requires.add(tl);
                }
            } else {
                requires = (ArrayList<LinkedHashMap<String, Object>>) treq;
            }

            String keyword = null;
            String nodeType = null;
            for (LinkedHashMap<String, Object> require : requires) {
                String relation = null;
                // Each requirement entry maps one requirement name to its definition.
                for (Map.Entry<String, Object> re : require.entrySet()) {
                    String key = re.getKey();
                    LinkedHashMap<String, Object> req = (LinkedHashMap<String, Object>) re.getValue();
                    if (req.get("relationship") != null) {
                        Object trelation = req.get("relationship");
                        // trelation is a string or a dict with "type" mapped to the string we want
                        if (trelation instanceof String) {
                            relation = (String) trelation;
                        } else {
                            if (((LinkedHashMap<String, Object>) trelation).get("type") != null) {
                                relation = (String) ((LinkedHashMap<String, Object>) trelation).get("type");
                            }
                        }
                        nodeType = (String) req.get("node");
                        //BUG meaningless?? LinkedHashMap<String,Object> value = req;
                        if (nodeType != null) {
                            keyword = "node";
                        } else {
                            String getRelation = null;
                            // If nodeTypeByCap is a dict and has a type key
                            // we need to lookup the node type using
                            // the capability type
                            String captype = (String) req.get("capability");
                            nodeType = _getNodeTypeByCap(captype);
                            if (nodeType != null) {
                                getRelation = _getRelation(key, nodeType);
                            } else {
                                ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE11", String.format(
                                        "NodeTypeRequirementForCapabilityUnfulfilled: Node type: \"%s\" with requrement \"%s\" for node type with capability type \"%s\" is not found\\unfulfilled", this.ntype, key, captype)));
                            }
                            if (getRelation != null) {
                                relation = getRelation;
                            }
                            keyword = key;
                        }
                    }
                }
                if (relation == null || nodeType == null) {
                    ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE11", String.format(
                            "NodeTypeForRelationUnfulfilled: Node type \"%s\" - relationship type \"%s\" is unfulfilled", this.ntype, relation)));
                } else {
                    RelationshipType rtype = new RelationshipType(relation, keyword, customDef);
                    NodeType relatednode = new NodeType(nodeType, customDef);
                    relationship.put(rtype, relatednode);
                }
            }
        }
        return relationship;

    }

    /**
     * Finds the node type that has the provided capability.
     *
     * This method will lookup all node types if they have the
     * provided capability. Returns null when no candidate type declares a
     * capability of type {@code cap}.
     */
    @SuppressWarnings("unchecked")
    private String _getNodeTypeByCap(String cap) {
        // Filter the node types
        ArrayList<String> nodeTypes = new ArrayList<>();
        for (String nt : customDef.keySet()) {
            // NOTE(review): '&&' binds tighter than '||', so this reads as
            // startsWith(NODE_PREFIX) || (startsWith("org.openecomp") && !Root);
            // "tosca.nodes.Root" is therefore still admitted via the first
            // clause — confirm whether the Root exclusion was meant to apply
            // to both prefixes.
            if (nt.startsWith(NODE_PREFIX) || nt.startsWith("org.openecomp") && !nt.equals("tosca.nodes.Root")) {
                nodeTypes.add(nt);
            }
        }
        for (String nt : nodeTypes) {
            LinkedHashMap<String, Object> nodeDef = (LinkedHashMap<String, Object>) customDef.get(nt);
            if (nodeDef instanceof LinkedHashMap && nodeDef.get("capabilities") != null) {
                LinkedHashMap<String, Object> nodeCaps = (LinkedHashMap<String, Object>) nodeDef.get("capabilities");
                if (nodeCaps != null) {
                    for (Object val : nodeCaps.values()) {
                        if (val instanceof LinkedHashMap) {
                            String tp = (String) ((LinkedHashMap<String, Object>) val).get("type");
                            if (tp != null && tp.equals(cap)) {
                                return nt;
                            }
                        }
                    }
                }
            }
        }
        return null;
    }

    /**
     * Resolves the relationship type name for requirement {@code key} of node
     * type {@code ndtype} by inspecting that type's capabilities.
     */
    @SuppressWarnings("unchecked")
    private String _getRelation(String key, String ndtype) {
        String relation = null;
        NodeType ntype = new NodeType(ndtype, customDef);
        LinkedHashMap<String, CapabilityTypeDef> caps = ntype.getCapabilities();
        if (caps != null && caps.get(key) != null) {
            CapabilityTypeDef c = caps.get(key);
            for (int i = 0; i < RELATIONSHIP_TYPE.length; i++) {
                String r = RELATIONSHIP_TYPE[i];
                // NOTE(review): when RELATIONSHIP_TYPE entries are non-null this
                // branch always breaks on the first entry, which makes the
                // rtypedef lookups below unreachable — compare against the
                // python reference before relying on this method's result.
                if (r != null) {
                    relation = r;
                    break;
                }
                LinkedHashMap<String, Object> rtypedef = (LinkedHashMap<String, Object>) customDef.get(r);
                for (Object o : rtypedef.values()) {
                    LinkedHashMap<String, Object> properties = (LinkedHashMap<String, Object>) o;
                    if (properties.get(c.getType()) != null) {
                        relation = r;
                        break;
                    }
                }
                if (relation != null) {
                    break;
                } else {
                    // Fall back to the capability's parent type.
                    for (Object o : rtypedef.values()) {
                        LinkedHashMap<String, Object> properties = (LinkedHashMap<String, Object>) o;
                        if (properties.get(c.getParentType()) != null) {
                            relation = r;
                            break;
                        }
                    }
                }
            }
        }
        return relation;
    }

    /**
     * Returns the capability type objects declared in the "capabilities"
     * section of this type (empty list when none).
     */
    @SuppressWarnings("unchecked")
    public ArrayList<CapabilityTypeDef> getCapabilitiesObjects() {
        // Return a list of capability objects
        ArrayList<CapabilityTypeDef> typecapabilities = new ArrayList<>();
        LinkedHashMap<String, Object> caps = (LinkedHashMap<String, Object>) getValue(CAPABILITIES, null, true);
        if (caps != null) {
            // 'cname' is symbolic name of the capability
            // 'cvalue' is a dict { 'type': <capability type name> }
            for (Map.Entry<String, Object> me : caps.entrySet()) {
                String cname = me.getKey();
                LinkedHashMap<String, String> cvalue = (LinkedHashMap<String, String>) me.getValue();
                String ctype = cvalue.get("type");
                CapabilityTypeDef cap = new CapabilityTypeDef(cname, ctype, type, customDef);
                typecapabilities.add(cap);
            }
        }
        return typecapabilities;
    }

    /** Returns the capabilities keyed by their symbolic names. */
    public LinkedHashMap<String, CapabilityTypeDef> getCapabilities() {
        // Return a dictionary of capability name-objects pairs
        LinkedHashMap<String, CapabilityTypeDef> caps = new LinkedHashMap<>();
        for (CapabilityTypeDef ctd : getCapabilitiesObjects()) {
            caps.put(ctd.getName(), ctd);
        }
        return caps;
    }

    /** Returns the "requirements" section of this type (with parent lookup), or null. */
    @SuppressWarnings("unchecked")
    public ArrayList<Object> getRequirements() {
        return (ArrayList<Object>) getValue(REQUIREMENTS, null, true);
    }

    /** Alias for getRequirements(); kept for parity with the python reference. */
    public ArrayList<Object> getAllRequirements() {
        return getRequirements();
    }

    /** Returns the "interfaces" section of this type (no parent lookup), or null. */
    @SuppressWarnings("unchecked")
    public LinkedHashMap<String, Object> getInterfaces() {
        return (LinkedHashMap<String, Object>) getValue(INTERFACES, null, false);
    }


    /** Returns the input names of the Standard lifecycle interface, if any. */
    @SuppressWarnings("unchecked")
    public ArrayList<String> getLifecycleInputs() {
        // Return inputs to life cycle operations if found
        ArrayList<String> inputs = new ArrayList<>();
        LinkedHashMap<String, Object> interfaces = getInterfaces();
        if (interfaces != null) {
            for (Map.Entry<String, Object> me : interfaces.entrySet()) {
                String iname = me.getKey();
                LinkedHashMap<String, Object> ivalue = (LinkedHashMap<String, Object>) me.getValue();
                if (iname.equals(InterfacesDef.LIFECYCLE)) {
                    for (Map.Entry<String, Object> ie : ivalue.entrySet()) {
                        // NOTE(review): TOSCA uses the key "inputs"; "input"
                        // here may never match — confirm against templates
                        // this parser accepts.
                        if (ie.getKey().equals("input")) {
                            LinkedHashMap<String, Object> y = (LinkedHashMap<String, Object>) ie.getValue();
                            for (String i : y.keySet()) {
                                inputs.add(i);
                            }
                        }
                    }
                }
            }
        }
        return inputs;
    }

    /** Returns the Standard lifecycle operation names, or null when no interfaces exist. */
    public ArrayList<String> getLifecycleOperations() {
        // Return available life cycle operations if found
        ArrayList<String> ops = null;
        LinkedHashMap<String, Object> interfaces = getInterfaces();
        if (interfaces != null) {
            InterfacesDef i = new InterfacesDef(this, InterfacesDef.LIFECYCLE, null, null, null);
            ops = i.getLifecycleOps();
        }
        return ops;
    }

    /** Returns the capability definition with the given symbolic name, or null. */
    public CapabilityTypeDef getCapability(String name) {
        //BUG?? the python code has to be wrong
        // it refers to a bad attribute 'value'...
        LinkedHashMap<String, CapabilityTypeDef> caps = getCapabilities();
        if (caps != null) {
            return caps.get(name);
        }
        return null;
        /*
        def get_capability(self, name):
            caps = self.get_capabilities()
            if caps and name in caps.keys():
                return caps[name].value
        */
    }

    /** Returns the type name of the capability with the given symbolic name, or null. */
    public String getCapabilityType(String name) {
        //BUG?? the python code has to be wrong
        // it refers to a bad attribute 'value'...
        CapabilityTypeDef captype = getCapability(name);
        if (captype != null) {
            return captype.getType();
        }
        return null;
        /*
        def get_capability_type(self, name):
            captype = self.get_capability(name)
            if captype and name in captype.keys():
                return captype[name].value
        */
    }

    /** Reports a JE124 issue for every definition key outside SECTIONS. */
    private void _validateKeys() {
        if (defs != null) {
            for (String key : defs.keySet()) {
                boolean bFound = false;
                for (int i = 0; i < SECTIONS.length; i++) {
                    if (key.equals(SECTIONS[i])) {
                        bFound = true;
                        break;
                    }
                }
                if (!bFound) {
                    ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE124", String.format(
                            "UnknownFieldError: Nodetype \"%s\" has unknown field \"%s\"", ntype, key)));
                }
            }
        }
    }

}
+
+/*python
+
+from toscaparser.common.exception import ValidationIssueCollector
+from toscaparser.common.exception import UnknownFieldError
+from toscaparser.elements.capabilitytype import CapabilityTypeDef
+import org.openecomp.sdc.toscaparser.api.elements.interfaces as ifaces
+from toscaparser.elements.interfaces import InterfacesDef
+from toscaparser.elements.relationshiptype import RelationshipType
+from toscaparser.elements.statefulentitytype import StatefulEntityType
+
+
+class NodeType(StatefulEntityType):
+ '''TOSCA built-in node type.'''
+ SECTIONS = (DERIVED_FROM, METADATA, PROPERTIES, VERSION, DESCRIPTION, ATTRIBUTES, REQUIREMENTS, CAPABILITIES, INTERFACES, ARTIFACTS) = \
+ ('derived_from', 'metadata', 'properties', 'version',
+ 'description', 'attributes', 'requirements', 'capabilities',
+ 'interfaces', 'artifacts')
+
+ def __init__(self, ntype, custom_def=None):
+ super(NodeType, self).__init__(ntype, self.NODE_PREFIX, custom_def)
+ self.ntype = ntype
+ self.custom_def = custom_def
+ self._validate_keys()
+
+ @property
+ def parent_type(self):
+ '''Return a node this node is derived from.'''
+ if not hasattr(self, 'defs'):
+ return None
+ pnode = self.derived_from(self.defs)
+ if pnode:
+ return NodeType(pnode, self.custom_def)
+
+ @property
+ def relationship(self):
+ '''Return a dictionary of relationships to other node types.
+
+ This method returns a dictionary of named relationships that nodes
+ of the current node type (self) can have to other nodes (of specific
+ types) in a TOSCA template.
+
+ '''
+ relationship = {}
+ requires = self.get_all_requirements()
+ if requires:
+ # NOTE(sdmonov): Check if requires is a dict.
+ # If it is a dict convert it to a list of dicts.
+ # This is needed because currently the code below supports only
+ # lists as requirements definition. The following check will
+ # make sure if a map (dict) was provided it will be converted to
+ # a list before proceeding to the parsing.
+ if isinstance(requires, dict):
+ requires = [{key: value} for key, value in requires.items()]
+
+ keyword = None
+ node_type = None
+ for require in requires:
+ for key, req in require.items():
+ if 'relationship' in req:
+ relation = req.get('relationship')
+ if 'type' in relation:
+ relation = relation.get('type')
+ node_type = req.get('node')
+ value = req
+ if node_type:
+ keyword = 'node'
+ else:
+ # If value is a dict and has a type key
+ # we need to lookup the node type using
+ # the capability type
+ value = req
+ if isinstance(value, dict):
+ captype = value['capability']
+ value = (self.
+ _get_node_type_by_cap(key, captype))
+ relation = self._get_relation(key, value)
+ keyword = key
+ node_type = value
+ rtype = RelationshipType(relation, keyword, self.custom_def)
+ relatednode = NodeType(node_type, self.custom_def)
+ relationship[rtype] = relatednode
+ return relationship
+
+ def _get_node_type_by_cap(self, key, cap):
+ '''Find the node type that has the provided capability
+
+ This method will lookup all node types if they have the
+ provided capability.
+ '''
+
+ # Filter the node types
+ node_types = [node_type for node_type in self.TOSCA_DEF.keys()
+ if node_type.startswith(self.NODE_PREFIX) and
+ node_type != 'tosca.nodes.Root']
+
+ for node_type in node_types:
+ node_def = self.TOSCA_DEF[node_type]
+ if isinstance(node_def, dict) and 'capabilities' in node_def:
+ node_caps = node_def['capabilities']
+ for value in node_caps.values():
+ if isinstance(value, dict) and \
+ 'type' in value and value['type'] == cap:
+ return node_type
+
+ def _get_relation(self, key, ndtype):
+ relation = None
+ ntype = NodeType(ndtype)
+ caps = ntype.get_capabilities()
+ if caps and key in caps.keys():
+ c = caps[key]
+ for r in self.RELATIONSHIP_TYPE:
+ rtypedef = ntype.TOSCA_DEF[r]
+ for properties in rtypedef.values():
+ if c.type in properties:
+ relation = r
+ break
+ if relation:
+ break
+ else:
+ for properties in rtypedef.values():
+ if c.parent_type in properties:
+ relation = r
+ break
+ return relation
+
+ def get_capabilities_objects(self):
+ '''Return a list of capability objects.'''
+ typecapabilities = []
+ caps = self.get_value(self.CAPABILITIES, None, True)
+ if caps:
+ # 'name' is symbolic name of the capability
+ # 'value' is a dict { 'type': <capability type name> }
+ for name, value in caps.items():
+ ctype = value.get('type')
+ cap = CapabilityTypeDef(name, ctype, self.type,
+ self.custom_def)
+ typecapabilities.append(cap)
+ return typecapabilities
+
+ def get_capabilities(self):
+ '''Return a dictionary of capability name-objects pairs.'''
+ return {cap.name: cap
+ for cap in self.get_capabilities_objects()}
+
+ @property
+ def requirements(self):
+ return self.get_value(self.REQUIREMENTS, None, True)
+
+ def get_all_requirements(self):
+ return self.requirements
+
+ @property
+ def interfaces(self):
+ return self.get_value(self.INTERFACES)
+
+ @property
+ def lifecycle_inputs(self):
+ '''Return inputs to life cycle operations if found.'''
+ inputs = []
+ interfaces = self.interfaces
+ if interfaces:
+ for name, value in interfaces.items():
+ if name == ifaces.LIFECYCLE:
+ for x, y in value.items():
+ if x == 'inputs':
+ for i in y.iterkeys():
+ inputs.append(i)
+ return inputs
+
+ @property
+ def lifecycle_operations(self):
+ '''Return available life cycle operations if found.'''
+ ops = None
+ interfaces = self.interfaces
+ if interfaces:
+ i = InterfacesDef(self.type, ifaces.LIFECYCLE)
+ ops = i.lifecycle_ops
+ return ops
+
+ def get_capability(self, name):
+ caps = self.get_capabilities()
+ if caps and name in caps.keys():
+ return caps[name].value
+
+ def get_capability_type(self, name):
+ captype = self.get_capability(name)
+ if captype and name in captype.keys():
+ return captype[name].value
+
+ def _validate_keys(self):
+ if self.defs:
+ for key in self.defs.keys():
+ if key not in self.SECTIONS:
+ ValidationIssueCollector.appendException(
+ UnknownFieldError(what='Nodetype"%s"' % self.ntype,
+ field=key))
+*/
diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/PolicyType.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/PolicyType.java
new file mode 100644
index 0000000..b227a31
--- /dev/null
+++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/PolicyType.java
@@ -0,0 +1,309 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.sdc.toscaparser.api.elements;
+
+import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue;
+import org.onap.sdc.toscaparser.api.utils.TOSCAVersionProperty;
+import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder;
+
+import java.util.ArrayList;
+import java.util.LinkedHashMap;
+
+public class PolicyType extends StatefulEntityType {
+
+    private static final String DERIVED_FROM = "derived_from";
+    private static final String METADATA = "metadata";
+    private static final String PROPERTIES = "properties";
+    private static final String VERSION = "version";
+    private static final String DESCRIPTION = "description";
+    private static final String TARGETS = "targets";
+    private static final String TRIGGERS = "triggers";
+    private static final String TYPE = "type";
+
+    // Recognized top-level keywords of a policy type definition.
+    private static final String[] SECTIONS = {
+            DERIVED_FROM, METADATA, PROPERTIES, VERSION, DESCRIPTION, TARGETS, TRIGGERS, TYPE
+    };
+
+    private LinkedHashMap<String, Object> customDef;
+    private String policyDescription;
+    private Object policyVersion;
+    private LinkedHashMap<String, Object> properties;
+    private LinkedHashMap<String, Object> parentPolicies;
+    private LinkedHashMap<String, Object> metaData;
+    private ArrayList<String> targetsList;
+
+    /**
+     * Builds a TOSCA built-in policy type element.
+     *
+     * @param type      fully qualified policy type name
+     * @param customDef custom type definitions from the template; also used
+     *                  to resolve the node types listed under "targets"
+     */
+    public PolicyType(String type, LinkedHashMap<String, Object> customDef) {
+        super(type, POLICY_PREFIX, customDef);
+
+        this.type = type;
+        this.customDef = customDef;
+        validateKeys();
+
+        metaData = null;
+        if (defs != null && defs.get(METADATA) != null) {
+            metaData = (LinkedHashMap<String, Object>) defs.get(METADATA);
+            validateMetadata(metaData);
+        }
+
+        properties = null;
+        if (defs != null && defs.get(PROPERTIES) != null) {
+            properties = (LinkedHashMap<String, Object>) defs.get(PROPERTIES);
+        }
+        parentPolicies = getParentPolicies();
+
+        policyVersion = null;
+        if (defs != null && defs.get(VERSION) != null) {
+            policyVersion = (new TOSCAVersionProperty(
+                    defs.get(VERSION).toString())).getVersion();
+        }
+
+        policyDescription = null;
+        if (defs != null && defs.get(DESCRIPTION) != null) {
+            policyDescription = (String) defs.get(DESCRIPTION);
+        }
+
+        targetsList = null;
+        if (defs != null && defs.get(TARGETS) != null) {
+            targetsList = (ArrayList<String>) defs.get(TARGETS);
+            validateTargets(targetsList, this.customDef);
+        }
+
+    }
+
+    // Walks the derived_from chain up to (excluding) tosca.policies.Root,
+    // collecting each ancestor's definition keyed by its type name.
+    private LinkedHashMap<String, Object> getParentPolicies() {
+        LinkedHashMap<String, Object> policies = new LinkedHashMap<>();
+        String parentPolicy = null;
+        if (getParentType() != null) {
+            parentPolicy = getParentType().getType();
+        }
+        while (parentPolicy != null && !parentPolicy.equals("tosca.policies.Root")) {
+            policies.put(parentPolicy, TOSCA_DEF.get(parentPolicy));
+            // Bug fix: the lookup key used to be the literal "derived_from);"
+            // (a paste typo), so the chain never advanced past the first
+            // ancestor. A null/non-map definition now ends the walk instead
+            // of raising a NullPointerException.
+            Object parentDef = policies.get(parentPolicy);
+            if (parentDef instanceof LinkedHashMap) {
+                parentPolicy = (String) ((LinkedHashMap<String, Object>) parentDef).get(DERIVED_FROM);
+            } else {
+                parentPolicy = null;
+            }
+        }
+        return policies;
+    }
+
+    /** @return the fully qualified policy type name */
+    public String getType() {
+        return type;
+    }
+
+    /**
+     * Returns the policy type this one is derived from, or null when no
+     * definitions are loaded or there is no derived_from entry.
+     */
+    public PolicyType getParentType() {
+        if (defs == null) {
+            return null;
+        }
+        String policyEntity = derivedFrom(defs);
+        if (policyEntity != null) {
+            return new PolicyType(policyEntity, customDef);
+        }
+        return null;
+    }
+
+    /** Returns the raw definition of a policy field by name, or null. */
+    public Object getPolicy(String name) {
+        if (defs != null && defs.get(name) != null) {
+            return defs.get(name);
+        }
+        return null;
+    }
+
+    /** @return node type names listed under "targets", or null when absent */
+    public ArrayList<String> getTargets() {
+        return targetsList;
+    }
+
+    /** @return the "description" entry, or null when absent */
+    public String getDescription() {
+        return policyDescription;
+    }
+
+    /** @return the normalized "version" entry, or null when absent */
+    public Object getVersion() {
+        return policyVersion;
+    }
+
+    // Reports a validation issue for every top-level key that is not one of
+    // the recognized SECTIONS keywords.
+    private void validateKeys() {
+        // Guard added: defs can be null for an unresolved type, and every
+        // other use of defs in this class already checks for that.
+        if (defs == null) {
+            return;
+        }
+        for (String key : defs.keySet()) {
+            boolean bFound = false;
+            for (String sect : SECTIONS) {
+                if (key.equals(sect)) {
+                    bFound = true;
+                    break;
+                }
+            }
+            if (!bFound) {
+                ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE125", String.format(
+                        "UnknownFieldError: Policy \"%s\" contains unknown field \"%s\"",
+                        type, key)));
+            }
+        }
+    }
+
+    // Verifies that every "targets" entry names a type present in the custom
+    // definitions supplied with the template.
+    private void validateTargets(ArrayList<String> targetsList,
+                                 LinkedHashMap<String, Object> customDef) {
+        for (String nodetype : targetsList) {
+            if (customDef.get(nodetype) == null) {
+                ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE126", String.format(
+                        "InvalidTypeError: \"%s\" defined in targets for policy \"%s\"",
+                        nodetype, type)));
+
+            }
+        }
+    }
+
+    // Checks the metadata section: its own "type" must be map/tosca:map and
+    // every nested entry schema must declare type "string".
+    private void validateMetadata(LinkedHashMap<String, Object> metaData) {
+        String mtype = (String) metaData.get("type");
+        if (mtype != null && !mtype.equals("map") && !mtype.equals("tosca:map")) {
+            ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE127", String.format(
+                    "InvalidTypeError: \"%s\" defined in policy for metadata",
+                    mtype)));
+        }
+        for (String entrySchema : this.metaData.keySet()) {
+            Object estob = this.metaData.get(entrySchema);
+            if (estob instanceof LinkedHashMap) {
+                String est = (String)
+                        ((LinkedHashMap<String, Object>) estob).get("type");
+                // Null-safe comparison: a nested entry without a "type" is
+                // reported as invalid (matching the reference python, which
+                // treats a missing type as a failure) instead of throwing a
+                // NullPointerException.
+                if (!"string".equals(est)) {
+                    ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE128", String.format(
+                            "InvalidTypeError: \"%s\" defined in policy for metadata \"%s\"",
+                            est, entrySchema)));
+                }
+            }
+        }
+    }
+
+}
+
+/*python
+
+from toscaparser.common.exception import ValidationIssueCollector
+from toscaparser.common.exception import InvalidTypeError
+from toscaparser.common.exception import UnknownFieldError
+from toscaparser.elements.statefulentitytype import StatefulEntityType
+from toscaparser.utils.validateutils import TOSCAVersionProperty
+
+
+class PolicyType(StatefulEntityType):
+
+ '''TOSCA built-in policies type.'''
+ SECTIONS = (DERIVED_FROM, METADATA, PROPERTIES, VERSION, DESCRIPTION, TARGETS) = \
+ ('derived_from', 'metadata', 'properties', 'version',
+ 'description', 'targets')
+
+ def __init__(self, ptype, custom_def=None):
+ super(PolicyType, self).__init__(ptype, self.POLICY_PREFIX,
+ custom_def)
+ self.type = ptype
+ self.custom_def = custom_def
+ self._validate_keys()
+
+ self.meta_data = None
+ if self.METADATA in self.defs:
+ self.meta_data = self.defs[self.METADATA]
+ self._validate_metadata(self.meta_data)
+
+ self.properties = None
+ if self.PROPERTIES in self.defs:
+ self.properties = self.defs[self.PROPERTIES]
+ self.parent_policies = self._get_parent_policies()
+
+ self.policy_version = None
+ if self.VERSION in self.defs:
+ self.policy_version = TOSCAVersionProperty(
+ self.defs[self.VERSION]).get_version()
+
+ self.policy_description = self.defs[self.DESCRIPTION] \
+ if self.DESCRIPTION in self.defs else None
+
+ self.targets_list = None
+ if self.TARGETS in self.defs:
+ self.targets_list = self.defs[self.TARGETS]
+ self._validate_targets(self.targets_list, custom_def)
+
+ def _get_parent_policies(self):
+ policies = {}
+ parent_policy = self.parent_type.type if self.parent_type else None
+ if parent_policy:
+ while parent_policy != 'tosca.policies.Root':
+ policies[parent_policy] = self.TOSCA_DEF[parent_policy]
+ parent_policy = policies[parent_policy]['derived_from']
+ return policies
+
+ @property
+ def parent_type(self):
+ '''Return a policy statefulentity of this node is derived from.'''
+ if not hasattr(self, 'defs'):
+ return None
+ ppolicy_entity = self.derived_from(self.defs)
+ if ppolicy_entity:
+ return PolicyType(ppolicy_entity, self.custom_def)
+
+ def get_policy(self, name):
+ '''Return the definition of a policy field by name.'''
+ if name in self.defs:
+ return self.defs[name]
+
+ @property
+ def targets(self):
+ '''Return targets.'''
+ return self.targets_list
+
+ @property
+ def description(self):
+ return self.policy_description
+
+ @property
+ def version(self):
+ return self.policy_version
+
+ def _validate_keys(self):
+ for key in self.defs.keys():
+ if key not in self.SECTIONS:
+ ValidationIssueCollector.appendException(
+ UnknownFieldError(what='Policy "%s"' % self.type,
+ field=key))
+
+ def _validate_targets(self, targets_list, custom_def):
+ for nodetype in targets_list:
+ if nodetype not in custom_def:
+ ValidationIssueCollector.appendException(
+ InvalidTypeError(what='"%s" defined in targets for '
+ 'policy "%s"' % (nodetype, self.type)))
+
+ def _validate_metadata(self, meta_data):
+ if not meta_data.get('type') in ['map', 'tosca:map']:
+ ValidationIssueCollector.appendException(
+ InvalidTypeError(what='"%s" defined in policy for '
+ 'metadata' % (meta_data.get('type'))))
+
+ for entry_schema, entry_schema_type in meta_data.items():
+ if isinstance(entry_schema_type, dict) and not \
+ entry_schema_type.get('type') == 'string':
+ ValidationIssueCollector.appendException(
+ InvalidTypeError(what='"%s" defined in policy for '
+ 'metadata "%s"'
+ % (entry_schema_type.get('type'),
+ entry_schema)))
+*/
diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/PortSpec.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/PortSpec.java
new file mode 100644
index 0000000..01fb9fc
--- /dev/null
+++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/PortSpec.java
@@ -0,0 +1,177 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.sdc.toscaparser.api.elements;
+
+import org.onap.sdc.toscaparser.api.DataEntity;
+import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue;
+import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder;
+import org.onap.sdc.toscaparser.api.utils.ValidateUtils;
+
+import java.util.LinkedHashMap;
+
+public class PortSpec {
+    // Parent class for the tosca.datatypes.network.PortSpec data type.
+
+    private static final String SHORTNAME = "PortSpec";
+    private static final String TYPE_URI = "tosca.datatypes.network." + SHORTNAME;
+
+    private static final String PROTOCOL = "protocol";
+    private static final String SOURCE = "source";
+    private static final String SOURCE_RANGE = "source_range";
+    private static final String TARGET = "target";
+    private static final String TARGET_RANGE = "target_range";
+
+    private static final String PROPERTY_NAMES[] = {
+            PROTOCOL, SOURCE, SOURCE_RANGE,
+            TARGET, TARGET_RANGE
+    };
+
+    // todo(TBD) May want to make this a subclass of DataType
+    // and change init method to set PortSpec's properties
+    public PortSpec() {
+
+    }
+
+    /**
+     * Validates the additional requirements of a PortSpec value:
+     * 1) at least one of source, source_range, target or target_range
+     *    must be present;
+     * 2) source must lie within source_range when the range is given,
+     *    otherwise source must be a valid PortDef;
+     * 3) target must lie within target_range when the range is given,
+     *    otherwise target must be a valid PortDef.
+     * Failures are reported through the validation issue collector.
+     *
+     * @param _properties expected to be a LinkedHashMap of PortSpec fields;
+     *                    any other shape is reported as JE130
+     * @param propName    name of the property being validated (unused here)
+     * @param custom_def  custom datatype definitions (unused here)
+     */
+    public static void validateAdditionalReq(Object _properties,
+                                             String propName,
+                                             LinkedHashMap<String, Object> custom_def) {
+
+        try {
+            LinkedHashMap<String, Object> properties = (LinkedHashMap<String, Object>) _properties;
+            Object source = properties.get(PortSpec.SOURCE);
+            Object sourceRange = properties.get(PortSpec.SOURCE_RANGE);
+            Object target = properties.get(PortSpec.TARGET);
+            Object targetRange = properties.get(PortSpec.TARGET_RANGE);
+
+            // verify one of the specified values is set
+            if (source == null && sourceRange == null &&
+                    target == null && targetRange == null) {
+                ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE129", String.format(
+                        "InvalidTypeAdditionalRequirementsError: Additional requirements for type \"%s\" not met",
+                        TYPE_URI)));
+            }
+            // Validate source value is in specified range
+            if (source != null && sourceRange != null) {
+                ValidateUtils.validateValueInRange(source, sourceRange, SOURCE);
+            } else {
+                DataEntity portdef = new DataEntity("PortDef", source, null, SOURCE);
+                portdef.validate();
+            }
+            // Validate target value is in specified range
+            if (target != null && targetRange != null) {
+                // Bug fix: the range check was labeled/keyed with SOURCE; the
+                // reference implementation passes TARGET here.
+                ValidateUtils.validateValueInRange(target, targetRange, TARGET);
+            } else {
+                // Bug fix: this branch validated "source" instead of "target"
+                // (NOTE(review): the reference python carries the same slip).
+                DataEntity portdef = new DataEntity("PortDef", target, null, TARGET);
+                portdef.validate();
+            }
+        } catch (Exception e) {
+            ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE130", String.format(
+                    "ValueError: \"%s\" do not meet requirements for type \"%s\"",
+                    _properties.toString(), SHORTNAME)));
+        }
+    }
+
+}
+
+/*python
+
+from toscaparser.common.exception import ValidationIssueCollector
+from toscaparser.common.exception import InvalidTypeAdditionalRequirementsError
+from toscaparser.utils.gettextutils import _
+import org.openecomp.sdc.toscaparser.api.utils.validateutils as validateutils
+
+log = logging.getLogger('tosca')
+
+
+class PortSpec(object):
+ '''Parent class for tosca.datatypes.network.PortSpec type.'''
+
+ SHORTNAME = 'PortSpec'
+ TYPE_URI = 'tosca.datatypes.network.' + SHORTNAME
+
+ PROPERTY_NAMES = (
+ PROTOCOL, SOURCE, SOURCE_RANGE,
+ TARGET, TARGET_RANGE
+ ) = (
+ 'protocol', 'source', 'source_range',
+ 'target', 'target_range'
+ )
+
+ # TODO(TBD) May want to make this a subclass of DataType
+ # and change init method to set PortSpec's properties
+ def __init__(self):
+ pass
+
+ # The following additional requirements MUST be tested:
+ # 1) A valid PortSpec MUST have at least one of the following properties:
+ # target, target_range, source or source_range.
+ # 2) A valid PortSpec MUST have a value for the source property that
+ # is within the numeric range specified by the property source_range
+ # when source_range is specified.
+ # 3) A valid PortSpec MUST have a value for the target property that is
+ # within the numeric range specified by the property target_range
+ # when target_range is specified.
+ @staticmethod
+ def validate_additional_req(properties, prop_name, custom_def=None, ):
+ try:
+ source = properties.get(PortSpec.SOURCE)
+ source_range = properties.get(PortSpec.SOURCE_RANGE)
+ target = properties.get(PortSpec.TARGET)
+ target_range = properties.get(PortSpec.TARGET_RANGE)
+
+ # verify one of the specified values is set
+ if source is None and source_range is None and \
+ target is None and target_range is None:
+ ValidationIssueCollector.appendException(
+ InvalidTypeAdditionalRequirementsError(
+ type=PortSpec.TYPE_URI))
+ # Validate source value is in specified range
+ if source and source_range:
+ validateutils.validate_value_in_range(source, source_range,
+ PortSpec.SOURCE)
+ else:
+ from toscaparser.dataentity import DataEntity
+ portdef = DataEntity('PortDef', source, None, PortSpec.SOURCE)
+ portdef.validate()
+ # Validate target value is in specified range
+ if target and target_range:
+ validateutils.validate_value_in_range(target, target_range,
+ PortSpec.TARGET)
+ else:
+ from toscaparser.dataentity import DataEntity
+ portdef = DataEntity('PortDef', source, None, PortSpec.TARGET)
+ portdef.validate()
+ except Exception:
+ msg = _('"%(value)s" do not meet requirements '
+ 'for type "%(type)s".') \
+ % {'value': properties, 'type': PortSpec.SHORTNAME}
+ ValidationIssueCollector.appendException(
+ ValueError(msg))
+*/
diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/PropertyDef.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/PropertyDef.java
new file mode 100644
index 0000000..484d17e
--- /dev/null
+++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/PropertyDef.java
@@ -0,0 +1,249 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.sdc.toscaparser.api.elements;
+
+import java.util.LinkedHashMap;
+import java.util.Map;
+
+import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue;
+import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder;
+
+public class PropertyDef {
+    // A single TOSCA property definition: a name, an optional value, and a
+    // schema map carrying the type/default/required/status keynames.
+
+    private static final String PROPERTY_KEYNAME_DEFAULT = "default";
+    private static final String PROPERTY_KEYNAME_REQUIRED = "required";
+    private static final String PROPERTY_KEYNAME_STATUS = "status";
+    private static final String VALID_PROPERTY_KEYNAMES[] = {
+            PROPERTY_KEYNAME_DEFAULT,
+            PROPERTY_KEYNAME_REQUIRED,
+            PROPERTY_KEYNAME_STATUS};
+
+    private static final boolean PROPERTY_REQUIRED_DEFAULT = true;
+
+    private static final String VALID_REQUIRED_VALUES[] = {"true", "false"};
+
+    private static final String PROPERTY_STATUS_SUPPORTED = "supported";
+    private static final String PROPERTY_STATUS_EXPERIMENTAL = "experimental";
+    private static final String VALID_STATUS_VALUES[] = {
+            PROPERTY_STATUS_SUPPORTED, PROPERTY_STATUS_EXPERIMENTAL};
+
+    private static final String PROPERTY_STATUS_DEFAULT = PROPERTY_STATUS_SUPPORTED;
+
+    private String name;
+    private Object value;
+    private LinkedHashMap<String, Object> schema;
+    private String _status;
+    private boolean _required;
+
+    /**
+     * Creates a property definition.
+     *
+     * @param pdName   property name
+     * @param pdValue  property value (may be null)
+     * @param pdSchema schema map for the property; when non-null it must
+     *                 contain a "type" entry, otherwise a validation issue
+     *                 is recorded
+     */
+    public PropertyDef(String pdName, Object pdValue,
+                       LinkedHashMap<String, Object> pdSchema) {
+        name = pdName;
+        value = pdValue;
+        schema = pdSchema;
+        _status = PROPERTY_STATUS_DEFAULT;
+        _required = PROPERTY_REQUIRED_DEFAULT;
+
+        if (schema == null) {
+            return;
+        }
+        // A schema without a "type" entry is malformed; report it but keep
+        // going so the remaining keynames are still honored.
+        if (schema.get("type") == null) {
+            ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE131", String.format(
+                    "InvalidSchemaError: Schema definition of \"%s\" must have a \"type\" attribute", name)));
+        }
+        readRequiredFromSchema();
+        readStatusFromSchema();
+    }
+
+    /** Returns the schema's "default" entry, or null when absent. */
+    public Object getDefault() {
+        return schema != null ? schema.get(PROPERTY_KEYNAME_DEFAULT) : null;
+    }
+
+    /** Whether the property is required (schema "required"; default true). */
+    public boolean isRequired() {
+        return _required;
+    }
+
+    // Overrides the default required flag when the schema carries a boolean
+    // "required" entry; any non-boolean value is reported as invalid.
+    private void readRequiredFromSchema() {
+        Object declared = schema.get(PROPERTY_KEYNAME_REQUIRED);
+        if (declared == null) {
+            return;
+        }
+        if (declared instanceof Boolean) {
+            _required = (Boolean) declared;
+            return;
+        }
+        ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE132", String.format(
+                "Schema definition of \"%s\" has \"required\" attribute with an invalid value",
+                name)));
+    }
+
+    /** Returns the property's status ("supported" unless overridden). */
+    public String getStatus() {
+        return _status;
+    }
+
+    // Overrides the default status when the schema carries a recognized
+    // "status" entry; unrecognized values are reported as invalid.
+    private void readStatusFromSchema() {
+        String declared = (String) schema.get(PROPERTY_KEYNAME_STATUS);
+        if (declared == null) {
+            return;
+        }
+        for (String valid : VALID_STATUS_VALUES) {
+            if (valid.equals(declared)) {
+                _status = declared;
+                return;
+            }
+        }
+        ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE006", String.format(
+                "Schema definition of \"%s\" has \"status\" attribute with an invalid value",
+                name)));
+    }
+
+    /** @return the property name */
+    public String getName() {
+        return name;
+    }
+
+    /** @return the raw schema map (may be null) */
+    public LinkedHashMap<String, Object> getSchema() {
+        return schema;
+    }
+
+    /** @return the property value ("PD" avoids clashing with EntityType's getValue) */
+    public Object getPDValue() {
+        return value;
+    }
+
+}
+/*python
+
+from toscaparser.common.exception import ValidationIssueCollector
+from toscaparser.common.exception import InvalidSchemaError
+from toscaparser.common.exception import TOSCAException
+from toscaparser.utils.gettextutils import _
+
+
+class PropertyDef(object):
+ '''TOSCA built-in Property type.'''
+
+ VALID_PROPERTY_KEYNAMES = (PROPERTY_KEYNAME_DEFAULT,
+ PROPERTY_KEYNAME_REQUIRED,
+ PROPERTY_KEYNAME_STATUS) = \
+ ('default', 'required', 'status')
+
+ PROPERTY_REQUIRED_DEFAULT = True
+
+ VALID_REQUIRED_VALUES = ['true', 'false']
+ VALID_STATUS_VALUES = (PROPERTY_STATUS_SUPPORTED,
+ PROPERTY_STATUS_EXPERIMENTAL) = \
+ ('supported', 'experimental')
+
+ PROPERTY_STATUS_DEFAULT = PROPERTY_STATUS_SUPPORTED
+
+ def __init__(self, name, value=None, schema=None):
+ self.name = name
+ self.value = value
+ self.schema = schema
+ self._status = self.PROPERTY_STATUS_DEFAULT
+ self._required = self.PROPERTY_REQUIRED_DEFAULT
+
+ # Validate required 'type' property exists
+ try:
+ self.schema['type']
+ except KeyError:
+ msg = (_('Schema definition of "%(pname)s" must have a "type" '
+ 'attribute.') % dict(pname=self.name))
+ ValidationIssueCollector.appendException(
+ InvalidSchemaError(message=msg))
+
+ if self.schema:
+ self._load_required_attr_from_schema()
+ self._load_status_attr_from_schema()
+
+ @property
+ def default(self):
+ if self.schema:
+ for prop_key, prop_value in self.schema.items():
+ if prop_key == self.PROPERTY_KEYNAME_DEFAULT:
+ return prop_value
+ return None
+
+ @property
+ def required(self):
+ return self._required
+
+ def _load_required_attr_from_schema(self):
+ # IF 'required' keyname exists verify it's a boolean,
+ # if so override default
+ if self.PROPERTY_KEYNAME_REQUIRED in self.schema:
+ value = self.schema[self.PROPERTY_KEYNAME_REQUIRED]
+ if isinstance(value, bool):
+ self._required = value
+ else:
+ valid_values = ', '.join(self.VALID_REQUIRED_VALUES)
+ attr = self.PROPERTY_KEYNAME_REQUIRED
+ TOSCAException.generate_inv_schema_property_error(self,
+ attr,
+ value,
+ valid_values)
+
+ @property
+ def status(self):
+ return self._status
+
+ def _load_status_attr_from_schema(self):
+ # IF 'status' keyname exists verify it's a valid value,
+ # if so override default
+ if self.PROPERTY_KEYNAME_STATUS in self.schema:
+ value = self.schema[self.PROPERTY_KEYNAME_STATUS]
+ if value in self.VALID_STATUS_VALUES:
+ self._status = value
+ else:
+ valid_values = ', '.join(self.VALID_STATUS_VALUES)
+ attr = self.PROPERTY_KEYNAME_STATUS
+ TOSCAException.generate_inv_schema_property_error(self,
+ attr,
+ value,
+ valid_values)
+*/
diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/RelationshipType.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/RelationshipType.java
new file mode 100644
index 0000000..4c39ec2
--- /dev/null
+++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/RelationshipType.java
@@ -0,0 +1,121 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.sdc.toscaparser.api.elements;
+
+import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue;
+import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder;
+
+import java.util.LinkedHashMap;
+
+public class RelationshipType extends StatefulEntityType {
+
+ private static final String DERIVED_FROM = "derived_from";
+ private static final String VALID_TARGET_TYPES = "valid_target_types";
+ private static final String INTERFACES = "interfaces";
+ private static final String ATTRIBUTES = "attributes";
+ private static final String PROPERTIES = "properties";
+ private static final String DESCRIPTION = "description";
+ private static final String VERSION = "version";
+ private static final String CREDENTIAL = "credential";
+
+ private static final String[] SECTIONS = {
+ DERIVED_FROM, VALID_TARGET_TYPES, INTERFACES,
+ ATTRIBUTES, PROPERTIES, DESCRIPTION, VERSION, CREDENTIAL};
+
+ private String capabilityName;
+ private LinkedHashMap<String, Object> customDef;
+
+ public RelationshipType(String type, String capabilityName, LinkedHashMap<String, Object> customDef) {
+ super(type, RELATIONSHIP_PREFIX, customDef);
+ this.capabilityName = capabilityName;
+ this.customDef = customDef;
+ }
+
+ public RelationshipType getParentType() {
+ // Return a relationship this reletionship is derived from.'''
+ String prel = derivedFrom(defs);
+ if (prel != null) {
+ return new RelationshipType(prel, null, customDef);
+ }
+ return null;
+ }
+
+ public Object getValidTargetTypes() {
+ return entityValue(defs, "valid_target_types");
+ }
+
+ private void validateKeys() {
+ for (String key : defs.keySet()) {
+ boolean bFound = false;
+ for (String section : SECTIONS) {
+ if (key.equals(section)) {
+ bFound = true;
+ break;
+ }
+ }
+ if (!bFound) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE133", String.format(
+ "UnknownFieldError: Relationshiptype \"%s\" has unknown field \"%s\"", type, key)));
+ }
+ }
+ }
+}
+
+/*python
+
+from toscaparser.common.exception import ValidationIssueCollector
+from toscaparser.common.exception import UnknownFieldError
+from toscaparser.elements.statefulentitytype import StatefulEntityType
+
+
+class RelationshipType(StatefulEntityType):
+ '''TOSCA built-in relationship type.'''
+ SECTIONS = (DERIVED_FROM, VALID_TARGET_TYPES, INTERFACES,
+ ATTRIBUTES, PROPERTIES, DESCRIPTION, VERSION,
+ CREDENTIAL) = ('derived_from', 'valid_target_types',
+ 'interfaces', 'attributes', 'properties',
+ 'description', 'version', 'credential')
+
+ def __init__(self, type, capability_name=None, custom_def=None):
+ super(RelationshipType, self).__init__(type, self.RELATIONSHIP_PREFIX,
+ custom_def)
+ self.capability_name = capability_name
+ self.custom_def = custom_def
+ self._validate_keys()
+
+ @property
+ def parent_type(self):
+        '''Return a relationship this relationship is derived from.'''
+ prel = self.derived_from(self.defs)
+ if prel:
+ return RelationshipType(prel, self.custom_def)
+
+ @property
+ def valid_target_types(self):
+ return self.entity_value(self.defs, 'valid_target_types')
+
+ def _validate_keys(self):
+ for key in self.defs.keys():
+ if key not in self.SECTIONS:
+ ValidationIssueCollector.appendException(
+ UnknownFieldError(what='Relationshiptype "%s"' % self.type,
+ field=key))
+*/
diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnit.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnit.java
new file mode 100644
index 0000000..1eaa8a0
--- /dev/null
+++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnit.java
@@ -0,0 +1,287 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.sdc.toscaparser.api.elements;
+
+import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue;
+import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder;
+import org.onap.sdc.toscaparser.api.utils.ValidateUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.HashMap;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+public abstract class ScalarUnit {
+
+ private static Logger log = LoggerFactory.getLogger(ScalarUnit.class.getName());
+
+ private static final String SCALAR_UNIT_SIZE = "scalar-unit.size";
+ private static final String SCALAR_UNIT_FREQUENCY = "scalar-unit.frequency";
+ private static final String SCALAR_UNIT_TIME = "scalar-unit.time";
+
+ public static final String[] SCALAR_UNIT_TYPES = {
+ SCALAR_UNIT_SIZE, SCALAR_UNIT_FREQUENCY, SCALAR_UNIT_TIME
+ };
+
+ private Object value;
+ private HashMap<String, Object> scalarUnitDict;
+ private String scalarUnitDefault;
+
+ public ScalarUnit(Object value) {
+ this.value = value;
+ scalarUnitDict = new HashMap<>();
+ scalarUnitDefault = "";
+ }
+
+ void putToScalarUnitDict(String key, Object value) {
+ scalarUnitDict.put(key, value);
+ }
+
+ void setScalarUnitDefault(String scalarUnitDefault) {
+ this.scalarUnitDefault = scalarUnitDefault;
+ }
+
+ private String checkUnitInScalarStandardUnits(String inputUnit) {
+ // Check whether the input unit is following specified standard
+
+ // If unit is not following specified standard, convert it to standard
+ // unit after displaying a warning message.
+
+ if (scalarUnitDict.get(inputUnit) != null) {
+ return inputUnit;
+ } else {
+ for (String key : scalarUnitDict.keySet()) {
+ if (key.toUpperCase().equals(inputUnit.toUpperCase())) {
+ log.debug("ScalarUnit - checkUnitInScalarStandardUnits - \n"
+ + "The unit {} does not follow scalar unit standards\n"
+ + "using {} instead",
+ inputUnit, key);
+ return key;
+ }
+ }
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE007", String.format(
+ "'The unit \"%s\" is not valid. Valid units are \n%s",
+ inputUnit, scalarUnitDict.keySet().toString())));
+ return inputUnit;
+ }
+ }
+
+ public Object validateScalarUnit() {
+ Pattern pattern = Pattern.compile("([0-9.]+)\\s*(\\w+)");
+ Matcher matcher = pattern.matcher(value.toString());
+ if (matcher.find()) {
+ ValidateUtils.strToNum(matcher.group(1));
+ String scalarUnit = checkUnitInScalarStandardUnits(matcher.group(2));
+ value = matcher.group(1) + " " + scalarUnit;
+ } else {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE134", String.format(
+ "ValueError: \"%s\" is not a valid scalar-unit", value.toString())));
+ }
+ return value;
+ }
+
+ public double getNumFromScalarUnit(String unit) {
+ if (unit != null) {
+ unit = checkUnitInScalarStandardUnits(unit);
+ } else {
+ unit = scalarUnitDefault;
+ }
+ Pattern pattern = Pattern.compile("([0-9.]+)\\s*(\\w+)");
+ Matcher matcher = pattern.matcher(value.toString());
+ if (matcher.find()) {
+ final double minimalNum = 0.0000000000001;
+
+ ValidateUtils.strToNum(matcher.group(1));
+ String scalarUnit = checkUnitInScalarStandardUnits(matcher.group(2));
+ value = matcher.group(1) + " " + scalarUnit;
+ Object on1 = ValidateUtils.strToNum(matcher.group(1)) != null ? ValidateUtils.strToNum(matcher.group(1)) : 0;
+ Object on2 = scalarUnitDict.get(matcher.group(2)) != null ? scalarUnitDict.get(matcher.group(2)) : 0;
+ Object on3 = scalarUnitDict.get(unit) != null ? scalarUnitDict.get(unit) : 0;
+
+ Double n1 = new Double(on1.toString());
+ Double n2 = new Double(on2.toString());
+ Double n3 = new Double(on3.toString());
+ double converted = n1 * n2 / n3;
+
+ if (Math.abs(converted - Math.round(converted)) < minimalNum) {
+ converted = Math.round(converted);
+ }
+ return converted;
+ }
+ return 0.0;
+ }
+
+ private static HashMap<String, String> scalarUnitMapping = getScalarUnitMappings();
+
+ private static HashMap<String, String> getScalarUnitMappings() {
+ HashMap<String, String> map = new HashMap<>();
+ map.put(SCALAR_UNIT_FREQUENCY, "ScalarUnitFrequency");
+ map.put(SCALAR_UNIT_SIZE, "ScalarUnitSize");
+ map.put(SCALAR_UNIT_TIME, "ScalarUnit_Time");
+ return map;
+ }
+
+ public static ScalarUnit getScalarunitClass(String type, Object val) {
+ if (type.equals(SCALAR_UNIT_SIZE)) {
+ return new ScalarUnitSize(val);
+ } else if (type.equals(SCALAR_UNIT_TIME)) {
+ return new ScalarUnitTime(val);
+ } else if (type.equals(SCALAR_UNIT_FREQUENCY)) {
+ return new ScalarUnitFrequency(val);
+ }
+ return null;
+ }
+
+ public static double getScalarunitValue(String type, Object value, String unit) {
+ if (type.equals(SCALAR_UNIT_SIZE)) {
+ return (new ScalarUnitSize(value)).getNumFromScalarUnit(unit);
+ }
+ if (type.equals(SCALAR_UNIT_TIME)) {
+ return (new ScalarUnitTime(value)).getNumFromScalarUnit(unit);
+ }
+ if (type.equals(SCALAR_UNIT_FREQUENCY)) {
+ return (new ScalarUnitFrequency(value)).getNumFromScalarUnit(unit);
+ }
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE135", String.format(
+ "TypeError: \"%s\" is not a valid scalar-unit type", type)));
+ return 0.0;
+ }
+
+}
+
+/*python
+
+from toscaparser.common.exception import ValidationIssueCollector
+from toscaparser.utils.gettextutils import _
+from toscaparser.utils import validateutils
+
+log = logging.getLogger('tosca')
+
+
+class ScalarUnit(object):
+ '''Parent class for scalar-unit type.'''
+
+ SCALAR_UNIT_TYPES = (
+ SCALAR_UNIT_SIZE, SCALAR_UNIT_FREQUENCY, SCALAR_UNIT_TIME
+ ) = (
+ 'scalar-unit.size', 'scalar-unit.frequency', 'scalar-unit.time'
+ )
+
+ def __init__(self, value):
+ self.value = value
+
+ def _check_unit_in_scalar_standard_units(self, input_unit):
+ """Check whether the input unit is following specified standard
+
+ If unit is not following specified standard, convert it to standard
+ unit after displaying a warning message.
+ """
+ if input_unit in self.scalarUnitDict.keys():
+ return input_unit
+ else:
+ for key in self.scalarUnitDict.keys():
+ if key.upper() == input_unit.upper():
+ log.warning(_('The unit "%(unit)s" does not follow '
+ 'scalar unit standards; using "%(key)s" '
+ 'instead.') % {'unit': input_unit,
+ 'key': key})
+ return key
+ msg = (_('The unit "%(unit)s" is not valid. Valid units are '
+ '"%(valid_units)s".') %
+ {'unit': input_unit,
+ 'valid_units': sorted(self.scalarUnitDict.keys())})
+ ValidationIssueCollector.appendException(ValueError(msg))
+
+ def validate_scalar_unit(self):
+ regex = re.compile('([0-9.]+)\s*(\w+)')
+ try:
+ result = regex.match(str(self.value)).groups()
+ validateutils.str_to_num(result[0])
+ scalar_unit = self._check_unit_in_scalar_standard_units(result[1])
+ self.value = ' '.join([result[0], scalar_unit])
+ return self.value
+
+ except Exception:
+ ValidationIssueCollector.appendException(
+ ValueError(_('"%s" is not a valid scalar-unit.')
+ % self.value))
+
+ def get_num_from_scalar_unit(self, unit=None):
+ if unit:
+ unit = self._check_unit_in_scalar_standard_units(unit)
+ else:
+ unit = self.scalarUnitDefault
+ self.validate_scalar_unit()
+
+ regex = re.compile('([0-9.]+)\s*(\w+)')
+ result = regex.match(str(self.value)).groups()
+ converted = (float(validateutils.str_to_num(result[0]))
+ * self.scalarUnitDict[result[1]]
+ / self.scalarUnitDict[unit])
+ if converted - int(converted) < 0.0000000000001:
+ converted = int(converted)
+ return converted
+
+
+class ScalarUnit_Size(ScalarUnit):
+
+ scalarUnitDefault = 'B'
+ scalarUnitDict = {'B': 1, 'kB': 1000, 'KiB': 1024, 'MB': 1000000,
+ 'MiB': 1048576, 'GB': 1000000000,
+ 'GiB': 1073741824, 'TB': 1000000000000,
+ 'TiB': 1099511627776}
+
+
+class ScalarUnit_Time(ScalarUnit):
+
+ scalarUnitDefault = 'ms'
+ scalarUnitDict = {'d': 86400, 'h': 3600, 'm': 60, 's': 1,
+ 'ms': 0.001, 'us': 0.000001, 'ns': 0.000000001}
+
+
+class ScalarUnit_Frequency(ScalarUnit):
+
+ scalarUnitDefault = 'GHz'
+ scalarUnitDict = {'Hz': 1, 'kHz': 1000,
+ 'MHz': 1000000, 'GHz': 1000000000}
+
+
+scalarunit_mapping = {
+ ScalarUnit.SCALAR_UNIT_FREQUENCY: ScalarUnit_Frequency,
+ ScalarUnit.SCALAR_UNIT_SIZE: ScalarUnit_Size,
+ ScalarUnit.SCALAR_UNIT_TIME: ScalarUnit_Time,
+ }
+
+
+def get_scalarunit_class(type):
+ return scalarunit_mapping.get(type)
+
+
+def get_scalarunit_value(type, value, unit=None):
+ if type in ScalarUnit.SCALAR_UNIT_TYPES:
+ ScalarUnit_Class = get_scalarunit_class(type)
+ return (ScalarUnit_Class(value).
+ get_num_from_scalar_unit(unit))
+ else:
+ ValidationIssueCollector.appendException(
+ TypeError(_('"%s" is not a valid scalar-unit type.') % type))
+*/
diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitFrequency.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitFrequency.java
new file mode 100644
index 0000000..ed10da9
--- /dev/null
+++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitFrequency.java
@@ -0,0 +1,39 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.sdc.toscaparser.api.elements;
+
+public class ScalarUnitFrequency extends ScalarUnit {
+
+ private static final Long HZ = 1L;
+ private static final Long KHZ = 1000L;
+ private static final Long MHZ = 1000000L;
+ private static final Long GHZ = 1000000000L;
+
+ public ScalarUnitFrequency(Object value) {
+ super(value);
+ setScalarUnitDefault("GHz");
+ putToScalarUnitDict("Hz", HZ);
+ putToScalarUnitDict("kHz", KHZ);
+ putToScalarUnitDict("MHz", MHZ);
+ putToScalarUnitDict("GHz", GHZ);
+ }
+
+}
diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitSize.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitSize.java
new file mode 100644
index 0000000..78687a1
--- /dev/null
+++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitSize.java
@@ -0,0 +1,43 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.sdc.toscaparser.api.elements;
+
+import org.onap.sdc.toscaparser.api.elements.enums.FileSize;
+
+public class ScalarUnitSize extends ScalarUnit {
+
+
+
+ public ScalarUnitSize(Object value) {
+ super(value);
+
+ setScalarUnitDefault("B");
+ putToScalarUnitDict("B", FileSize.B);
+ putToScalarUnitDict("kB", FileSize.KB);
+ putToScalarUnitDict("MB", FileSize.MB);
+ putToScalarUnitDict("GB", FileSize.GB);
+ putToScalarUnitDict("TB", FileSize.TB);
+ putToScalarUnitDict("kiB", FileSize.KIB);
+ putToScalarUnitDict("MiB", FileSize.MIB);
+ putToScalarUnitDict("GiB", FileSize.GIB);
+ putToScalarUnitDict("TiB", FileSize.TIB);
+ }
+}
diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitTime.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitTime.java
new file mode 100644
index 0000000..8d2c13e
--- /dev/null
+++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/ScalarUnitTime.java
@@ -0,0 +1,37 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.sdc.toscaparser.api.elements;
+
+public class ScalarUnitTime extends ScalarUnit {
+
+ public ScalarUnitTime(Object value) {
+ super(value);
+ setScalarUnitDefault("ms");
+ putToScalarUnitDict("d", 86400L);
+ putToScalarUnitDict("h", 3600L);
+ putToScalarUnitDict("m", 60L);
+ putToScalarUnitDict("s", 1L);
+ putToScalarUnitDict("ms", 0.001);
+ putToScalarUnitDict("us", 0.000001);
+ putToScalarUnitDict("ns", 0.000000001);
+ }
+
+}
diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/StatefulEntityType.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/StatefulEntityType.java
new file mode 100644
index 0000000..b710dda
--- /dev/null
+++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/StatefulEntityType.java
@@ -0,0 +1,234 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.sdc.toscaparser.api.elements;
+
+import org.onap.sdc.toscaparser.api.UnsupportedType;
+import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue;
+import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder;
+
+import java.util.ArrayList;
+import java.util.LinkedHashMap;
+import java.util.Map;
+
+
+public class StatefulEntityType extends EntityType {
+ // Class representing TOSCA states
+
+ public static final String[] INTERFACE_NODE_LIFECYCLE_OPERATIONS = {
+ "create", "configure", "start", "stop", "delete"};
+
+ public static final String[] INTERFACE_RELATIONSHIP_CONFIGURE_OPERATIONS = {
+ "post_configure_source", "post_configure_target", "add_target", "remove_target"};
+
+ public StatefulEntityType() {
+ // void constructor for subclasses that don't want super
+ }
+
+ @SuppressWarnings("unchecked")
+ public StatefulEntityType(String entityType, String prefix, LinkedHashMap<String, Object> customDef) {
+
+ String entireEntityType = entityType;
+ if (UnsupportedType.validateType(entireEntityType)) {
+ defs = null;
+ } else {
+ if (entityType.startsWith(TOSCA + ":")) {
+ entityType = entityType.substring(TOSCA.length() + 1);
+ entireEntityType = prefix + entityType;
+ }
+ if (!entityType.startsWith(TOSCA)) {
+ entireEntityType = prefix + entityType;
+ }
+ if (TOSCA_DEF.get(entireEntityType) != null) {
+ defs = (LinkedHashMap<String, Object>) TOSCA_DEF.get(entireEntityType);
+ entityType = entireEntityType;
+ } else if (customDef != null && customDef.get(entityType) != null) {
+ defs = (LinkedHashMap<String, Object>) customDef.get(entityType);
+ } else {
+ defs = null;
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE136", String.format(
+ "InvalidTypeError: \"%s\" is not a valid type", entityType)));
+ }
+ }
+ type = entityType;
+ }
+
+ @SuppressWarnings("unchecked")
+ public ArrayList<PropertyDef> getPropertiesDefObjects() {
+ // Return a list of property definition objects
+ ArrayList<PropertyDef> properties = new ArrayList<PropertyDef>();
+ LinkedHashMap<String, Object> props = (LinkedHashMap<String, Object>) getDefinition(PROPERTIES);
+ if (props != null) {
+ for (Map.Entry<String, Object> me : props.entrySet()) {
+ String pdname = me.getKey();
+ Object to = me.getValue();
+ if (to == null || !(to instanceof LinkedHashMap)) {
+ String s = to == null ? "null" : to.getClass().getSimpleName();
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE137", String.format(
+ "Unexpected type error: property \"%s\" has type \"%s\" (expected dict)", pdname, s)));
+ continue;
+ }
+ LinkedHashMap<String, Object> pdschema = (LinkedHashMap<String, Object>) to;
+ properties.add(new PropertyDef(pdname, null, pdschema));
+ }
+ }
+ return properties;
+ }
+
+ public LinkedHashMap<String, PropertyDef> getPropertiesDef() {
+ LinkedHashMap<String, PropertyDef> pds = new LinkedHashMap<String, PropertyDef>();
+ for (PropertyDef pd : getPropertiesDefObjects()) {
+ pds.put(pd.getName(), pd);
+ }
+ return pds;
+ }
+
+ public PropertyDef getPropertyDefValue(String name) {
+ // Return the property definition associated with a given name
+ PropertyDef pd = null;
+ LinkedHashMap<String, PropertyDef> propsDef = getPropertiesDef();
+ if (propsDef != null) {
+ pd = propsDef.get(name);
+ }
+ return pd;
+ }
+
+ public ArrayList<AttributeDef> getAttributesDefObjects() {
+ // Return a list of attribute definition objects
+ @SuppressWarnings("unchecked")
+ LinkedHashMap<String, Object> attrs = (LinkedHashMap<String, Object>) getValue(ATTRIBUTES, null, true);
+ ArrayList<AttributeDef> ads = new ArrayList<>();
+ if (attrs != null) {
+ for (Map.Entry<String, Object> me : attrs.entrySet()) {
+ String attr = me.getKey();
+ @SuppressWarnings("unchecked")
+ LinkedHashMap<String, Object> adschema = (LinkedHashMap<String, Object>) me.getValue();
+ ads.add(new AttributeDef(attr, null, adschema));
+ }
+ }
+ return ads;
+ }
+
+ public LinkedHashMap<String, AttributeDef> getAttributesDef() {
+ // Return a dictionary of attribute definition name-object pairs
+
+ LinkedHashMap<String, AttributeDef> ads = new LinkedHashMap<>();
+ for (AttributeDef ado : getAttributesDefObjects()) {
+ ads.put(((AttributeDef) ado).getName(), ado);
+ }
+ return ads;
+ }
+
+ public AttributeDef getAttributeDefValue(String name) {
+ // Return the attribute definition associated with a given name
+ AttributeDef ad = null;
+ LinkedHashMap<String, AttributeDef> attrsDef = getAttributesDef();
+ if (attrsDef != null) {
+ ad = attrsDef.get(name);
+ }
+ return ad;
+ }
+
+ public String getType() {
+ return type;
+ }
+}
+
+/*python
+
+from toscaparser.common.exception import InvalidTypeError
+from toscaparser.elements.attribute_definition import AttributeDef
+from toscaparser.elements.entity_type import EntityType
+from toscaparser.elements.property_definition import PropertyDef
+from toscaparser.unsupportedtype import UnsupportedType
+
+
+class StatefulEntityType(EntityType):
+ '''Class representing TOSCA states.'''
+
+ interfaces_node_lifecycle_operations = ['create',
+ 'configure', 'start',
+ 'stop', 'delete']
+
+ interfaces_relationship_configure_operations = ['post_configure_source',
+ 'post_configure_target',
+ 'add_target',
+ 'remove_target']
+
+ def __init__(self, entitytype, prefix, custom_def=None):
+ entire_entitytype = entitytype
+ if UnsupportedType.validate_type(entire_entitytype):
+ self.defs = None
+ else:
+ if entitytype.startswith(self.TOSCA + ":"):
+ entitytype = entitytype[(len(self.TOSCA) + 1):]
+ entire_entitytype = prefix + entitytype
+ if not entitytype.startswith(self.TOSCA):
+ entire_entitytype = prefix + entitytype
+ if entire_entitytype in list(self.TOSCA_DEF.keys()):
+ self.defs = self.TOSCA_DEF[entire_entitytype]
+ entitytype = entire_entitytype
+ elif custom_def and entitytype in list(custom_def.keys()):
+ self.defs = custom_def[entitytype]
+ else:
+ self.defs = None
+ ValidationIssueCollector.appendException(
+ InvalidTypeError(what=entitytype))
+ self.type = entitytype
+
+ def get_properties_def_objects(self):
+ '''Return a list of property definition objects.'''
+ properties = []
+ props = self.get_definition(self.PROPERTIES)
+ if props:
+ for prop, schema in props.items():
+ properties.append(PropertyDef(prop, None, schema))
+ return properties
+
+ def get_properties_def(self):
+ '''Return a dictionary of property definition name-object pairs.'''
+ return {prop.name: prop
+ for prop in self.get_properties_def_objects()}
+
+ def get_property_def_value(self, name):
+ '''Return the property definition associated with a given name.'''
+ props_def = self.get_properties_def()
+ if props_def and name in props_def.keys():
+ return props_def[name].value
+
+ def get_attributes_def_objects(self):
+ '''Return a list of attribute definition objects.'''
+ attrs = self.get_value(self.ATTRIBUTES, parent=True)
+ if attrs:
+ return [AttributeDef(attr, None, schema)
+ for attr, schema in attrs.items()]
+ return []
+
+ def get_attributes_def(self):
+ '''Return a dictionary of attribute definition name-object pairs.'''
+ return {attr.name: attr
+ for attr in self.get_attributes_def_objects()}
+
+ def get_attribute_def_value(self, name):
+ '''Return the attribute definition associated with a given name.'''
+ attrs_def = self.get_attributes_def()
+ if attrs_def and name in attrs_def.keys():
+ return attrs_def[name].value
+*/
diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/TypeValidation.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/TypeValidation.java
new file mode 100644
index 0000000..18dd5ca
--- /dev/null
+++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/TypeValidation.java
@@ -0,0 +1,173 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.sdc.toscaparser.api.elements;
+
+import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue;
+import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder;
+
+import java.util.ArrayList;
+import java.util.LinkedHashMap;
+
+import org.onap.sdc.toscaparser.api.extensions.ExtTools;
+
+public class TypeValidation {
+
+ private static final String DEFINITION_VERSION = "tosca_definitions_version";
+ private static final String DESCRIPTION = "description";
+ private static final String IMPORTS = "imports";
+ private static final String DSL_DEFINITIONS = "dsl_definitions";
+ private static final String NODE_TYPES = "node_types";
+ private static final String REPOSITORIES = "repositories";
+ private static final String DATA_TYPES = "data_types";
+ private static final String ARTIFACT_TYPES = "artifact_types";
+ private static final String GROUP_TYPES = "group_types";
+ private static final String RELATIONSHIP_TYPES = "relationship_types";
+ private static final String CAPABILITY_TYPES = "capability_types";
+ private static final String INTERFACE_TYPES = "interface_types";
+ private static final String POLICY_TYPES = "policy_types";
+ private static final String TOPOLOGY_TEMPLATE = "topology_template";
+ //Pavel
+ private static final String METADATA = "metadata";
+
+ private String ALLOWED_TYPE_SECTIONS[] = {
+ DEFINITION_VERSION, DESCRIPTION, IMPORTS,
+ DSL_DEFINITIONS, NODE_TYPES, REPOSITORIES,
+ DATA_TYPES, ARTIFACT_TYPES, GROUP_TYPES,
+ RELATIONSHIP_TYPES, CAPABILITY_TYPES,
+ INTERFACE_TYPES, POLICY_TYPES,
+ TOPOLOGY_TEMPLATE, METADATA
+ };
+
+ private static ArrayList<String> VALID_TEMPLATE_VERSIONS = _getVTV();
+
+ private static ArrayList<String> _getVTV() {
+ ArrayList<String> vtv = new ArrayList<>();
+ vtv.add("tosca_simple_yaml_1_0");
+ vtv.add("tosca_simple_yaml_1_1");
+ ExtTools exttools = new ExtTools();
+ vtv.addAll(exttools.getVersions());
+ return vtv;
+ }
+
+ //private LinkedHashMap<String,Object> customTypes;
+ private Object importDef;
+ //private String version;
+
+ public TypeValidation(LinkedHashMap<String, Object> _customTypes,
+ Object _importDef) {
+ importDef = _importDef;
+ _validateTypeKeys(_customTypes);
+ }
+
+ private void _validateTypeKeys(LinkedHashMap<String, Object> customTypes) {
+
+ String sVersion = (String) customTypes.get(DEFINITION_VERSION);
+ if (sVersion != null) {
+ _validateTypeVersion(sVersion);
+ //version = sVersion;
+ }
+ for (String name : customTypes.keySet()) {
+ boolean bFound = false;
+ for (String ats : ALLOWED_TYPE_SECTIONS) {
+ if (name.equals(ats)) {
+ bFound = true;
+ break;
+ }
+ }
+ if (!bFound) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE138", String.format(
+ "UnknownFieldError: Template \"%s\" contains unknown field \"%s\"",
+ importDef.toString(), name)));
+ }
+ }
+ }
+
+ private void _validateTypeVersion(String sVersion) {
+ boolean bFound = false;
+ String allowed = "";
+ for (String atv : VALID_TEMPLATE_VERSIONS) {
+ allowed += "\"" + atv + "\" ";
+ if (sVersion.equals(atv)) {
+ bFound = true;
+ break;
+ }
+ }
+ if (!bFound) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE139", String.format(
+ "InvalidTemplateVersion: version \"%s\" in \"%s\" is not supported\n" +
+ "Allowed versions: [%s]",
+ sVersion, importDef.toString(), allowed)));
+ }
+ }
+}
+
+/*python
+
+from toscaparser.common.exception import ValidationIssueCollector
+from toscaparser.common.exception import InvalidTemplateVersion
+from toscaparser.common.exception import UnknownFieldError
+from toscaparser.extensions.exttools import ExtTools
+
+
+class TypeValidation(object):
+
+ ALLOWED_TYPE_SECTIONS = (DEFINITION_VERSION, DESCRIPTION, IMPORTS,
+ DSL_DEFINITIONS, NODE_TYPES, REPOSITORIES,
+ DATA_TYPES, ARTIFACT_TYPES, GROUP_TYPES,
+ RELATIONSHIP_TYPES, CAPABILITY_TYPES,
+ INTERFACE_TYPES, POLICY_TYPES,
+ TOPOLOGY_TEMPLATE) = \
+ ('tosca_definitions_version', 'description', 'imports',
+ 'dsl_definitions', 'node_types', 'repositories',
+ 'data_types', 'artifact_types', 'group_types',
+ 'relationship_types', 'capability_types',
+ 'interface_types', 'policy_types', 'topology_template')
+ VALID_TEMPLATE_VERSIONS = ['tosca_simple_yaml_1_0']
+ exttools = ExtTools()
+ VALID_TEMPLATE_VERSIONS.extend(exttools.get_versions())
+
+ def __init__(self, custom_types, import_def):
+ self.import_def = import_def
+ self._validate_type_keys(custom_types)
+
+ def _validate_type_keys(self, custom_type):
+ version = custom_type[self.DEFINITION_VERSION] \
+ if self.DEFINITION_VERSION in custom_type \
+ else None
+ if version:
+ self._validate_type_version(version)
+ self.version = version
+
+ for name in custom_type:
+ if name not in self.ALLOWED_TYPE_SECTIONS:
+ ValidationIssueCollector.appendException(
+# UnknownFieldError(what='Template ' + (self.import_def),
+ UnknownFieldError(what= (self.import_def),
+ field=name))
+
+ def _validate_type_version(self, version):
+ if version not in self.VALID_TEMPLATE_VERSIONS:
+ ValidationIssueCollector.appendException(
+ InvalidTemplateVersion(
+# what=version + ' in ' + self.import_def,
+ what=self.import_def,
+ valid_versions=', '. join(self.VALID_TEMPLATE_VERSIONS)))
+*/
diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Constraint.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Constraint.java
new file mode 100644
index 0000000..dd77659
--- /dev/null
+++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Constraint.java
@@ -0,0 +1,309 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.sdc.toscaparser.api.elements.constraints;
+
+import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue;
+import org.onap.sdc.toscaparser.api.elements.ScalarUnit;
+import org.onap.sdc.toscaparser.api.functions.Function;
+import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder;
+
+import java.util.ArrayList;
+import java.util.LinkedHashMap;
+import java.util.List;
+
+public abstract class Constraint {
+
+ // Parent class for constraints for a Property or Input
+
+ protected static final String EQUAL = "equal";
+ protected static final String GREATER_THAN = "greater_than";
+ protected static final String GREATER_OR_EQUAL = "greater_or_equal";
+ protected static final String LESS_THAN = "less_than";
+ protected static final String LESS_OR_EQUAL = "less_or_equal";
+ protected static final String IN_RANGE = "in_range";
+ protected static final String VALID_VALUES = "valid_values";
+ protected static final String LENGTH = "length";
+ protected static final String MIN_LENGTH = "min_length";
+ protected static final String MAX_LENGTH = "max_length";
+ protected static final String PATTERN = "pattern";
+
+ protected static final String[] CONSTRAINTS = {
+ EQUAL, GREATER_THAN, GREATER_OR_EQUAL, LESS_THAN, LESS_OR_EQUAL,
+ IN_RANGE, VALID_VALUES, LENGTH, MIN_LENGTH, MAX_LENGTH, PATTERN};
+
+ @SuppressWarnings("unchecked")
+ public static Constraint factory(String constraintClass, String propname, String proptype, Object constraint) {
+
+ // a factory for the different Constraint classes
+ // replaces Python's __new__() usage
+
+ if (!(constraint instanceof LinkedHashMap)
+ || ((LinkedHashMap<String, Object>) constraint).size() != 1) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE101",
+ "InvalidSchemaError: Invalid constraint schema " + constraint.toString()));
+ }
+
+ switch (constraintClass) {
+ case EQUAL:
+ return new Equal(propname, proptype, constraint);
+ case GREATER_THAN:
+ return new GreaterThan(propname, proptype, constraint);
+ case GREATER_OR_EQUAL:
+ return new GreaterOrEqual(propname, proptype, constraint);
+ case LESS_THAN:
+ return new LessThan(propname, proptype, constraint);
+ case LESS_OR_EQUAL:
+ return new LessOrEqual(propname, proptype, constraint);
+ case IN_RANGE:
+ return new InRange(propname, proptype, constraint);
+ case VALID_VALUES:
+ return new ValidValues(propname, proptype, constraint);
+ case LENGTH:
+ return new Length(propname, proptype, constraint);
+ case MIN_LENGTH:
+ return new MinLength(propname, proptype, constraint);
+ case MAX_LENGTH:
+ return new MaxLength(propname, proptype, constraint);
+ case PATTERN:
+ return new Pattern(propname, proptype, constraint);
+ default:
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE102", String.format(
+ "InvalidSchemaError: Invalid property \"%s\"", constraintClass)));
+ return null;
+ }
+ }
+
+ private String constraintKey = "TBD";
+ protected ArrayList<String> validTypes = new ArrayList<>();
+ protected ArrayList<String> validPropTypes = new ArrayList<>();
+
+ protected String propertyName;
+ private String propertyType;
+ protected Object constraintValue;
+ protected Object constraintValueMsg;
+ protected Object valueMsg;
+
+ @SuppressWarnings("unchecked")
+ public Constraint(String propname, String proptype, Object constraint) {
+
+ setValues();
+
+ propertyName = propname;
+ propertyType = proptype;
+ constraintValue = ((LinkedHashMap<String, Object>) constraint).get(constraintKey);
+ constraintValueMsg = constraintValue;
+ boolean bFound = false;
+ for (String s : ScalarUnit.SCALAR_UNIT_TYPES) {
+ if (s.equals(propertyType)) {
+ bFound = true;
+ break;
+ }
+ }
+ if (bFound) {
+ constraintValue = _getScalarUnitConstraintValue();
+ }
+ // check if constraint is valid for property type
+ bFound = false;
+ for (String s : validPropTypes) {
+ if (s.equals(propertyType)) {
+ bFound = true;
+ break;
+ }
+ }
+ if (!bFound) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE103", String.format(
+ "InvalidSchemaError: Property \"%s\" is not valid for data type \"%s\"",
+ constraintKey, propertyType)));
+ }
+ }
+
+ public ArrayList<String> getValidTypes() {
+ return validTypes;
+ }
+
+ public void addValidTypes(List<String> validTypes) {
+ this.validTypes.addAll(validTypes);
+ }
+
+ public ArrayList<String> getValidPropTypes() {
+ return validPropTypes;
+ }
+
+ public String getPropertyType() {
+ return propertyType;
+ }
+
+ public Object getConstraintValue() {
+ return constraintValue;
+ }
+
+ public Object getConstraintValueMsg() {
+ return constraintValueMsg;
+ }
+
+ public Object getValueMsg() {
+ return valueMsg;
+ }
+
+ public void setConstraintKey(String constraintKey) {
+ this.constraintKey = constraintKey;
+ }
+
+ public void setValidTypes(ArrayList<String> validTypes) {
+ this.validTypes = validTypes;
+ }
+
+ public void setValidPropTypes(ArrayList<String> validPropTypes) {
+ this.validPropTypes = validPropTypes;
+ }
+
+ public void setPropertyType(String propertyType) {
+ this.propertyType = propertyType;
+ }
+
+ public void setConstraintValue(Object constraintValue) {
+ this.constraintValue = constraintValue;
+ }
+
+ public void setConstraintValueMsg(Object constraintValueMsg) {
+ this.constraintValueMsg = constraintValueMsg;
+ }
+
+ public void setValueMsg(Object valueMsg) {
+ this.valueMsg = valueMsg;
+ }
+
+ @SuppressWarnings("unchecked")
+ private Object _getScalarUnitConstraintValue() {
+ // code differs from Python because of class creation
+ if (constraintValue instanceof ArrayList) {
+ ArrayList<Object> ret = new ArrayList<>();
+ for (Object v : (ArrayList<Object>) constraintValue) {
+ ScalarUnit su = ScalarUnit.getScalarunitClass(propertyType, v);
+ ret.add(su.getNumFromScalarUnit(null));
+ }
+ return ret;
+ } else {
+ ScalarUnit su = ScalarUnit.getScalarunitClass(propertyType, constraintValue);
+ return su.getNumFromScalarUnit(null);
+ }
+ }
+
+ public void validate(Object value) {
+ if (Function.isFunction(value)) {
+ //skipping constraints check for functions
+ return;
+ }
+
+ valueMsg = value;
+ boolean bFound = false;
+ for (String s : ScalarUnit.SCALAR_UNIT_TYPES) {
+ if (s.equals(propertyType)) {
+ bFound = true;
+ break;
+ }
+ }
+ if (bFound) {
+ value = ScalarUnit.getScalarunitValue(propertyType, value, null);
+ }
+ if (!isValid(value)) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE008", "ValidationError: " + errMsg(value)));
+ }
+ }
+
+ protected abstract boolean isValid(Object value);
+
+ protected abstract void setValues();
+
+ protected abstract String errMsg(Object value);
+
+}
+
+/*python
+
+class Constraint(object):
+ '''Parent class for constraints for a Property or Input.'''
+
+ CONSTRAINTS = (EQUAL, GREATER_THAN,
+ GREATER_OR_EQUAL, LESS_THAN, LESS_OR_EQUAL, IN_RANGE,
+ VALID_VALUES, LENGTH, MIN_LENGTH, MAX_LENGTH, PATTERN) = \
+ ('equal', 'greater_than', 'greater_or_equal', 'less_than',
+ 'less_or_equal', 'in_range', 'valid_values', 'length',
+ 'min_length', 'max_length', 'pattern')
+
+ def __new__(cls, property_name, property_type, constraint):
+ if cls is not Constraint:
+ return super(Constraint, cls).__new__(cls)
+
+ if(not isinstance(constraint, collections.Mapping) or
+ len(constraint) != 1):
+ ValidationIssueCollector.appendException(
+ InvalidSchemaError(message=_('Invalid constraint schema.')))
+
+ for type in constraint.keys():
+ ConstraintClass = get_constraint_class(type)
+ if not ConstraintClass:
+ msg = _('Invalid property "%s".') % type
+ ValidationIssueCollector.appendException(
+ InvalidSchemaError(message=msg))
+
+ return ConstraintClass(property_name, property_type, constraint)
+
+ def __init__(self, property_name, property_type, constraint):
+ self.property_name = property_name
+ self.property_type = property_type
+ self.constraint_value = constraint[self.constraint_key]
+ self.constraint_value_msg = self.constraint_value
+ if self.property_type in scalarunit.ScalarUnit.SCALAR_UNIT_TYPES:
+ self.constraint_value = self._get_scalarunit_constraint_value()
+ # check if constraint is valid for property type
+ if property_type not in self.valid_prop_types:
+ msg = _('Property "%(ctype)s" is not valid for data type '
+ '"%(dtype)s".') % dict(
+ ctype=self.constraint_key,
+ dtype=property_type)
+ ValidationIssueCollector.appendException(InvalidSchemaError(message=msg))
+
+ def _get_scalarunit_constraint_value(self):
+ if self.property_type in scalarunit.ScalarUnit.SCALAR_UNIT_TYPES:
+ ScalarUnit_Class = (scalarunit.
+ get_scalarunit_class(self.property_type))
+ if isinstance(self.constraint_value, list):
+ return [ScalarUnit_Class(v).get_num_from_scalar_unit()
+ for v in self.constraint_value]
+ else:
+ return (ScalarUnit_Class(self.constraint_value).
+ get_num_from_scalar_unit())
+
+ def _err_msg(self, value):
+ return _('Property "%s" could not be validated.') % self.property_name
+
+ def validate(self, value):
+ self.value_msg = value
+ if self.property_type in scalarunit.ScalarUnit.SCALAR_UNIT_TYPES:
+ value = scalarunit.get_scalarunit_value(self.property_type, value)
+ if not self._is_valid(value):
+ err_msg = self._err_msg(value)
+ ValidationIssueCollector.appendException(
+ ValidationError(message=err_msg))
+
+
+*/
diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Equal.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Equal.java
new file mode 100644
index 0000000..f480099
--- /dev/null
+++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Equal.java
@@ -0,0 +1,77 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.sdc.toscaparser.api.elements.constraints;
+
+import java.util.Arrays;
+
+public class Equal extends Constraint {
+
+ protected void setValues() {
+
+ setConstraintKey(EQUAL);
+ validPropTypes.addAll(Arrays.asList(Schema.PROPERTY_TYPES));
+
+ }
+
+ public Equal(String name, String type, Object c) {
+ super(name, type, c);
+
+ }
+
+ protected boolean isValid(Object val) {
+ // equality of objects is tricky so we're comparing
+ // the toString() representation
+ return val.toString().equals(constraintValue.toString());
+ }
+
+ protected String errMsg(Object value) {
+ return String.format("The value \"%s\" of property \"%s\" is not equal to \"%s\"",
+ valueMsg, propertyName, constraintValueMsg);
+ }
+
+}
+
+/*python
+
+class Equal(Constraint):
+"""Constraint class for "equal"
+
+Constrains a property or parameter to a value equal to ('=')
+the value declared.
+"""
+
+constraint_key = Constraint.EQUAL
+
+valid_prop_types = Schema.PROPERTY_TYPES
+
+def _is_valid(self, value):
+ if value == self.constraint_value:
+ return True
+
+ return False
+
+def _err_msg(self, value):
+ return (_('The value "%(pvalue)s" of property "%(pname)s" is not '
+ 'equal to "%(cvalue)s".') %
+ dict(pname=self.property_name,
+ pvalue=self.value_msg,
+ cvalue=self.constraint_value_msg))
+*/
diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/GreaterOrEqual.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/GreaterOrEqual.java
new file mode 100644
index 0000000..0cb8f36
--- /dev/null
+++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/GreaterOrEqual.java
@@ -0,0 +1,130 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.sdc.toscaparser.api.elements.constraints;
+
+import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue;
+import org.onap.sdc.toscaparser.api.functions.Function;
+import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder;
+
+import java.util.Arrays;
+import java.util.Date;
+
+public class GreaterOrEqual extends Constraint {
+ // Constraint class for "greater_or_equal"
+
+ // Constrains a property or parameter to a value greater than or equal
+ // to ('>=') the value declared.
+
+ protected void setValues() {
+
+ setConstraintKey(GREATER_OR_EQUAL);
+
+ // timestamps are loaded as Date objects
+ addValidTypes(Arrays.asList("Integer", "Double", "Float", "Date"));
+ //validTypes.add("datetime.date");
+ //validTypes.add("datetime.time");
+ //validTypes.add("datetime.datetime");
+
+ validPropTypes.add(Schema.INTEGER);
+ validPropTypes.add(Schema.FLOAT);
+ validPropTypes.add(Schema.TIMESTAMP);
+ validPropTypes.add(Schema.SCALAR_UNIT_SIZE);
+ validPropTypes.add(Schema.SCALAR_UNIT_FREQUENCY);
+ validPropTypes.add(Schema.SCALAR_UNIT_TIME);
+
+ }
+
+ public GreaterOrEqual(String name, String type, Object c) {
+ super(name, type, c);
+
+ if (!validTypes.contains(constraintValue.getClass().getSimpleName())) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE104", "InvalidSchemaError: The property \"greater_or_equal\" expects comparable values"));
+ }
+ }
+
+
+ @Override
+ protected boolean isValid(Object value) {
+ if (Function.isFunction(value)) {
+ return true;
+ }
+
+ // timestamps
+ if (value instanceof Date) {
+ if (constraintValue instanceof Date) {
+ return !((Date) value).before((Date) constraintValue);
+ }
+ return false;
+ }
+ // all others
+ Double n1 = new Double(value.toString());
+ Double n2 = new Double(constraintValue.toString());
+ return n1 >= n2;
+ }
+
+ protected String errMsg(Object value) {
+ return String.format("The value \"%s\" of property \"%s\" must be greater or equal to \"%s\"",
+ valueMsg, propertyName, constraintValueMsg);
+ }
+}
+
+/*python
+
+class GreaterOrEqual(Constraint):
+"""Constraint class for "greater_or_equal"
+
+Constrains a property or parameter to a value greater than or equal
+to ('>=') the value declared.
+"""
+
+constraint_key = Constraint.GREATER_OR_EQUAL
+
+valid_types = (int, float, datetime.date,
+ datetime.time, datetime.datetime)
+
+valid_prop_types = (Schema.INTEGER, Schema.FLOAT, Schema.TIMESTAMP,
+ Schema.SCALAR_UNIT_SIZE, Schema.SCALAR_UNIT_FREQUENCY,
+ Schema.SCALAR_UNIT_TIME)
+
+def __init__(self, property_name, property_type, constraint):
+ super(GreaterOrEqual, self).__init__(property_name, property_type,
+ constraint)
+ if not isinstance(self.constraint_value, self.valid_types):
+ ThreadLocalsHolder.getCollector().appendException(
+ InvalidSchemaError(message=_('The property '
+ '"greater_or_equal" expects '
+ 'comparable values.')))
+
+def _is_valid(self, value):
+ if toscaparser.functions.is_function(value) or \
+ value >= self.constraint_value:
+ return True
+ return False
+
+def _err_msg(self, value):
+ return (_('The value "%(pvalue)s" of property "%(pname)s" must be '
+ 'greater than or equal to "%(cvalue)s".') %
+ dict(pname=self.property_name,
+ pvalue=self.value_msg,
+ cvalue=self.constraint_value_msg))
+
+
+*/
diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/GreaterThan.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/GreaterThan.java
new file mode 100644
index 0000000..b501907
--- /dev/null
+++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/GreaterThan.java
@@ -0,0 +1,120 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.sdc.toscaparser.api.elements.constraints;
+
+import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue;
+import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder;
+
+import java.util.Arrays;
+import java.util.Date;
+
+public class GreaterThan extends Constraint {
+
+ @Override
+ protected void setValues() {
+
+ setConstraintKey(GREATER_THAN);
+
+ // timestamps are loaded as Date objects
+ addValidTypes(Arrays.asList("Integer", "Double", "Float", "Date"));
+ //validTypes.add("datetime.date");
+ //validTypes.add("datetime.time");
+ //validTypes.add("datetime.datetime");
+
+
+ validPropTypes.add(Schema.INTEGER);
+ validPropTypes.add(Schema.FLOAT);
+ validPropTypes.add(Schema.TIMESTAMP);
+ validPropTypes.add(Schema.SCALAR_UNIT_SIZE);
+ validPropTypes.add(Schema.SCALAR_UNIT_FREQUENCY);
+ validPropTypes.add(Schema.SCALAR_UNIT_TIME);
+
+ }
+
+ public GreaterThan(String name, String type, Object c) {
+ super(name, type, c);
+
+ if (!validTypes.contains(constraintValue.getClass().getSimpleName())) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE105", "InvalidSchemaError: The property \"greater_than\" expects comparable values"));
+ }
+ }
+
+ @Override
+ protected boolean isValid(Object value) {
+
+ // timestamps
+ if (value instanceof Date) {
+ if (constraintValue instanceof Date) {
+ return ((Date) value).after((Date) constraintValue);
+ }
+ return false;
+ }
+
+ Double n1 = new Double(value.toString());
+ Double n2 = new Double(constraintValue.toString());
+ return n1 > n2;
+ }
+
+ protected String errMsg(Object value) {
+ return String.format("The value \"%s\" of property \"%s\" must be greater than \"%s\"",
+ valueMsg, propertyName, constraintValueMsg);
+ }
+
+}
+
+/*
+class GreaterThan(Constraint):
+ """Constraint class for "greater_than"
+
+ Constrains a property or parameter to a value greater than ('>')
+ the value declared.
+ """
+
+ constraint_key = Constraint.GREATER_THAN
+
+ valid_types = (int, float, datetime.date,
+ datetime.time, datetime.datetime)
+
+ valid_prop_types = (Schema.INTEGER, Schema.FLOAT, Schema.TIMESTAMP,
+ Schema.SCALAR_UNIT_SIZE, Schema.SCALAR_UNIT_FREQUENCY,
+ Schema.SCALAR_UNIT_TIME)
+
+ def __init__(self, property_name, property_type, constraint):
+ super(GreaterThan, self).__init__(property_name, property_type,
+ constraint)
+ if not isinstance(constraint[self.GREATER_THAN], self.valid_types):
+ ValidationIsshueCollector.appendException(
+ InvalidSchemaError(message=_('The property "greater_than" '
+ 'expects comparable values.')))
+
+ def _is_valid(self, value):
+ if value > self.constraint_value:
+ return True
+
+ return False
+
+ def _err_msg(self, value):
+ return (_('The value "%(pvalue)s" of property "%(pname)s" must be '
+ 'greater than "%(cvalue)s".') %
+ dict(pname=self.property_name,
+ pvalue=self.value_msg,
+ cvalue=self.constraint_value_msg))
+*/
diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/InRange.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/InRange.java
new file mode 100644
index 0000000..4edf021
--- /dev/null
+++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/InRange.java
@@ -0,0 +1,186 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.sdc.toscaparser.api.elements.constraints;
+
+import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue;
+import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder;
+
+import java.util.Arrays;
+import java.util.Date;
+
+import java.util.ArrayList;
+
+public class InRange extends Constraint {
+ // Constraint class for "in_range"
+
+ //Constrains a property or parameter to a value in range of (inclusive)
+ //the two values declared.
+
+ private static final String UNBOUNDED = "UNBOUNDED";
+
+ private Object min, max;
+
+ protected void setValues() {
+
+ setConstraintKey(IN_RANGE);
+
+ // timestamps are loaded as Date objects
+ addValidTypes(Arrays.asList("Integer", "Double", "Float", "String", "Date"));
+ //validTypes.add("datetime.date");
+ //validTypes.add("datetime.time");
+ //validTypes.add("datetime.datetime");
+
+ validPropTypes.add(Schema.INTEGER);
+ validPropTypes.add(Schema.FLOAT);
+ validPropTypes.add(Schema.TIMESTAMP);
+ validPropTypes.add(Schema.SCALAR_UNIT_SIZE);
+ validPropTypes.add(Schema.SCALAR_UNIT_FREQUENCY);
+ validPropTypes.add(Schema.SCALAR_UNIT_TIME);
+ validPropTypes.add(Schema.RANGE);
+
+ }
+
+ @SuppressWarnings("unchecked")
+ public InRange(String name, String type, Object c) {
+ super(name, type, c);
+
+ if (!(constraintValue instanceof ArrayList) || ((ArrayList<Object>) constraintValue).size() != 2) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE106", "InvalidSchemaError: The property \"in_range\" expects a list"));
+
+ }
+
+ ArrayList<Object> alcv = (ArrayList<Object>) constraintValue;
+ String msg = "The property \"in_range\" expects comparable values";
+ for (Object vo : alcv) {
+ if (!validTypes.contains(vo.getClass().getSimpleName())) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE107", "InvalidSchemaError: " + msg));
+ }
+ // The only string we allow for range is the special value 'UNBOUNDED'
+ if ((vo instanceof String) && !((String) vo).equals(UNBOUNDED)) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE108", "InvalidSchemaError: " + msg));
+ }
+ }
+ min = alcv.get(0);
+ max = alcv.get(1);
+
+ }
+
+ @Override
+ protected boolean isValid(Object value) {
+
+ // timestamps
+ if (value instanceof Date) {
+ if (min instanceof Date && max instanceof Date) {
+ return !((Date) value).before((Date) min)
+ && !((Date) value).after((Date) max);
+ }
+ return false;
+ }
+
+ Double dvalue = new Double(value.toString());
+ if (!(min instanceof String)) {
+ if (dvalue < new Double(min.toString())) {
+ return false;
+ }
+ } else if (!((String) min).equals(UNBOUNDED)) {
+ return false;
+ }
+ if (!(max instanceof String)) {
+ if (dvalue > new Double(max.toString())) {
+ return false;
+ }
+ } else if (!((String) max).equals(UNBOUNDED)) {
+ return false;
+ }
+ return true;
+ }
+
+ @Override
+ protected String errMsg(Object value) {
+ return String.format("The value \"%s\" of property \"%s\" is out of range \"(min:%s, max:%s)\"",
+ valueMsg, propertyName, min.toString(), max.toString());
+ }
+
+}
+
+/*python
+
+class InRange(Constraint):
+ """Constraint class for "in_range"
+
+ Constrains a property or parameter to a value in range of (inclusive)
+ the two values declared.
+ """
+ UNBOUNDED = 'UNBOUNDED'
+
+ constraint_key = Constraint.IN_RANGE
+
+ valid_types = (int, float, datetime.date,
+ datetime.time, datetime.datetime, str)
+
+ valid_prop_types = (Schema.INTEGER, Schema.FLOAT, Schema.TIMESTAMP,
+ Schema.SCALAR_UNIT_SIZE, Schema.SCALAR_UNIT_FREQUENCY,
+ Schema.SCALAR_UNIT_TIME, Schema.RANGE)
+
+ def __init__(self, property_name, property_type, constraint):
+ super(InRange, self).__init__(property_name, property_type, constraint)
+ if(not isinstance(self.constraint_value, collections.Sequence) or
+ (len(constraint[self.IN_RANGE]) != 2)):
+ ValidationIssueCollector.appendException(
+ InvalidSchemaError(message=_('The property "in_range" '
+ 'expects a list.')))
+
+ msg = _('The property "in_range" expects comparable values.')
+ for value in self.constraint_value:
+ if not isinstance(value, self.valid_types):
+ ValidationIssueCollector.appendException(
+ InvalidSchemaError(message=msg))
+ # The only string we allow for range is the special value
+ # 'UNBOUNDED'
+ if(isinstance(value, str) and value != self.UNBOUNDED):
+ ValidationIssueCollector.appendException(
+ InvalidSchemaError(message=msg))
+
+ self.min = self.constraint_value[0]
+ self.max = self.constraint_value[1]
+
+ def _is_valid(self, value):
+ if not isinstance(self.min, str):
+ if value < self.min:
+ return False
+ elif self.min != self.UNBOUNDED:
+ return False
+ if not isinstance(self.max, str):
+ if value > self.max:
+ return False
+ elif self.max != self.UNBOUNDED:
+ return False
+ return True
+
+ def _err_msg(self, value):
+ return (_('The value "%(pvalue)s" of property "%(pname)s" is out of '
+ 'range "(min:%(vmin)s, max:%(vmax)s)".') %
+ dict(pname=self.property_name,
+ pvalue=self.value_msg,
+ vmin=self.constraint_value_msg[0],
+ vmax=self.constraint_value_msg[1]))
+
+*/
diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Length.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Length.java
new file mode 100644
index 0000000..7988cb8
--- /dev/null
+++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Length.java
@@ -0,0 +1,100 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.sdc.toscaparser.api.elements.constraints;
+
+import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue;
+import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder;
+
+import java.util.Collections;
+
+public class Length extends Constraint {
+    // Constraint class for "length"
+
+    // Constrains the property or parameter to a value of a given length.
+
+    @Override
+    protected void setValues() {
+        setConstraintKey(LENGTH);
+        addValidTypes(Collections.singletonList("Integer"));
+        validPropTypes.add(Schema.STRING);
+    }
+
+    public Length(String name, String type, Object c) {
+        super(name, type, c);
+        if (!validTypes.contains(constraintValue.getClass().getSimpleName())) {
+            ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE109", "InvalidSchemaError: The property \"length\" expects an integer"));
+        }
+    }
+
+    // A value passes only when it is a String whose character count equals the
+    // Integer constraint; any other value or constraint type fails.
+    @Override
+    protected boolean isValid(Object value) {
+        return value instanceof String
+                && constraintValue instanceof Integer
+                && ((String) value).length() == (Integer) constraintValue;
+    }
+
+    @Override
+    protected String errMsg(Object value) {
+        return String.format("Length of value \"%s\" of property \"%s\" must be equal to \"%s\"",
+                value.toString(), propertyName, constraintValue.toString());
+    }
+
+}
+
+/*python
+ class Length(Constraint):
+ """Constraint class for "length"
+
+ Constrains the property or parameter to a value of a given length.
+ """
+
+ constraint_key = Constraint.LENGTH
+
+ valid_types = (int, )
+
+ valid_prop_types = (Schema.STRING, )
+
+ def __init__(self, property_name, property_type, constraint):
+ super(Length, self).__init__(property_name, property_type, constraint)
+ if not isinstance(self.constraint_value, self.valid_types):
+ ValidationIsshueCollector.appendException(
+ InvalidSchemaError(message=_('The property "length" expects '
+ 'an integer.')))
+
+ def _is_valid(self, value):
+ if isinstance(value, str) and len(value) == self.constraint_value:
+ return True
+
+ return False
+
+ def _err_msg(self, value):
+ return (_('Length of value "%(pvalue)s" of property "%(pname)s" '
+ 'must be equal to "%(cvalue)s".') %
+ dict(pname=self.property_name,
+ pvalue=value,
+ cvalue=self.constraint_value))
+*/
diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/LessOrEqual.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/LessOrEqual.java
new file mode 100644
index 0000000..37a4afc
--- /dev/null
+++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/LessOrEqual.java
@@ -0,0 +1,124 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.sdc.toscaparser.api.elements.constraints;
+
+import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue;
+import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder;
+
+import java.util.Arrays;
+import java.util.Date;
+
+public class LessOrEqual extends Constraint {
+ // Constraint class for "less_or_equal"
+
+ // Constrains a property or parameter to a value less than or equal
+ // to ('<=') the value declared.
+
+ protected void setValues() {
+
+ setConstraintKey(LESS_OR_EQUAL);
+
+ // timestamps are loaded as Date objects
+ addValidTypes(Arrays.asList("Integer", "Double", "Float", "Date"));
+ //validTypes.add("datetime.date");
+ //validTypes.add("datetime.time");
+ //validTypes.add("datetime.datetime");
+
+ validPropTypes.add(Schema.INTEGER);
+ validPropTypes.add(Schema.FLOAT);
+ validPropTypes.add(Schema.TIMESTAMP);
+ validPropTypes.add(Schema.SCALAR_UNIT_SIZE);
+ validPropTypes.add(Schema.SCALAR_UNIT_FREQUENCY);
+ validPropTypes.add(Schema.SCALAR_UNIT_TIME);
+
+ }
+
+ public LessOrEqual(String name, String type, Object c) {
+ super(name, type, c);
+
+ if (!validTypes.contains(constraintValue.getClass().getSimpleName())) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE110", "InvalidSchemaError: The property \"less_or_equal\" expects comparable values"));
+ }
+ }
+
+ @Override
+ protected boolean isValid(Object value) {
+
+ // timestamps
+ if (value instanceof Date) {
+ if (constraintValue instanceof Date) {
+ return !((Date) value).after((Date) constraintValue);
+ }
+ return false;
+ }
+
+ Double n1 = new Double(value.toString());
+ Double n2 = new Double(constraintValue.toString());
+ return n1 <= n2;
+ }
+
+ @Override
+ protected String errMsg(Object value) {
+ return String.format("The value \"%s\" of property \"%s\" must be less or equal to \"%s\"",
+ valueMsg, propertyName, constraintValueMsg);
+ }
+
+}
+
+/*python
+
+class LessOrEqual(Constraint):
+ """Constraint class for "less_or_equal"
+
+ Constrains a property or parameter to a value less than or equal
+ to ('<=') the value declared.
+ """
+
+ constraint_key = Constraint.LESS_OR_EQUAL
+
+ valid_types = (int, float, datetime.date,
+ datetime.time, datetime.datetime)
+
+ valid_prop_types = (Schema.INTEGER, Schema.FLOAT, Schema.TIMESTAMP,
+ Schema.SCALAR_UNIT_SIZE, Schema.SCALAR_UNIT_FREQUENCY,
+ Schema.SCALAR_UNIT_TIME)
+
+ def __init__(self, property_name, property_type, constraint):
+ super(LessOrEqual, self).__init__(property_name, property_type,
+ constraint)
+ if not isinstance(self.constraint_value, self.valid_types):
+ ValidationIsshueCollector.appendException(
+ InvalidSchemaError(message=_('The property "less_or_equal" '
+ 'expects comparable values.')))
+
+ def _is_valid(self, value):
+ if value <= self.constraint_value:
+ return True
+
+ return False
+
+ def _err_msg(self, value):
+ return (_('The value "%(pvalue)s" of property "%(pname)s" must be '
+ 'less than or equal to "%(cvalue)s".') %
+ dict(pname=self.property_name,
+ pvalue=self.value_msg,
+ cvalue=self.constraint_value_msg))
+*/
diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/LessThan.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/LessThan.java
new file mode 100644
index 0000000..952861d
--- /dev/null
+++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/LessThan.java
@@ -0,0 +1,121 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.sdc.toscaparser.api.elements.constraints;
+
+import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue;
+import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder;
+
+import java.util.Arrays;
+import java.util.Date;
+
+public class LessThan extends Constraint {
+
+ @Override
+ protected void setValues() {
+
+ setConstraintKey(LESS_THAN);
+ // timestamps are loaded as Date objects
+ addValidTypes(Arrays.asList("Integer", "Double", "Float", "Date"));
+ //validTypes.add("datetime.date");
+ //validTypes.add("datetime.time");
+ //validTypes.add("datetime.datetime");
+
+
+ validPropTypes.add(Schema.INTEGER);
+ validPropTypes.add(Schema.FLOAT);
+ validPropTypes.add(Schema.TIMESTAMP);
+ validPropTypes.add(Schema.SCALAR_UNIT_SIZE);
+ validPropTypes.add(Schema.SCALAR_UNIT_FREQUENCY);
+ validPropTypes.add(Schema.SCALAR_UNIT_TIME);
+
+ }
+
+ public LessThan(String name, String type, Object c) {
+ super(name, type, c);
+
+ if (!validTypes.contains(constraintValue.getClass().getSimpleName())) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE111", "InvalidSchemaError: The property \"less_than\" expects comparable values"));
+ }
+ }
+
+ @Override
+ protected boolean isValid(Object value) {
+
+ // timestamps
+ if (value instanceof Date) {
+ if (constraintValue instanceof Date) {
+ return ((Date) value).before((Date) constraintValue);
+ }
+ return false;
+ }
+
+ Double n1 = new Double(value.toString());
+ Double n2 = new Double(constraintValue.toString());
+ return n1 < n2;
+ }
+
+ @Override
+ protected String errMsg(Object value) {
+ return String.format("The value \"%s\" of property \"%s\" must be less than \"%s\"",
+ valueMsg, propertyName, constraintValueMsg);
+ }
+
+}
+
+/*python
+
+class LessThan(Constraint):
+"""Constraint class for "less_than"
+
+Constrains a property or parameter to a value less than ('<')
+the value declared.
+"""
+
+constraint_key = Constraint.LESS_THAN
+
+valid_types = (int, float, datetime.date,
+ datetime.time, datetime.datetime)
+
+valid_prop_types = (Schema.INTEGER, Schema.FLOAT, Schema.TIMESTAMP,
+ Schema.SCALAR_UNIT_SIZE, Schema.SCALAR_UNIT_FREQUENCY,
+ Schema.SCALAR_UNIT_TIME)
+
+def __init__(self, property_name, property_type, constraint):
+ super(LessThan, self).__init__(property_name, property_type,
+ constraint)
+ if not isinstance(self.constraint_value, self.valid_types):
+ ValidationIsshueCollector.appendException(
+ InvalidSchemaError(message=_('The property "less_than" '
+ 'expects comparable values.')))
+
+def _is_valid(self, value):
+ if value < self.constraint_value:
+ return True
+
+ return False
+
+def _err_msg(self, value):
+ return (_('The value "%(pvalue)s" of property "%(pname)s" must be '
+ 'less than "%(cvalue)s".') %
+ dict(pname=self.property_name,
+ pvalue=self.value_msg,
+ cvalue=self.constraint_value_msg))
+*/
diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/MaxLength.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/MaxLength.java
new file mode 100644
index 0000000..9068b65
--- /dev/null
+++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/MaxLength.java
@@ -0,0 +1,110 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.sdc.toscaparser.api.elements.constraints;
+
+import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue;
+import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder;
+
+import java.util.Collections;
+import java.util.LinkedHashMap;
+
+public class MaxLength extends Constraint {
+ // Constraint class for "min_length"
+
+ // Constrains the property or parameter to a value of a maximum length.
+
+ @Override
+ protected void setValues() {
+
+ setConstraintKey(MAX_LENGTH);
+
+ addValidTypes(Collections.singletonList("Integer"));
+
+
+ validPropTypes.add(Schema.STRING);
+ validPropTypes.add(Schema.MAP);
+
+ }
+
+ public MaxLength(String name, String type, Object c) {
+ super(name, type, c);
+
+ if (!validTypes.contains(constraintValue.getClass().getSimpleName())) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE112", "InvalidSchemaError: The property \"max_length\" expects an integer"));
+ }
+ }
+
+ @SuppressWarnings("unchecked")
+ @Override
+ protected boolean isValid(Object value) {
+ if (value instanceof String && constraintValue instanceof Integer
+ && ((String) value).length() <= (Integer) constraintValue) {
+ return true;
+ } else {
+ return value instanceof LinkedHashMap && constraintValue instanceof Integer
+ && ((LinkedHashMap<String, Object>) value).size() <= (Integer) constraintValue;
+ }
+ }
+
+ @Override
+ protected String errMsg(Object value) {
+ return String.format("Length of value \"%s\" of property \"%s\" must be no greater than \"%s\"",
+ value.toString(), propertyName, constraintValue.toString());
+ }
+
+}
+
+/*python
+
+class MaxLength(Constraint):
+ """Constraint class for "max_length"
+
+ Constrains the property or parameter to a value to a maximum length.
+ """
+
+ constraint_key = Constraint.MAX_LENGTH
+
+ valid_types = (int, )
+
+ valid_prop_types = (Schema.STRING, Schema.MAP)
+
+ def __init__(self, property_name, property_type, constraint):
+ super(MaxLength, self).__init__(property_name, property_type,
+ constraint)
+ if not isinstance(self.constraint_value, self.valid_types):
+ ValidationIsshueCollector.appendException(
+ InvalidSchemaError(message=_('The property "max_length" '
+ 'expects an integer.')))
+
+ def _is_valid(self, value):
+ if ((isinstance(value, str) or isinstance(value, dict)) and
+ len(value) <= self.constraint_value):
+ return True
+
+ return False
+
+ def _err_msg(self, value):
+ return (_('Length of value "%(pvalue)s" of property "%(pname)s" '
+ 'must be no greater than "%(cvalue)s".') %
+ dict(pname=self.property_name,
+ pvalue=value,
+ cvalue=self.constraint_value))
+*/
diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/MinLength.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/MinLength.java
new file mode 100644
index 0000000..eb1d870
--- /dev/null
+++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/MinLength.java
@@ -0,0 +1,109 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.sdc.toscaparser.api.elements.constraints;
+
+import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue;
+import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder;
+
+import java.util.Collections;
+import java.util.LinkedHashMap;
+
+public class MinLength extends Constraint {
+ // Constraint class for "min_length"
+
+ // Constrains the property or parameter to a value of a minimum length.
+
+ @Override
+ protected void setValues() {
+
+ setConstraintKey(MIN_LENGTH);
+
+ addValidTypes(Collections.singletonList("Integer"));
+
+ validPropTypes.add(Schema.STRING);
+ validPropTypes.add(Schema.MAP);
+
+ }
+
+ public MinLength(String name, String type, Object c) {
+ super(name, type, c);
+
+ if (!validTypes.contains(constraintValue.getClass().getSimpleName())) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE113", "InvalidSchemaError: The property \"min_length\" expects an integer"));
+ }
+ }
+
+ @SuppressWarnings("unchecked")
+ @Override
+ protected boolean isValid(Object value) {
+ if (value instanceof String && constraintValue instanceof Integer
+ && ((String) value).length() >= (Integer) constraintValue) {
+ return true;
+ } else {
+ return value instanceof LinkedHashMap && constraintValue instanceof Integer
+ && ((LinkedHashMap<String, Object>) value).size() >= (Integer) constraintValue;
+ }
+ }
+
+ @Override
+ protected String errMsg(Object value) {
+ return String.format("Length of value \"%s\" of property \"%s\" must be at least \"%s\"",
+ value.toString(), propertyName, constraintValue.toString());
+ }
+
+}
+
+/*python
+
+class MinLength(Constraint):
+ """Constraint class for "min_length"
+
+ Constrains the property or parameter to a value to a minimum length.
+ """
+
+ constraint_key = Constraint.MIN_LENGTH
+
+ valid_types = (int, )
+
+ valid_prop_types = (Schema.STRING, Schema.MAP)
+
+ def __init__(self, property_name, property_type, constraint):
+ super(MinLength, self).__init__(property_name, property_type,
+ constraint)
+ if not isinstance(self.constraint_value, self.valid_types):
+ ValidationIsshueCollector.appendException(
+ InvalidSchemaError(message=_('The property "min_length" '
+ 'expects an integer.')))
+
+ def _is_valid(self, value):
+ if ((isinstance(value, str) or isinstance(value, dict)) and
+ len(value) >= self.constraint_value):
+ return True
+
+ return False
+
+ def _err_msg(self, value):
+ return (_('Length of value "%(pvalue)s" of property "%(pname)s" '
+ 'must be at least "%(cvalue)s".') %
+ dict(pname=self.property_name,
+ pvalue=value,
+ cvalue=self.constraint_value))
+*/
diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Pattern.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Pattern.java
new file mode 100644
index 0000000..913e922
--- /dev/null
+++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Pattern.java
@@ -0,0 +1,116 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.sdc.toscaparser.api.elements.constraints;
+
+import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue;
+import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder;
+
+import java.util.Collections;
+import java.util.regex.Matcher;
+import java.util.regex.PatternSyntaxException;
+
+public class Pattern extends Constraint {
+    // Constraint class for "pattern"
+
+    // Constrains the property or parameter to a string that matches the declared
+    // regular expression in full.
+
+    @Override
+    protected void setValues() {
+
+        setConstraintKey(PATTERN);
+
+        addValidTypes(Collections.singletonList("String"));
+
+        validPropTypes.add(Schema.STRING);
+    }
+
+    public Pattern(String name, String type, Object c) {
+        super(name, type, c);
+
+        if (!validTypes.contains(constraintValue.getClass().getSimpleName())) {
+            ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE114", "InvalidSchemaError: The property \"pattern\" expects a string"));
+        }
+    }
+
+    /**
+     * Checks that the whole of {@code value} matches the regex constraint.
+     *
+     * @param value the property value under validation; must be a String
+     * @return true when the entire value matches, false otherwise (including on
+     *         non-String input or an invalid regex, both of which are reported)
+     */
+    @Override
+    protected boolean isValid(Object value) {
+        try {
+            if (!(value instanceof String)) {
+                ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE115", String.format("ValueError: Input value \"%s\" to \"pattern\" property \"%s\" must be a string",
+                        value.toString(), propertyName)));
+                return false;
+            }
+            // matches() anchors the regex at both ends of the input. The previous
+            // find() && end() == length() check accepted values where the pattern
+            // matched only a trailing substring (e.g. pattern "b" on value "ab"),
+            // unlike the reference implementation, which anchors at the start.
+            java.util.regex.Pattern pattern = java.util.regex.Pattern.compile(constraintValue.toString());
+            Matcher matcher = pattern.matcher(value.toString());
+            return matcher.matches();
+        } catch (PatternSyntaxException pse) {
+            ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE116", String.format("ValueError: Invalid regex \"%s\" in \"pattern\" property \"%s\"",
+                    constraintValue.toString(), propertyName)));
+            return false;
+        }
+    }
+
+    @Override
+    protected String errMsg(Object value) {
+        return String.format("The value \"%s\" of property \"%s\" does not match the pattern \"%s\"",
+                value.toString(), propertyName, constraintValue.toString());
+    }
+
+}
+
+/*python
+
+class Pattern(Constraint):
+ """Constraint class for "pattern"
+
+ Constrains the property or parameter to a value that is allowed by
+ the provided regular expression.
+ """
+
+ constraint_key = Constraint.PATTERN
+
+ valid_types = (str, )
+
+ valid_prop_types = (Schema.STRING, )
+
+ def __init__(self, property_name, property_type, constraint):
+ super(Pattern, self).__init__(property_name, property_type, constraint)
+ if not isinstance(self.constraint_value, self.valid_types):
+ ValidationIsshueCollector.appendException(
+ InvalidSchemaError(message=_('The property "pattern" '
+ 'expects a string.')))
+ self.match = re.compile(self.constraint_value).match
+
+ def _is_valid(self, value):
+ match = self.match(value)
+ return match is not None and match.end() == len(value)
+
+ def _err_msg(self, value):
+ return (_('The value "%(pvalue)s" of property "%(pname)s" does not '
+ 'match pattern "%(cvalue)s".') %
+ dict(pname=self.property_name,
+ pvalue=value,
+ cvalue=self.constraint_value))
+*/
diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Schema.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Schema.java
new file mode 100644
index 0000000..15ec597
--- /dev/null
+++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/Schema.java
@@ -0,0 +1,309 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.sdc.toscaparser.api.elements.constraints;
+
+import com.google.common.collect.ImmutableMap;
+import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue;
+import org.onap.sdc.toscaparser.api.elements.enums.FileSize;
+import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.LinkedHashMap;
+import java.util.Map;
+
+
+public class Schema {
+
+ private static final String TYPE = "type";
+ private static final String REQUIRED = "required";
+ private static final String DESCRIPTION = "description";
+ private static final String DEFAULT = "default";
+ private static final String CONSTRAINTS = "constraints";
+ private static final String STATUS = "status";
+ private static final String ENTRYSCHEMA = "entry_schema";
+ private static final String[] KEYS = {
+ TYPE, REQUIRED, DESCRIPTION, DEFAULT, CONSTRAINTS, ENTRYSCHEMA, STATUS};
+
+ public static final String INTEGER = "integer";
+ public static final String STRING = "string";
+ public static final String BOOLEAN = "boolean";
+ public static final String FLOAT = "float";
+ public static final String RANGE = "range";
+ public static final String NUMBER = "number";
+ public static final String TIMESTAMP = "timestamp";
+ public static final String LIST = "list";
+ public static final String MAP = "map";
+ public static final String SCALAR_UNIT_SIZE = "scalar-unit.size";
+ public static final String SCALAR_UNIT_FREQUENCY = "scalar-unit.frequency";
+ public static final String SCALAR_UNIT_TIME = "scalar-unit.time";
+ public static final String VERSION = "version";
+ public static final String PORTDEF = "PortDef";
+ public static final String PORTSPEC = "PortSpec"; //??? PortSpec.SHORTNAME
+ public static final String JSON = "json";
+
+ public static final String[] PROPERTY_TYPES = {
+ INTEGER, STRING, BOOLEAN, FLOAT, RANGE, NUMBER, TIMESTAMP, LIST, MAP,
+ SCALAR_UNIT_SIZE, SCALAR_UNIT_FREQUENCY, SCALAR_UNIT_TIME,
+ VERSION, PORTDEF, PORTSPEC, JSON};
+
+ public static final String[] SIMPLE_PROPERTY_TYPES = {
+ INTEGER, STRING, BOOLEAN, FLOAT, RANGE, NUMBER, TIMESTAMP,
+ SCALAR_UNIT_SIZE, SCALAR_UNIT_FREQUENCY, SCALAR_UNIT_TIME,
+ VERSION};
+
+ @SuppressWarnings("unused")
+ private static final String SCALAR_UNIT_SIZE_DEFAULT = "B";
+
+ private static Map<String, Long> scalarUnitSizeDict = ImmutableMap.<String, Long>builder()
+ .put("B", FileSize.B)
+ .put("KB", FileSize.KB)
+ .put("MB", FileSize.MB)
+ .put("GB", FileSize.GB)
+ .put("TB", FileSize.TB)
+ .put("KIB", FileSize.KIB)
+ .put("MIB", FileSize.MIB)
+ .put("GIB", FileSize.GIB)
+ .put("TIB", FileSize.TIB)
+ .build();
+
+
+ private String name;
+ private LinkedHashMap<String, Object> schema;
+ private int len;
+ private ArrayList<Constraint> constraintsList;
+
+
+ public Schema(String name, LinkedHashMap<String, Object> schemaDict) {
+ this.name = name;
+
+ if (!(schemaDict instanceof LinkedHashMap)) {
+ //msg = (_('Schema definition of "%(pname)s" must be a dict.')
+ // % dict(pname=name))
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE117", String.format(
+ "InvalidSchemaError: Schema definition of \"%s\" must be a dict", this.name)));
+ }
+
+ if (schemaDict.get("type") == null) {
+ //msg = (_('Schema definition of "%(pname)s" must have a "type" '
+ // 'attribute.') % dict(pname=name))
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE118", String.format(
+ "InvalidSchemaError: Schema definition of \"%s\" must have a \"type\" attribute", this.name)));
+ }
+
+ schema = schemaDict;
+ len = 0; //??? None
+ constraintsList = new ArrayList<>();
+ }
+
+ public String getType() {
+ return (String) schema.get(TYPE);
+ }
+
+ public boolean isRequired() {
+ return (boolean) schema.getOrDefault(REQUIRED, true);
+ }
+
+ public String getDescription() {
+ return (String) schema.getOrDefault(DESCRIPTION, "");
+ }
+
+ public Object getDefault() {
+ return schema.get(DEFAULT);
+ }
+
+ public String getStatus() {
+ return (String) schema.getOrDefault(STATUS, "");
+ }
+
+ public static boolean isRequestedTypeSimple(String type) {
+ return Arrays.asList(SIMPLE_PROPERTY_TYPES).contains(type);
+ }
+
+ @SuppressWarnings("unchecked")
+ public ArrayList<Constraint> getConstraints() {
+ if (constraintsList.size() == 0) {
+ Object cob = schema.get(CONSTRAINTS);
+ if (cob instanceof ArrayList) {
+ ArrayList<Object> constraintSchemata = (ArrayList<Object>) cob;
+ for (Object ob : constraintSchemata) {
+ if (ob instanceof LinkedHashMap) {
+ for (String cClass : ((LinkedHashMap<String, Object>) ob).keySet()) {
+ Constraint c = Constraint.factory(cClass, name, getType(), ob);
+ if (c != null) {
+ constraintsList.add(c);
+ } else {
+ // error
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE119", String.format(
+ "UnknownFieldError: Constraint type \"%s\" for property \"%s\" is not supported",
+ cClass, name)));
+ }
+ break;
+ }
+ }
+ }
+ }
+ }
+ return constraintsList;
+ }
+
+ @SuppressWarnings("unchecked")
+ public LinkedHashMap<String, Object> getEntrySchema() {
+ return (LinkedHashMap<String, Object>) schema.get(ENTRYSCHEMA);
+ }
+
+ // Python intrinsic methods...
+
+ // substitute for __getitem__ (aka self[key])
+ public Object getItem(String key) {
+ return schema.get(key);
+ }
+
+ /*
+ def __iter__(self):
+ for k in self.KEYS:
+ try:
+ self.schema[k]
+ except KeyError:
+ pass
+ else:
+ yield k
+ */
+
+ // substitute for __len__ (aka self.len())
+ public int getLen() {
+ int len = 0;
+ for (String k : KEYS) {
+ if (schema.get(k) != null) {
+ len++;
+ }
+ this.len = len;
+ }
+ return this.len;
+ }
+
+ // getter
+ public LinkedHashMap<String, Object> getSchema() {
+ return schema;
+ }
+
+}
+
+/*python
+
+class Schema(collections.Mapping):
+
+KEYS = (
+ TYPE, REQUIRED, DESCRIPTION,
+ DEFAULT, CONSTRAINTS, ENTRYSCHEMA, STATUS
+) = (
+ 'type', 'required', 'description',
+ 'default', 'constraints', 'entry_schema', 'status'
+)
+
+PROPERTY_TYPES = (
+ INTEGER, STRING, BOOLEAN, FLOAT, RANGE,
+ NUMBER, TIMESTAMP, LIST, MAP,
+ SCALAR_UNIT_SIZE, SCALAR_UNIT_FREQUENCY, SCALAR_UNIT_TIME,
+ VERSION, PORTDEF, PORTSPEC
+) = (
+ 'integer', 'string', 'boolean', 'float', 'range',
+ 'number', 'timestamp', 'list', 'map',
+ 'scalar-unit.size', 'scalar-unit.frequency', 'scalar-unit.time',
+ 'version', 'PortDef', PortSpec.SHORTNAME
+)
+
+SCALAR_UNIT_SIZE_DEFAULT = 'B'
+scalarUnitSizeDict = {'B': 1, 'KB': 1000, 'KIB': 1024, 'MB': 1000000,
+ 'MIB': 1048576, 'GB': 1000000000,
+ 'GIB': 1073741824, 'TB': 1000000000000,
+ 'TIB': 1099511627776}
+
+def __init__(self, name, schema_dict):
+ self.name = name
+ if not isinstance(schema_dict, collections.Mapping):
+ msg = (_('Schema definition of "%(pname)s" must be a dict.')
+ % dict(pname=name))
+ ValidationIssueCollector.appendException(InvalidSchemaError(message=msg))
+
+ try:
+ schema_dict['type']
+ except KeyError:
+ msg = (_('Schema definition of "%(pname)s" must have a "type" '
+ 'attribute.') % dict(pname=name))
+ ValidationIssueCollector.appendException(InvalidSchemaError(message=msg))
+
+ self.schema = schema_dict
+ self.len = None
+ self.constraints_list = []
+
+@property
+def type(self):
+ return self.schema[self.TYPE]
+
+@property
+def required(self):
+ return self.schema.get(self.REQUIRED, True)
+
+@property
+def description(self):
+ return self.schema.get(self.DESCRIPTION, '')
+
+@property
+def default(self):
+ return self.schema.get(self.DEFAULT)
+
+@property
+def status(self):
+ return self.schema.get(self.STATUS, '')
+
+@property
+def constraints(self):
+ if not self.constraints_list:
+ constraint_schemata = self.schema.get(self.CONSTRAINTS)
+ if constraint_schemata:
+ self.constraints_list = [Constraint(self.name,
+ self.type,
+ cschema)
+ for cschema in constraint_schemata]
+ return self.constraints_list
+
+@property
+def entry_schema(self):
+ return self.schema.get(self.ENTRYSCHEMA)
+
+def __getitem__(self, key):
+ return self.schema[key]
+
+def __iter__(self):
+ for k in self.KEYS:
+ try:
+ self.schema[k]
+ except KeyError:
+ pass
+ else:
+ yield k
+
+def __len__(self):
+ if self.len is None:
+ self.len = len(list(iter(self)))
+ return self.len
+*/
diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/ValidValues.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/ValidValues.java
new file mode 100644
index 0000000..c3a192d
--- /dev/null
+++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/constraints/ValidValues.java
@@ -0,0 +1,99 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.sdc.toscaparser.api.elements.constraints;
+
+import java.util.ArrayList;
+import java.util.Collections;
+
+public class ValidValues extends Constraint {
+
+
+ protected void setValues() {
+ setConstraintKey(VALID_VALUES);
+ Collections.addAll(validPropTypes, Schema.PROPERTY_TYPES);
+ }
+
+
+ public ValidValues(String name, String type, Object c) {
+ super(name, type, c);
+ }
+
+ @SuppressWarnings("unchecked")
+ protected boolean isValid(Object val) {
+ if (!(constraintValue instanceof ArrayList)) {
+ return false;
+ }
+ if (val instanceof ArrayList) {
+ boolean bAll = true;
+ for (Object v : (ArrayList<Object>) val) {
+ if (!((ArrayList<Object>) constraintValue).contains(v)) {
+ bAll = false;
+ break;
+ }
+ }
+ return bAll;
+ }
+ return ((ArrayList<Object>) constraintValue).contains(val);
+ }
+
+ protected String errMsg(Object value) {
+ return String.format("The value \"%s\" of property \"%s\" is not valid. Expected a value from \"%s\"",
+ value.toString(), propertyName, constraintValue.toString());
+ }
+
+}
+
+/*python
+
+class ValidValues(Constraint):
+"""Constraint class for "valid_values"
+
+Constrains a property or parameter to a value that is in the list of
+declared values.
+"""
+constraint_key = Constraint.VALID_VALUES
+
+valid_prop_types = Schema.PROPERTY_TYPES
+
+def __init__(self, property_name, property_type, constraint):
+ super(ValidValues, self).__init__(property_name, property_type,
+ constraint)
+ if not isinstance(self.constraint_value, collections.Sequence):
+ ValidationIsshueCollector.appendException(
+ InvalidSchemaError(message=_('The property "valid_values" '
+ 'expects a list.')))
+
+def _is_valid(self, value):
+ print '*** payton parser validating ',value,' in ',self.constraint_value#GGG
+ if isinstance(value, list):
+ return all(v in self.constraint_value for v in value)
+ return value in self.constraint_value
+
+def _err_msg(self, value):
+ allowed = '[%s]' % ', '.join(str(a) for a in self.constraint_value)
+ return (_('The value "%(pvalue)s" of property "%(pname)s" is not '
+ 'valid. Expected a value from "%(cvalue)s".') %
+ dict(pname=self.property_name,
+ pvalue=value,
+ cvalue=allowed))
+
+
+*/
diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/enums/FileSize.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/enums/FileSize.java
new file mode 100644
index 0000000..b07f7fa
--- /dev/null
+++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/enums/FileSize.java
@@ -0,0 +1,32 @@
+/*
+============LICENSE_START=======================================================
+ SDC
+ ================================================================================
+ Copyright (C) 2019 Nokia. All rights reserved.
+ ================================================================================
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+ ============LICENSE_END=========================================================
+*/
+package org.onap.sdc.toscaparser.api.elements.enums;
+
/**
 * Byte counts for TOSCA scalar-unit.size suffixes.
 * Decimal (SI) units use powers of 1000; binary (IEC) units use powers of
 * 1024 (1 KiB = 1024 B, per IEC 80000-13).
 */
public class FileSize {
    // Decimal (SI) units.
    public static final long B = 1L;
    public static final long KB = 1000L;
    public static final long MB = 1000000L;
    public static final long GB = 1000000000L;
    public static final long TB = 1000000000000L;
    // Binary (IEC) units. KIB was wrongly 1000L (a duplicate of KB); the
    // upstream python table defines 'KIB': 1024, consistent with the other
    // binary units below.
    public static final long KIB = 1024L;
    public static final long MIB = 1048576L;
    public static final long GIB = 1073741824L;
    public static final long TIB = 1099511627776L;
}
diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/enums/ToscaElementNames.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/enums/ToscaElementNames.java
new file mode 100644
index 0000000..ac0d837
--- /dev/null
+++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/elements/enums/ToscaElementNames.java
@@ -0,0 +1,40 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.sdc.toscaparser.api.elements.enums;
+
/**
 * Well-known TOSCA template section names, each paired with the literal key
 * used in the YAML document.
 */
public enum ToscaElementNames {

    TYPE("type"),
    PROPERTIES("properties"),
    ANNOTATIONS("annotations"),
    SOURCE_TYPE("source_type");

    // The literal section key as it appears in the template.
    private final String sectionName;

    ToscaElementNames(String sectionName) {
        this.sectionName = sectionName;
    }

    public String getName() {
        return sectionName;
    }

}
diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/extensions/ExtTools.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/extensions/ExtTools.java
new file mode 100644
index 0000000..5fbfca0
--- /dev/null
+++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/extensions/ExtTools.java
@@ -0,0 +1,204 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.sdc.toscaparser.api.extensions;
+
+import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue;
+import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder;
+import org.reflections.Reflections;
+import org.reflections.scanners.ResourcesScanner;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.BufferedReader;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.nio.charset.Charset;
+import java.util.ArrayList;
+import java.util.LinkedHashMap;
+import java.util.Set;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+public class ExtTools {
+
+ private static Logger log = LoggerFactory.getLogger(ExtTools.class.getName());
+
+ private static LinkedHashMap<String, Object> extensionInfo = new LinkedHashMap<>();
+
+ public ExtTools() {
+ extensionInfo = loadExtensions();
+ }
+
+ private LinkedHashMap<String, Object> loadExtensions() {
+
+ LinkedHashMap<String, Object> extensions = new LinkedHashMap<>();
+
+ Reflections reflections = new Reflections("extensions", new ResourcesScanner());
+ Set<String> resourcePaths = reflections.getResources(Pattern.compile(".*\\.py$"));
+
+ for (String resourcePath : resourcePaths) {
+ try (InputStream is = ExtTools.class.getClassLoader().getResourceAsStream(resourcePath);
+ InputStreamReader isr = new InputStreamReader(is, Charset.forName("UTF-8"));
+ BufferedReader br = new BufferedReader(isr);) {
+ String version = null;
+ ArrayList<String> sections = null;
+ String defsFile = null;
+ String line;
+
+ Pattern pattern = Pattern.compile("^([^#]\\S+)\\s*=\\s*(\\S.*)$");
+ while ((line = br.readLine()) != null) {
+ line = line.replace("'", "\"");
+ Matcher matcher = pattern.matcher(line);
+ if (matcher.find()) {
+ if (matcher.group(1).equals("VERSION")) {
+ version = matcher.group(2);
+ if (version.startsWith("'") || version.startsWith("\"")) {
+ version = version.substring(1, version.length() - 1);
+ }
+ } else if (matcher.group(1).equals("DEFS_FILE")) {
+ String fn = matcher.group(2);
+ if (fn.startsWith("'") || fn.startsWith("\"")) {
+ fn = fn.substring(1, fn.length() - 1);
+ }
+ defsFile = resourcePath.replaceFirst("\\w*.py$", fn);
+ } else if (matcher.group(1).equals("SECTIONS")) {
+ sections = new ArrayList<>();
+ Pattern secpat = Pattern.compile("\"([^\"]+)\"");
+ Matcher secmat = secpat.matcher(matcher.group(2));
+ while (secmat.find()) {
+ sections.add(secmat.group(1));
+ }
+ }
+ }
+ }
+
+ if (version != null && defsFile != null) {
+ LinkedHashMap<String, Object> ext = new LinkedHashMap<>();
+ ext.put("defs_file", defsFile);
+ if (sections != null) {
+ ext.put("sections", sections);
+ }
+ extensions.put(version, ext);
+ }
+ } catch (Exception e) {
+ log.error("ExtTools - loadExtensions - {}", e);
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue(
+ "JE281", "Failed to load extensions" + e.getMessage()));
+ }
+ }
+ return extensions;
+ }
+
+ public ArrayList<String> getVersions() {
+ return new ArrayList<String>(extensionInfo.keySet());
+ }
+
+ public LinkedHashMap<String, ArrayList<String>> getSections() {
+ LinkedHashMap<String, ArrayList<String>> sections = new LinkedHashMap<>();
+ for (String version : extensionInfo.keySet()) {
+ LinkedHashMap<String, Object> eiv = (LinkedHashMap<String, Object>) extensionInfo.get(version);
+ sections.put(version, (ArrayList<String>) eiv.get("sections"));
+ }
+ return sections;
+ }
+
+ public String getDefsFile(String version) {
+ LinkedHashMap<String, Object> eiv = (LinkedHashMap<String, Object>) extensionInfo.get(version);
+ return (String) eiv.get("defs_file");
+ }
+
+}
+
+/*python
+
+from toscaparser.common.exception import ToscaExtAttributeError
+from toscaparser.common.exception import ToscaExtImportError
+
+log = logging.getLogger("tosca.model")
+
+REQUIRED_ATTRIBUTES = ['VERSION', 'DEFS_FILE']
+
+
+class ExtTools(object):
+ def __init__(self):
+ self.extensionInfo = self._load_extensions()
+
+ def _load_extensions(self):
+ '''Dynamically load all the extensions .'''
+ extensions = {}
+
+ # Use the absolute path of the class path
+ abs_path = os.path.dirname(os.path.abspath(__file__))
+
+ extdirs = [e for e in os.listdir(abs_path) if
+ not e.startswith('tests') and
+ os.path.isdir(os.path.join(abs_path, e))]
+
+ for e in extdirs:
+ log.info(e)
+ extpath = abs_path + '/' + e
+ # Grab all the extension files in the given path
+ ext_files = [f for f in os.listdir(extpath) if f.endswith('.py')
+ and not f.startswith('__init__')]
+
+ # For each module, pick out the target translation class
+ for f in ext_files:
+ log.info(f)
+ ext_name = 'toscaparser/extensions/' + e + '/' + f.strip('.py')
+ ext_name = ext_name.replace('/', '.')
+ try:
+ extinfo = importlib.import_module(ext_name)
+ version = getattr(extinfo, 'VERSION')
+ defs_file = extpath + '/' + getattr(extinfo, 'DEFS_FILE')
+
+ # Sections is an optional attribute
+ sections = getattr(extinfo, 'SECTIONS', ())
+
+ extensions[version] = {'sections': sections,
+ 'defs_file': defs_file}
+ except ImportError:
+ raise ToscaExtImportError(ext_name=ext_name)
+ except AttributeError:
+ attrs = ', '.join(REQUIRED_ATTRIBUTES)
+ raise ToscaExtAttributeError(ext_name=ext_name,
+ attrs=attrs)
+
+ print 'Extensions ',extensions#GGG
+ return extensions
+
+ def get_versions(self):
+ return self.extensionInfo.keys()
+
+ def get_sections(self):
+ sections = {}
+ for version in self.extensionInfo.keys():
+ sections[version] = self.extensionInfo[version]['sections']
+
+ return sections
+
+ def get_defs_file(self, version):
+ versiondata = self.extensionInfo.get(version)
+
+ if versiondata:
+ return versiondata.get('defs_file')
+ else:
+ return None
+*/
diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/functions/Concat.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/functions/Concat.java
new file mode 100644
index 0000000..4ebeba9
--- /dev/null
+++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/functions/Concat.java
@@ -0,0 +1,97 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.sdc.toscaparser.api.functions;
+
+import org.onap.sdc.toscaparser.api.TopologyTemplate;
+import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue;
+import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder;
+
+import java.util.ArrayList;
+
+public class Concat extends Function {
+ // Validate the function and provide an instance of the function
+
+ // Concatenation of values are supposed to be produced at runtime and
+ // therefore its the responsibility of the TOSCA engine to implement the
+ // evaluation of Concat functions.
+
+ // Arguments:
+
+ // * List of strings that needs to be concatenated
+
+ // Example:
+
+ // [ 'http://',
+ // get_attribute: [ server, public_address ],
+ // ':' ,
+ // get_attribute: [ server, port ] ]
+
+
+ public Concat(TopologyTemplate ttpl, Object context, String name, ArrayList<Object> args) {
+ super(ttpl, context, name, args);
+ }
+
+ @Override
+ public Object result() {
+ return this;
+ }
+
+ @Override
+ void validate() {
+ if (args.size() < 1) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE145",
+ "ValueError: Invalid arguments for function \"concat\". " +
+ "Expected at least one argument"));
+ }
+ }
+
+}
+
+/*python
+
+class Concat(Function):
+"""Validate the function and provide an instance of the function
+
+Concatenation of values are supposed to be produced at runtime and
+therefore its the responsibility of the TOSCA engine to implement the
+evaluation of Concat functions.
+
+Arguments:
+
+* List of strings that needs to be concatenated
+
+Example:
+
+ [ 'http://',
+ get_attribute: [ server, public_address ],
+ ':' ,
+ get_attribute: [ server, port ] ]
+"""
+
+def validate(self):
+ if len(self.args) < 1:
+ ValidationIsshueCollector.appendException(
+ ValueError(_('Invalid arguments for function "{0}". Expected '
+ 'at least one arguments.').format(CONCAT)))
+
+def result(self):
+ return self
+*/
diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/functions/Function.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/functions/Function.java
new file mode 100644
index 0000000..711a7ca
--- /dev/null
+++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/functions/Function.java
@@ -0,0 +1,259 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.sdc.toscaparser.api.functions;
+
+
+import org.onap.sdc.toscaparser.api.TopologyTemplate;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.LinkedHashMap;
+import java.util.Map;
+
+public abstract class Function {
+
+ protected static final String GET_PROPERTY = "get_property";
+ protected static final String GET_ATTRIBUTE = "get_attribute";
+ protected static final String GET_INPUT = "get_input";
+ protected static final String GET_OPERATION_OUTPUT = "get_operation_output";
+ protected static final String CONCAT = "concat";
+ protected static final String TOKEN = "token";
+
+ protected static final String SELF = "SELF";
+ protected static final String HOST = "HOST";
+ protected static final String TARGET = "TARGET";
+ protected static final String SOURCE = "SOURCE";
+
+ protected static final String HOSTED_ON = "tosca.relationships.HostedOn";
+
+ protected static HashMap<String, String> functionMappings = _getFunctionMappings();
+
+ private static HashMap<String, String> _getFunctionMappings() {
+ HashMap<String, String> map = new HashMap<>();
+ map.put(GET_PROPERTY, "GetProperty");
+ map.put(GET_INPUT, "GetInput");
+ map.put(GET_ATTRIBUTE, "GetAttribute");
+ map.put(GET_OPERATION_OUTPUT, "GetOperationOutput");
+ map.put(CONCAT, "Concat");
+ map.put(TOKEN, "Token");
+ return map;
+ }
+
+ protected TopologyTemplate toscaTpl;
+ protected Object context;
+ protected String name;
+ protected ArrayList<Object> args;
+
+
    // Builds the function and immediately validates its arguments.
    // _toscaTpl: enclosing topology template; _context: the entity the
    // function appears in; _name: the intrinsic-function keyword (e.g.
    // "get_input"); _args: the raw argument list.
    public Function(TopologyTemplate _toscaTpl, Object _context, String _name, ArrayList<Object> _args) {
        toscaTpl = _toscaTpl;
        context = _context;
        name = _name;
        args = _args;
        // NOTE(review): invokes the subclass' overridable validate() from the
        // base constructor, i.e. before any subclass fields are initialized —
        // subclasses must rely only on the four fields assigned above.
        validate();

    }
+
+ abstract Object result();
+
+ abstract void validate();
+
+ @SuppressWarnings("unchecked")
+ public static boolean isFunction(Object funcObj) {
+ // Returns True if the provided function is a Tosca intrinsic function.
+ //
+ //Examples:
+ //
+ //* "{ get_property: { SELF, port } }"
+ //* "{ get_input: db_name }"
+ //* Function instance
+
+ //:param function: Function as string or a Function instance.
+ //:return: True if function is a Tosca intrinsic function, otherwise False.
+ //
+
+ if (funcObj instanceof LinkedHashMap) {
+ LinkedHashMap<String, Object> function = (LinkedHashMap<String, Object>) funcObj;
+ if (function.size() == 1) {
+ String funcName = (new ArrayList<String>(function.keySet())).get(0);
+ return functionMappings.keySet().contains(funcName);
+ }
+ }
+ return (funcObj instanceof Function);
+ }
+
    @SuppressWarnings("unchecked")
    public static Object getFunction(TopologyTemplate ttpl, Object context, Object rawFunctionObj, boolean resolveGetInput) {
        // Gets a Function instance representing the provided template function.
        // If the provided raw object is not in a shape relevant for template
        // functions, or the function name is not in the function mapping, the
        // raw object is returned unchanged.
        //
        // :param ttpl: the tosca (topology) template.
        // :param context: the entity the function is specified for.
        // :param rawFunctionObj: the raw function, as parsed from YAML.
        // :param resolveGetInput: when true, get_input leaves are resolved to
        //     their input values instead of being wrapped in GetInput objects.
        // :return: Function instance(s) in place of function leaves, or the
        //     raw object if parsing was unsuccessful.

        // Walks the leaves of the property tree (nested Lists and Maps) and
        // converts function leaves into Function objects, assuming a function
        // leaf is always a single-entry map (e.g. my_leaf: {get_input: xxx}).

        if (rawFunctionObj instanceof LinkedHashMap) { // In map type case
            LinkedHashMap rawFunction = ((LinkedHashMap) rawFunctionObj);
            // End point: a single-entry map whose value is NOT itself a map is
            // treated as a potential function leaf. A single-entry map whose
            // value is a map is treated as nested data and recursed into.
            // NOTE(review): this assumes function arguments are never given as
            // a bare map value — TODO confirm against the supported functions.
            if (rawFunction.size() == 1 &&
                    !(rawFunction.values().iterator().next() instanceof LinkedHashMap)) { // End point
                return getFunctionForObjectItem(ttpl, context, rawFunction, resolveGetInput);
            } else {
                return getFunctionForMap(ttpl, context, rawFunction, resolveGetInput);
            }
        } else if (rawFunctionObj instanceof ArrayList) { // In list type case
            return getFunctionForList(ttpl, context, (ArrayList) rawFunctionObj, resolveGetInput);
        }

        // Scalars (strings, numbers, booleans, null) pass through untouched.
        return rawFunctionObj;
    }
+
+ private static Object getFunctionForList(TopologyTemplate ttpl, Object context, ArrayList rawFunctionObj, boolean resolveGetInput) {
+ // iterate over list properties in recursion, convert leaves to function,
+ // and collect them in the same hierarchy as the original list.
+ ArrayList<Object> rawFunctionObjList = new ArrayList<>();
+ for (Object rawFunctionObjItem : rawFunctionObj) {
+ rawFunctionObjList.add(getFunction(ttpl, context, rawFunctionObjItem, resolveGetInput));
+ }
+ return rawFunctionObjList;
+ }
+
+ private static Object getFunctionForMap(TopologyTemplate ttpl, Object context, LinkedHashMap rawFunction, boolean resolveGetInput) {
+ // iterate over map nested properties in recursion, convert leaves to function,
+ // and collect them in the same hierarchy as the original map.
+ LinkedHashMap rawFunctionObjMap = new LinkedHashMap();
+ for (Object rawFunctionObjItem : rawFunction.entrySet()) {
+ Object itemValue = getFunction(ttpl, context, ((Map.Entry) rawFunctionObjItem).getValue(), resolveGetInput);
+ rawFunctionObjMap.put(((Map.Entry) rawFunctionObjItem).getKey(), itemValue);
+ }
+ return rawFunctionObjMap;
+ }
+
+ private static Object getFunctionForObjectItem(TopologyTemplate ttpl, Object context, Object rawFunctionObjItem, boolean resolveGetInput) {
+ if (isFunction(rawFunctionObjItem)) {
+ LinkedHashMap<String, Object> rawFunction = (LinkedHashMap<String, Object>) rawFunctionObjItem;
+ String funcName = (new ArrayList<String>(rawFunction.keySet())).get(0);
+ if (functionMappings.keySet().contains(funcName)) {
+ String funcType = functionMappings.get(funcName);
+ Object oargs = (new ArrayList<Object>(rawFunction.values())).get(0);
+ ArrayList<Object> funcArgs;
+ if (oargs instanceof ArrayList) {
+ funcArgs = (ArrayList<Object>) oargs;
+ } else {
+ funcArgs = new ArrayList<>();
+ funcArgs.add(oargs);
+ }
+
+ switch (funcType) {
+ case "GetInput":
+ if (resolveGetInput) {
+ GetInput input = new GetInput(ttpl, context, funcName, funcArgs);
+ return input.result();
+ }
+ return new GetInput(ttpl, context, funcName, funcArgs);
+ case "GetAttribute":
+ return new GetAttribute(ttpl, context, funcName, funcArgs);
+ case "GetProperty":
+ return new GetProperty(ttpl, context, funcName, funcArgs);
+ case "GetOperationOutput":
+ return new GetOperationOutput(ttpl, context, funcName, funcArgs);
+ case "Concat":
+ return new Concat(ttpl, context, funcName, funcArgs);
+ case "Token":
+ return new Token(ttpl, context, funcName, funcArgs);
+ }
+ }
+ }
+
+ return rawFunctionObjItem;
+ }
+
+ @Override
+ public String toString() {
+ String argsStr = args.size() > 1 ? args.toString() : args.get(0).toString();
+ return name + ":" + argsStr;
+ }
+}
+
+/*python
+
+from toscaparser.common.exception import ValidationIsshueCollector
+from toscaparser.common.exception import UnknownInputError
+from toscaparser.dataentity import DataEntity
+from toscaparser.elements.constraints import Schema
+from toscaparser.elements.datatype import DataType
+from toscaparser.elements.entity_type import EntityType
+from toscaparser.elements.relationshiptype import RelationshipType
+from toscaparser.elements.statefulentitytype import StatefulEntityType
+from toscaparser.utils.gettextutils import _
+
+
+GET_PROPERTY = 'get_property'
+GET_ATTRIBUTE = 'get_attribute'
+GET_INPUT = 'get_input'
+GET_OPERATION_OUTPUT = 'get_operation_output'
+CONCAT = 'concat'
+TOKEN = 'token'
+
+SELF = 'SELF'
+HOST = 'HOST'
+TARGET = 'TARGET'
+SOURCE = 'SOURCE'
+
+HOSTED_ON = 'tosca.relationships.HostedOn'
+
+
+@six.add_metaclass(abc.ABCMeta)
+class Function(object):
+ """An abstract type for representing a Tosca template function."""
+
+ def __init__(self, tosca_tpl, context, name, args):
+ self.tosca_tpl = tosca_tpl
+ self.context = context
+ self.name = name
+ self.args = args
+ self.validate()
+
+ @abc.abstractmethod
+ def result(self):
+ """Invokes the function and returns its result
+
+ Some methods invocation may only be relevant on runtime (for example,
+ getting runtime properties) and therefore its the responsibility of
+ the orchestrator/translator to take care of such functions invocation.
+
+ :return: Function invocation result.
+ """
+ return {self.name: self.args}
+
+ @abc.abstractmethod
+ def validate(self):
+ """Validates function arguments."""
+ pass
+*/
diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/functions/GetAttribute.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/functions/GetAttribute.java
new file mode 100644
index 0000000..564d410
--- /dev/null
+++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/functions/GetAttribute.java
@@ -0,0 +1,544 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.sdc.toscaparser.api.functions;
+
+import org.onap.sdc.toscaparser.api.*;
+import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue;
+import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder;
+
+import java.util.ArrayList;
+import java.util.LinkedHashMap;
+
+import org.onap.sdc.toscaparser.api.*;
+import org.onap.sdc.toscaparser.api.elements.AttributeDef;
+import org.onap.sdc.toscaparser.api.elements.CapabilityTypeDef;
+import org.onap.sdc.toscaparser.api.elements.DataType;
+import org.onap.sdc.toscaparser.api.elements.EntityType;
+import org.onap.sdc.toscaparser.api.elements.NodeType;
+import org.onap.sdc.toscaparser.api.elements.PropertyDef;
+import org.onap.sdc.toscaparser.api.elements.RelationshipType;
+import org.onap.sdc.toscaparser.api.elements.StatefulEntityType;
+import org.onap.sdc.toscaparser.api.elements.constraints.Schema;
+
+public class GetAttribute extends Function {
+ // Get an attribute value of an entity defined in the service template
+
+ // Node template attributes values are set in runtime and therefore its the
+ // responsibility of the Tosca engine to implement the evaluation of
+ // get_attribute functions.
+
+ // Arguments:
+
+ // * Node template name | HOST.
+ // * Attribute name.
+
+ // If the HOST keyword is passed as the node template name argument the
+ // function will search each node template along the HostedOn relationship
+ // chain until a node which contains the attribute is found.
+
+ // Examples:
+
+ // * { get_attribute: [ server, private_address ] }
+ // * { get_attribute: [ HOST, private_address ] }
+ // * { get_attribute: [ HOST, private_address, 0 ] }
+ // * { get_attribute: [ HOST, private_address, 0, some_prop] }
+
+ public GetAttribute(TopologyTemplate ttpl, Object context, String name, ArrayList<Object> args) {
+ super(ttpl, context, name, args);
+ }
+
+ @Override
+ void validate() {
+ if (args.size() < 2) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE146",
+ "ValueError: Illegal arguments for function \"get_attribute\". Expected arguments: \"node-template-name\", \"req-or-cap\" (optional), \"property name.\""));
+ return;
+ } else if (args.size() == 2) {
+ _findNodeTemplateContainingAttribute();
+ } else {
+ NodeTemplate nodeTpl = _findNodeTemplate((String) args.get(0));
+ if (nodeTpl == null) {
+ return;
+ }
+ int index = 2;
+ AttributeDef attr = nodeTpl.getTypeDefinition().getAttributeDefValue((String) args.get(1));
+ if (attr != null) {
+ // found
+ } else {
+ index = 3;
+ // then check the req or caps
+ if (!(args.get(1) instanceof String) || !(args.get(2) instanceof String)) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE146", "ValueError: Illegal arguments for function \"get_attribute\". Expected a String argument"));
+ }
+
+ attr = _findReqOrCapAttribute(args.get(1).toString(), args.get(2).toString());
+ if (attr == null) {
+ return;
+ }
+ }
+
+
+ String valueType = (String) attr.getSchema().get("type");
+ if (args.size() > index) {
+ for (Object elem : args.subList(index, args.size())) {
+ if (valueType.equals("list")) {
+ if (!(elem instanceof Integer)) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE147", String.format(
+ "ValueError: Illegal arguments for function \"get_attribute\" \"%s\". Expected positive integer argument",
+ elem.toString())));
+ }
+ Object ob = attr.getSchema().get("entry_schema");
+ valueType = (String)
+ ((LinkedHashMap<String, Object>) ob).get("type");
+ } else if (valueType.equals("map")) {
+ Object ob = attr.getSchema().get("entry_schema");
+ valueType = (String)
+ ((LinkedHashMap<String, Object>) ob).get("type");
+ } else {
+ boolean bFound = false;
+ for (String p : Schema.PROPERTY_TYPES) {
+ if (p.equals(valueType)) {
+ bFound = true;
+ break;
+ }
+ }
+ if (bFound) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE148", String.format(
+ "ValueError: 'Illegal arguments for function \"get_attribute\". Unexpected attribute/index value \"%s\"",
+ elem)));
+ return;
+ } else { // It is a complex type
+ DataType dataType = new DataType(valueType, null);
+ LinkedHashMap<String, PropertyDef> props =
+ dataType.getAllProperties();
+ PropertyDef prop = props.get((String) elem);
+ if (prop != null) {
+ valueType = (String) prop.getSchema().get("type");
+ } else {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE149", String.format(
+ "KeyError: Illegal arguments for function \"get_attribute\". Attribute name \"%s\" not found in \"%\"",
+ elem, valueType)));
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+
+ @Override
+ public Object result() {
+ return this;
+ }
+
+ private NodeTemplate getReferencedNodeTemplate() {
+ // Gets the NodeTemplate instance the get_attribute function refers to
+
+ // If HOST keyword was used as the node template argument, the node
+ // template which contains the attribute along the HostedOn relationship
+ // chain will be returned.
+
+ return _findNodeTemplateContainingAttribute();
+
+ }
+
+ // Attributes can be explicitly created as part of the type definition
+ // or a property name can be implicitly used as an attribute name
+ private NodeTemplate _findNodeTemplateContainingAttribute() {
+ NodeTemplate nodeTpl = _findNodeTemplate((String) args.get(0));
+ if (nodeTpl != null &&
+ !_attributeExistsInType(nodeTpl.getTypeDefinition()) &&
+ !nodeTpl.getProperties().keySet().contains(getAttributeName())) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE150", String.format(
+ "KeyError: Attribute \"%s\" was not found in node template \"%s\"",
+ getAttributeName(), nodeTpl.getName())));
+ }
+ return nodeTpl;
+ }
+
+ private boolean _attributeExistsInType(StatefulEntityType typeDefinition) {
+ LinkedHashMap<String, AttributeDef> attrsDef = typeDefinition.getAttributesDef();
+ return attrsDef.get(getAttributeName()) != null;
+ }
+
+ private NodeTemplate _findHostContainingAttribute(String nodeTemplateName) {
+ NodeTemplate nodeTemplate = _findNodeTemplate(nodeTemplateName);
+ if (nodeTemplate != null) {
+ LinkedHashMap<String, Object> hostedOnRel =
+ (LinkedHashMap<String, Object>) EntityType.TOSCA_DEF.get(HOSTED_ON);
+ for (RequirementAssignment r : nodeTemplate.getRequirements().getAll()) {
+ String targetName = r.getNodeTemplateName();
+ NodeTemplate targetNode = _findNodeTemplate(targetName);
+ NodeType targetType = (NodeType) targetNode.getTypeDefinition();
+ for (CapabilityTypeDef capability : targetType.getCapabilitiesObjects()) {
+// if(((ArrayList<String>)hostedOnRel.get("valid_target_types")).contains(capability.getType())) {
+ if (capability.inheritsFrom((ArrayList<String>) hostedOnRel.get("valid_target_types"))) {
+ if (_attributeExistsInType(targetType)) {
+ return targetNode;
+ }
+ return _findHostContainingAttribute(targetName);
+ }
+ }
+ }
+ }
+ return null;
+ }
+
+
+ private NodeTemplate _findNodeTemplate(String nodeTemplateName) {
+ if (nodeTemplateName.equals(HOST)) {
+ // Currently this is the only way to tell whether the function
+ // is used within the outputs section of the TOSCA template.
+ if (context instanceof ArrayList) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE151",
+ "ValueError: \"get_attribute: [ HOST, ... ]\" is not allowed in \"outputs\" section of the TOSCA template"));
+ return null;
+ }
+ NodeTemplate nodeTpl = _findHostContainingAttribute(SELF);
+ if (nodeTpl == null) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE152", String.format(
+ "ValueError: \"get_attribute: [ HOST, ... ]\" was used in " +
+ "node template \"%s\" but \"%s\" was not found in " +
+ "the relationship chain", ((NodeTemplate) context).getName(), HOSTED_ON)));
+ return null;
+ }
+ return nodeTpl;
+ }
+ if (nodeTemplateName.equals(TARGET)) {
+ if (!(((EntityTemplate) context).getTypeDefinition() instanceof RelationshipType)) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE153",
+ "KeyError: \"TARGET\" keyword can only be used in context " +
+ " to \"Relationships\" target node"));
+ return null;
+ }
+ return ((RelationshipTemplate) context).getTarget();
+ }
+ if (nodeTemplateName.equals(SOURCE)) {
+ if (!(((EntityTemplate) context).getTypeDefinition() instanceof RelationshipType)) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE154",
+ "KeyError: \"SOURCE\" keyword can only be used in context " +
+ " to \"Relationships\" source node"));
+ return null;
+ }
+ return ((RelationshipTemplate) context).getTarget();
+ }
+ String name;
+ if (nodeTemplateName.equals(SELF) && !(context instanceof ArrayList)) {
+ name = ((NodeTemplate) context).getName();
+ } else {
+ name = nodeTemplateName;
+ }
+ for (NodeTemplate nt : toscaTpl.getNodeTemplates()) {
+ if (nt.getName().equals(name)) {
+ return nt;
+ }
+ }
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE155", String.format(
+ "KeyError: Node template \"%s\" was not found", nodeTemplateName)));
+ return null;
+ }
+
+ public AttributeDef _findReqOrCapAttribute(String reqOrCap, String attrName) {
+
+ NodeTemplate nodeTpl = _findNodeTemplate((String) args.get(0));
+ // Find attribute in node template's requirements
+ for (RequirementAssignment r : nodeTpl.getRequirements().getAll()) {
+ String nodeName = r.getNodeTemplateName();
+ if (r.getName().equals(reqOrCap)) {
+ NodeTemplate nodeTemplate = _findNodeTemplate(nodeName);
+ return _getCapabilityAttribute(nodeTemplate, r.getName(), attrName);
+ }
+ }
+ // If requirement was not found, look in node template's capabilities
+ return _getCapabilityAttribute(nodeTpl, reqOrCap, attrName);
+ }
+
+ private AttributeDef _getCapabilityAttribute(NodeTemplate nodeTemplate,
+ String capabilityName,
+ String attrName) {
+ // Gets a node template capability attribute
+ CapabilityAssignment cap = nodeTemplate.getCapabilities().getCapabilityByName(capabilityName);
+
+ if (cap != null) {
+ AttributeDef attribute = null;
+ LinkedHashMap<String, AttributeDef> attrs =
+ cap.getDefinition().getAttributesDef();
+ if (attrs != null && attrs.keySet().contains(attrName)) {
+ attribute = attrs.get(attrName);
+ }
+ if (attribute == null) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE156", String.format(
+ "KeyError: Attribute \"%s\" was not found in capability \"%s\" of node template \"%s\" referenced from node template \"%s\"",
+ attrName, capabilityName, nodeTemplate.getName(), ((NodeTemplate) context).getName())));
+ }
+ return attribute;
+ }
+ String msg = String.format(
+ "Requirement/CapabilityAssignment \"%s\" referenced from node template \"%s\" was not found in node template \"%s\"",
+ capabilityName, ((NodeTemplate) context).getName(), nodeTemplate.getName());
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE157", "KeyError: " + msg));
+ return null;
+ }
+
+ String getNodeTemplateName() {
+ return (String) args.get(0);
+ }
+
+ String getAttributeName() {
+ return (String) args.get(1);
+ }
+
+}
+
+/*python
+
+class GetAttribute(Function):
+"""Get an attribute value of an entity defined in the service template
+
+Node template attributes values are set in runtime and therefore its the
+responsibility of the Tosca engine to implement the evaluation of
+get_attribute functions.
+
+Arguments:
+
+* Node template name | HOST.
+* Attribute name.
+
+If the HOST keyword is passed as the node template name argument the
+function will search each node template along the HostedOn relationship
+chain until a node which contains the attribute is found.
+
+Examples:
+
+* { get_attribute: [ server, private_address ] }
+* { get_attribute: [ HOST, private_address ] }
+* { get_attribute: [ HOST, private_address, 0 ] }
+* { get_attribute: [ HOST, private_address, 0, some_prop] }
+"""
+
+def validate(self):
+ if len(self.args) < 2:
+ ValidationIssueCollector.appendException(
+ ValueError(_('Illegal arguments for function "{0}". Expected '
+ 'arguments: "node-template-name", "req-or-cap"'
+ '(optional), "property name"'
+ ).format(GET_ATTRIBUTE)))
+ return
+ elif len(self.args) == 2:
+ self._find_node_template_containing_attribute()
+ else:
+ node_tpl = self._find_node_template(self.args[0])
+ if node_tpl is None:
+ return
+ index = 2
+ attrs = node_tpl.type_definition.get_attributes_def()
+ found = [attrs[self.args[1]]] if self.args[1] in attrs else []
+ if found:
+ attr = found[0]
+ else:
+ index = 3
+ # then check the req or caps
+ attr = self._find_req_or_cap_attribute(self.args[1],
+ self.args[2])
+
+ value_type = attr.schema['type']
+ if len(self.args) > index:
+ for elem in self.args[index:]:
+ if value_type == "list":
+ if not isinstance(elem, int):
+ ValidationIssueCollector.appendException(
+ ValueError(_('Illegal arguments for function'
+ ' "{0}". "{1}" Expected positive'
+ ' integer argument'
+ ).format(GET_ATTRIBUTE, elem)))
+ value_type = attr.schema['entry_schema']['type']
+ elif value_type == "map":
+ value_type = attr.schema['entry_schema']['type']
+ elif value_type in Schema.PROPERTY_TYPES:
+ ValidationIssueCollector.appendException(
+ ValueError(_('Illegal arguments for function'
+ ' "{0}". Unexpected attribute/'
+ 'index value "{1}"'
+ ).format(GET_ATTRIBUTE, elem)))
+ return
+ else: # It is a complex type
+ data_type = DataType(value_type)
+ props = data_type.get_all_properties()
+ found = [props[elem]] if elem in props else []
+ if found:
+ prop = found[0]
+ value_type = prop.schema['type']
+ else:
+ ValidationIssueCollector.appendException(
+ KeyError(_('Illegal arguments for function'
+ ' "{0}". Attribute name "{1}" not'
+ ' found in "{2}"'
+ ).format(GET_ATTRIBUTE,
+ elem,
+ value_type)))
+
+def result(self):
+ return self
+
+def get_referenced_node_template(self):
+ """Gets the NodeTemplate instance the get_attribute function refers to.
+
+ If HOST keyword was used as the node template argument, the node
+ template which contains the attribute along the HostedOn relationship
+ chain will be returned.
+ """
+ return self._find_node_template_containing_attribute()
+
+# Attributes can be explicitly created as part of the type definition
+# or a property name can be implicitly used as an attribute name
+def _find_node_template_containing_attribute(self):
+ node_tpl = self._find_node_template(self.args[0])
+ if node_tpl and \
+ not self._attribute_exists_in_type(node_tpl.type_definition) \
+ and self.attribute_name not in node_tpl.get_properties():
+ ValidationIssueCollector.appendException(
+ KeyError(_('Attribute "%(att)s" was not found in node '
+ 'template "%(ntpl)s".') %
+ {'att': self.attribute_name,
+ 'ntpl': node_tpl.name}))
+ return node_tpl
+
+def _attribute_exists_in_type(self, type_definition):
+ attrs_def = type_definition.get_attributes_def()
+ found = [attrs_def[self.attribute_name]] \
+ if self.attribute_name in attrs_def else []
+ return len(found) == 1
+
+def _find_host_containing_attribute(self, node_template_name=SELF):
+ node_template = self._find_node_template(node_template_name)
+ if node_template:
+ hosted_on_rel = EntityType.TOSCA_DEF[HOSTED_ON]
+ for r in node_template.requirements:
+ for requirement, target_name in r.items():
+ target_node = self._find_node_template(target_name)
+ target_type = target_node.type_definition
+ for capability in target_type.get_capabilities_objects():
+ if capability.type in \
+ hosted_on_rel['valid_target_types']:
+ if self._attribute_exists_in_type(target_type):
+ return target_node
+ return self._find_host_containing_attribute(
+ target_name)
+
+def _find_node_template(self, node_template_name):
+ if node_template_name == HOST:
+ # Currently this is the only way to tell whether the function
+ # is used within the outputs section of the TOSCA template.
+ if isinstance(self.context, list):
+ ValidationIssueCollector.appendException(
+ ValueError(_(
+ '"get_attribute: [ HOST, ... ]" is not allowed in '
+ '"outputs" section of the TOSCA template.')))
+ return
+ node_tpl = self._find_host_containing_attribute()
+ if not node_tpl:
+ ValidationIssueCollector.appendException(
+ ValueError(_(
+ '"get_attribute: [ HOST, ... ]" was used in node '
+ 'template "{0}" but "{1}" was not found in '
+ 'the relationship chain.').format(self.context.name,
+ HOSTED_ON)))
+ return
+ return node_tpl
+ if node_template_name == TARGET:
+ if not isinstance(self.context.type_definition, RelationshipType):
+ ValidationIssueCollector.appendException(
+ KeyError(_('"TARGET" keyword can only be used in context'
+ ' to "Relationships" target node')))
+ return
+ return self.context.target
+ if node_template_name == SOURCE:
+ if not isinstance(self.context.type_definition, RelationshipType):
+ ValidationIssueCollector.appendException(
+ KeyError(_('"SOURCE" keyword can only be used in context'
+ ' to "Relationships" source node')))
+ return
+ return self.context.source
+ name = self.context.name \
+ if node_template_name == SELF and \
+ not isinstance(self.context, list) \
+ else node_template_name
+ for node_template in self.tosca_tpl.nodetemplates:
+ if node_template.name == name:
+ return node_template
+ ValidationIssueCollector.appendException(
+ KeyError(_(
+ 'Node template "{0}" was not found.'
+ ).format(node_template_name)))
+
+def _find_req_or_cap_attribute(self, req_or_cap, attr_name):
+ node_tpl = self._find_node_template(self.args[0])
+ # Find attribute in node template's requirements
+ for r in node_tpl.requirements:
+ for req, node_name in r.items():
+ if req == req_or_cap:
+ node_template = self._find_node_template(node_name)
+ return self._get_capability_attribute(
+ node_template,
+ req,
+ attr_name)
+ # If requirement was not found, look in node template's capabilities
+ return self._get_capability_attribute(node_tpl,
+ req_or_cap,
+ attr_name)
+
+def _get_capability_attribute(self,
+ node_template,
+ capability_name,
+ attr_name):
+ """Gets a node template capability attribute."""
+ caps = node_template.get_capabilities()
+ if caps and capability_name in caps.keys():
+ cap = caps[capability_name]
+ attribute = None
+ attrs = cap.definition.get_attributes_def()
+ if attrs and attr_name in attrs.keys():
+ attribute = attrs[attr_name]
+ if not attribute:
+ ValidationIssueCollector.appendException(
+ KeyError(_('Attribute "%(attr)s" was not found in '
+ 'capability "%(cap)s" of node template '
+ '"%(ntpl1)s" referenced from node template '
+ '"%(ntpl2)s".') % {'attr': attr_name,
+ 'cap': capability_name,
+ 'ntpl1': node_template.name,
+ 'ntpl2': self.context.name}))
+ return attribute
+ msg = _('Requirement/CapabilityAssignment "{0}" referenced from node template '
+ '"{1}" was not found in node template "{2}".').format(
+ capability_name,
+ self.context.name,
+ node_template.name)
+ ValidationIssueCollector.appendException(KeyError(msg))
+
+@property
+def node_template_name(self):
+ return self.args[0]
+
+@property
+def attribute_name(self):
+ return self.args[1]
+*/
diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/functions/GetInput.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/functions/GetInput.java
new file mode 100644
index 0000000..ee5be17
--- /dev/null
+++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/functions/GetInput.java
@@ -0,0 +1,203 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * Copyright (c) 2017 AT&T Intellectual Property.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ * Modifications copyright (c) 2019 Fujitsu Limited.
+ * ================================================================================
+ */
+package org.onap.sdc.toscaparser.api.functions;
+
+import org.onap.sdc.toscaparser.api.DataEntity;
+import org.onap.sdc.toscaparser.api.TopologyTemplate;
+import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue;
+import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder;
+import org.onap.sdc.toscaparser.api.parameters.Input;
+
+import java.util.ArrayList;
+import java.util.LinkedHashMap;
+
/**
 * Implements the TOSCA {@code get_input} intrinsic function: retrieves a
 * value declared in the "inputs" section of the service template, e.g.
 * {@code get_input: port} or {@code get_input: [ports, 0]}.
 */
public class GetInput extends Function {

    // Placeholder token accepted in place of a concrete list index.
    public static final String INDEX = "INDEX";
    // Keys used to navigate the raw template map.
    public static final String INPUTS = "inputs";
    public static final String TYPE = "type";
    public static final String PROPERTIES = "properties";
    public static final String ENTRY_SCHEMA = "entry_schema";

    public GetInput(TopologyTemplate toscaTpl, Object context, String name, ArrayList<Object> _args) {
        super(toscaTpl, context, name, _args);

    }

    // Validates that args[0] names a declared template input and, when extra
    // navigation arguments are present (args.size() > 2), that each step is
    // either a list index or a property of the input's (custom) data type.
    @Override
    void validate() {

//        if(args.size() != 1) {
//            //PA - changed to WARNING from CRITICAL after talking to Renana, 22/05/2017
//            ThreadLocalsHolder.getCollector().appendWarning(String.format(
//                "ValueError: Expected one argument for function \"get_input\" but received \"%s\"",
//                args.toString()));
//        }
        // The first argument must match one of the template's declared inputs.
        boolean bFound = false;
        for (Input inp : toscaTpl.getInputs()) {
            if (inp.getName().equals(args.get(0))) {
                bFound = true;
                break;
            }
        }
        if (!bFound) {
            ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE158", String.format(
                    "UnknownInputError: Unknown input \"%s\"", args.get(0))));
        } else if (args.size() > 2) {
            LinkedHashMap<String, Object> inputs = (LinkedHashMap<String, Object>) toscaTpl.getTpl().get(INPUTS);
            // "data" tracks the schema fragment of the current navigation step.
            LinkedHashMap<String, Object> data = (LinkedHashMap<String, Object>) inputs.get(getInputName());
            String type;

            for (int argumentNumber = 1; argumentNumber < args.size(); argumentNumber++) {
                String dataTypeName = "";
                bFound = false;
                if (INDEX.equals(args.get(argumentNumber).toString()) || (args.get(argumentNumber) instanceof Integer)) {
                    // A numeric index (or the INDEX placeholder) is always
                    // accepted; it selects an element of a list input.
                    bFound = true;
                } else {
                    type = (String) data.get(TYPE);
                    //get type name
                    if (type.equals("list") || type.equals("map")) {
                        // For collections, the element type comes from entry_schema.
                        LinkedHashMap<String, Object> schema = (LinkedHashMap<String, Object>) data.get(ENTRY_SCHEMA);
                        dataTypeName = (String) schema.get(TYPE);
                    } else {
                        dataTypeName = type;
                    }
                    //check property name
                    // The navigation key must be a property of the custom data type.
                    LinkedHashMap<String, Object> dataType = (LinkedHashMap<String, Object>) toscaTpl.getCustomDefs().get(dataTypeName);
                    if (dataType != null) {
                        LinkedHashMap<String, Object> props = (LinkedHashMap<String, Object>) dataType.get(PROPERTIES);
                        data = (LinkedHashMap<String, Object>) props.get(args.get(argumentNumber).toString());
                        if (data != null) {
                            bFound = true;
                        }
                    }
                }
                if (!bFound) {
                    ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE282", String.format(
                            "UnknownDataType: Unknown data type \"%s\"", args.get(argumentNumber))));
                }
            }
        }
    }

    /**
     * Resolves the input to a concrete value.
     *
     * <p>Resolution order: a value supplied via parsed parameters wins (and is
     * validated against the input's declared type); otherwise the input
     * definition's default is used. For list-valued inputs with a second
     * integer argument, the indexed element is returned when the index is in
     * range. Returns null when the input has neither a parsed value nor a
     * default.</p>
     */
    public Object result() {
        if (toscaTpl.getParsedParams() != null &&
                toscaTpl.getParsedParams().get(getInputName()) != null) {
            LinkedHashMap<String, Object> ttinp = (LinkedHashMap<String, Object>) toscaTpl.getTpl().get(INPUTS);
            LinkedHashMap<String, Object> ttinpinp = (LinkedHashMap<String, Object>) ttinp.get(getInputName());
            String type = (String) ttinpinp.get("type");

            // Validate the externally supplied value against the declared type.
            Object value = DataEntity.validateDatatype(
                    type, toscaTpl.getParsedParams().get(getInputName()), null, toscaTpl.getCustomDefs(), null);
            //SDC resolving Get Input
            if (value instanceof ArrayList) {
                // get_input: [name, index] — return the indexed element when
                // the index is an in-range Integer.
                if (args.size() == 2 && args.get(1) instanceof Integer && ((ArrayList) value).size() > (Integer) args.get(1)) {
                    return ((ArrayList) value).get((Integer) args.get(1));
                }
                /* commented out for network cloud (SDNC)
                ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE273",String.format(
                    "GetInputError: cannot resolve input name \"%s\", the expected structure is an argument with a name of input type list and a second argument with an index in the list", args.get(0))));
                return null;
*/
            }
            return value;
        }

        // No parsed parameter: fall back to the input definition's default.
        Input inputDef = null;
        for (Input inpDef : toscaTpl.getInputs()) {
            if (getInputName().equals(inpDef.getName())) {
                inputDef = inpDef;
                break;
            }
        }
        if (inputDef != null) {
            // Same indexed-list handling as above, applied to the default value.
            if (args.size() == 2 && inputDef.getDefault() != null && inputDef.getDefault() instanceof ArrayList) {
                if (args.get(1) instanceof Integer
                        && ((ArrayList) inputDef.getDefault()).size() > ((Integer) args.get(1)).intValue()) {
                    return ((ArrayList) inputDef.getDefault()).get(((Integer) args.get(1)).intValue());
                }
/*
                commented out for network cloud (SDNC)
                ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE274",(String.format(
                    "GetInputError: cannot resolve input Def name \"%s\", the expected structure is an argument with a name of input type list and a second argument with an index in the list", args.get(0)))));
                return null;
*/
            }
            return inputDef.getDefault();
        }
        return null;
    }

    // First argument: the name of the referenced input.
    public String getInputName() {
        return (String) args.get(0);
    }

    // Returns the entry_schema map of the referenced input from the raw
    // template, or null when the input declares none.
    public LinkedHashMap<String, Object> getEntrySchema() {
        LinkedHashMap<String, Object> inputs = (LinkedHashMap<String, Object>) toscaTpl.getTpl().get(INPUTS);
        LinkedHashMap<String, Object> inputValue = (LinkedHashMap<String, Object>) inputs.get(getInputName());
        return (LinkedHashMap<String, Object>) inputValue.get(ENTRY_SCHEMA);
    }

    // Raw argument list as given in the template.
    public ArrayList<Object> getArguments() {
        return args;
    }
}
+
+/*python
+
+class GetInput(Function):
+"""Get a property value declared within the input of the service template.
+
+Arguments:
+
+* Input name.
+
+Example:
+
+* get_input: port
+"""
+
+def validate(self):
+ if len(self.args) != 1:
+ ValidationIssueCollector.appendException(
+ ValueError(_(
+ 'Expected one argument for function "get_input" but '
+ 'received "%s".') % self.args))
+ inputs = [input.name for input in self.tosca_tpl.inputs]
+ if self.args[0] not in inputs:
+ ValidationIssueCollector.appendException(
+ UnknownInputError(input_name=self.args[0]))
+
+def result(self):
+ if self.tosca_tpl.parsed_params and \
+ self.input_name in self.tosca_tpl.parsed_params:
+ return DataEntity.validate_datatype(
+ self.tosca_tpl.tpl['inputs'][self.input_name]['type'],
+ self.tosca_tpl.parsed_params[self.input_name])
+
+ input = [input_def for input_def in self.tosca_tpl.inputs
+ if self.input_name == input_def.name][0]
+ return input.default
+
+@property
+def input_name(self):
+ return self.args[0]
+
+*/
diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/functions/GetOperationOutput.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/functions/GetOperationOutput.java
new file mode 100644
index 0000000..06a28d6
--- /dev/null
+++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/functions/GetOperationOutput.java
@@ -0,0 +1,243 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.sdc.toscaparser.api.functions;
+
+import org.onap.sdc.toscaparser.api.EntityTemplate;
+import org.onap.sdc.toscaparser.api.NodeTemplate;
+import org.onap.sdc.toscaparser.api.RelationshipTemplate;
+import org.onap.sdc.toscaparser.api.TopologyTemplate;
+import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue;
+import org.onap.sdc.toscaparser.api.elements.InterfacesDef;
+import org.onap.sdc.toscaparser.api.elements.RelationshipType;
+import org.onap.sdc.toscaparser.api.elements.StatefulEntityType;
+import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder;
+
+import java.util.ArrayList;
+
+
+public class GetOperationOutput extends Function {
+    // Resolves the output of an operation: the function keeps its arguments
+    // (template name, interface name, operation name, output variable name)
+    // and validates them against the known interfaces/operations. The actual
+    // output value is only available at orchestration time, so result()
+    // returns the function object itself.
+
+    public GetOperationOutput(TopologyTemplate ttpl, Object context, String name, ArrayList<Object> args) {
+        super(ttpl, context, name, args);
+    }
+
+    @Override
+    public void validate() {
+        // get_operation_output takes exactly four arguments:
+        // template name, interface name, operation name, output variable name.
+        if (args.size() == 4) {
+            _findNodeTemplate((String) args.get(0));
+            String interfaceName = _findInterfaceName((String) args.get(1));
+            // _findInterfaceName reports an issue and returns null for a bad
+            // name; skip the operation lookup in that case so we do not
+            // dereference null inside _findOperationName.
+            if (interfaceName != null) {
+                _findOperationName(interfaceName, (String) args.get(2));
+            }
+        } else {
+            ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE159",
+                    "ValueError: Illegal arguments for function \"get_operation_output\". " +
+                            "Expected arguments: \"template_name\",\"interface_name\"," +
+                            "\"operation_name\",\"output_variable_name\""));
+        }
+    }
+
+    // Returns the interface name unchanged when it is one of the known
+    // interface sections; otherwise reports a validation issue and returns null.
+    private String _findInterfaceName(String _interfaceName) {
+        boolean bFound = false;
+        for (String sect : InterfacesDef.SECTIONS) {
+            if (sect.equals(_interfaceName)) {
+                bFound = true;
+                break;
+            }
+        }
+        if (bFound) {
+            return _interfaceName;
+        } else {
+            ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE160", String.format(
+                    "ValueError: invalid interface name \"%s\" in \"get_operation_output\"",
+                    _interfaceName)));
+            return null;
+        }
+    }
+
+    // Validates that operationName is a legal operation of the given interface
+    // (Configure or Standard, by short or fully-qualified name); reports a
+    // validation issue and returns null otherwise.
+    private String _findOperationName(String interfaceName, String operationName) {
+
+        if (interfaceName.equals("Configure")
+                || interfaceName.equals("tosca.interfaces.node.relationship.Configure")) {
+            boolean bFound = false;
+            for (String sect : StatefulEntityType.INTERFACE_RELATIONSHIP_CONFIGURE_OPERATIONS) {
+                if (sect.equals(operationName)) {
+                    bFound = true;
+                    break;
+                }
+            }
+            if (bFound) {
+                return operationName;
+            } else {
+                ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE161", String.format(
+                        "ValueError: Invalid operation of Configure interface \"%s\" in \"get_operation_output\"",
+                        operationName)));
+                return null;
+            }
+        }
+        if (interfaceName.equals("Standard")
+                || interfaceName.equals("tosca.interfaces.node.lifecycle.Standard")) {
+            boolean bFound = false;
+            for (String sect : StatefulEntityType.INTERFACE_NODE_LIFECYCLE_OPERATIONS) {
+                if (sect.equals(operationName)) {
+                    bFound = true;
+                    break;
+                }
+            }
+            if (bFound) {
+                return operationName;
+            } else {
+                // Fixed message: this branch checks the Standard interface,
+                // not the Configure interface.
+                ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE162", String.format(
+                        "ValueError: Invalid operation of Standard interface \"%s\" in \"get_operation_output\"",
+                        operationName)));
+                return null;
+            }
+        } else {
+            ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE163", String.format(
+                    "ValueError: Invalid interface name \"%s\" in \"get_operation_output\"",
+                    interfaceName)));
+            return null;
+        }
+    }
+
+    // Resolves a node template reference which may be an explicit template
+    // name or one of the keywords TARGET, SOURCE, SELF. Reports a validation
+    // issue and returns null when the reference cannot be resolved.
+    private NodeTemplate _findNodeTemplate(String nodeTemplateName) {
+        if (nodeTemplateName.equals(TARGET)) {
+            if (!(((EntityTemplate) context).getTypeDefinition() instanceof RelationshipType)) {
+                ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE164",
+                        "KeyError: \"TARGET\" keyword can only be used in context " +
+                                " to \"Relationships\" target node"));
+                return null;
+            }
+            return ((RelationshipTemplate) context).getTarget();
+        }
+        if (nodeTemplateName.equals(SOURCE)) {
+            if (!(((EntityTemplate) context).getTypeDefinition() instanceof RelationshipType)) {
+                ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE165",
+                        "KeyError: \"SOURCE\" keyword can only be used in context " +
+                                " to \"Relationships\" source node"));
+                return null;
+            }
+            // Fixed: SOURCE must resolve to the relationship's source node,
+            // not its target (Python original: "return self.context.source").
+            return ((RelationshipTemplate) context).getSource();
+        }
+        String name;
+        if (nodeTemplateName.equals(SELF) && !(context instanceof ArrayList)) {
+            name = ((NodeTemplate) context).getName();
+        } else {
+            name = nodeTemplateName;
+        }
+        for (NodeTemplate nt : toscaTpl.getNodeTemplates()) {
+            // Fixed: compare each template's name against the resolved name.
+            // The previous test (nodeTemplateName.equals(name)) never looked at
+            // nt, so it matched either every template or none.
+            if (nt.getName().equals(name)) {
+                return nt;
+            }
+        }
+        ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE166", String.format(
+                "KeyError: Node template \"%s\" was not found", nodeTemplateName)));
+        return null;
+    }
+
+    @Override
+    public Object result() {
+        // Operation outputs exist only at orchestration time, so the function
+        // itself is the result.
+        return this;
+    }
+
+}
+
+/*python
+
+class GetOperationOutput(Function):
+def validate(self):
+ if len(self.args) == 4:
+ self._find_node_template(self.args[0])
+ interface_name = self._find_interface_name(self.args[1])
+ self._find_operation_name(interface_name, self.args[2])
+ else:
+ ValidationIssueCollector.appendException(
+ ValueError(_('Illegal arguments for function "{0}". Expected '
+ 'arguments: "template_name","interface_name",'
+ '"operation_name","output_variable_name"'
+ ).format(GET_OPERATION_OUTPUT)))
+ return
+
+def _find_interface_name(self, interface_name):
+ if interface_name in toscaparser.elements.interfaces.SECTIONS:
+ return interface_name
+ else:
+ ValidationIssueCollector.appendException(
+ ValueError(_('Enter a valid interface name'
+ ).format(GET_OPERATION_OUTPUT)))
+ return
+
+def _find_operation_name(self, interface_name, operation_name):
+ if(interface_name == 'Configure' or
+ interface_name == 'tosca.interfaces.node.relationship.Configure'):
+ if(operation_name in
+ StatefulEntityType.
+ interfaces_relationship_configure_operations):
+ return operation_name
+ else:
+ ValidationIssueCollector.appendException(
+ ValueError(_('Enter an operation of Configure interface'
+ ).format(GET_OPERATION_OUTPUT)))
+ return
+ elif(interface_name == 'Standard' or
+ interface_name == 'tosca.interfaces.node.lifecycle.Standard'):
+ if(operation_name in
+ StatefulEntityType.interfaces_node_lifecycle_operations):
+ return operation_name
+ else:
+ ValidationIssueCollector.appendException(
+ ValueError(_('Enter an operation of Standard interface'
+ ).format(GET_OPERATION_OUTPUT)))
+ return
+ else:
+ ValidationIssueCollector.appendException(
+ ValueError(_('Enter a valid operation name'
+ ).format(GET_OPERATION_OUTPUT)))
+ return
+
+def _find_node_template(self, node_template_name):
+ if node_template_name == TARGET:
+ if not isinstance(self.context.type_definition, RelationshipType):
+ ValidationIssueCollector.appendException(
+ KeyError(_('"TARGET" keyword can only be used in context'
+ ' to "Relationships" target node')))
+ return
+ return self.context.target
+ if node_template_name == SOURCE:
+ if not isinstance(self.context.type_definition, RelationshipType):
+ ValidationIssueCollector.appendException(
+ KeyError(_('"SOURCE" keyword can only be used in context'
+ ' to "Relationships" source node')))
+ return
+ return self.context.source
+ name = self.context.name \
+ if node_template_name == SELF and \
+ not isinstance(self.context, list) \
+ else node_template_name
+ for node_template in self.tosca_tpl.nodetemplates:
+ if node_template.name == name:
+ return node_template
+ ValidationIssueCollector.appendException(
+ KeyError(_(
+ 'Node template "{0}" was not found.'
+ ).format(node_template_name)))
+
+def result(self):
+ return self
+*/
diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/functions/GetProperty.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/functions/GetProperty.java
new file mode 100644
index 0000000..90e0a8e
--- /dev/null
+++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/functions/GetProperty.java
@@ -0,0 +1,639 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.sdc.toscaparser.api.functions;
+
+import org.onap.sdc.toscaparser.api.CapabilityAssignment;
+import org.onap.sdc.toscaparser.api.NodeTemplate;
+import org.onap.sdc.toscaparser.api.Property;
+import org.onap.sdc.toscaparser.api.RelationshipTemplate;
+import org.onap.sdc.toscaparser.api.RequirementAssignment;
+import org.onap.sdc.toscaparser.api.TopologyTemplate;
+import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue;
+import org.onap.sdc.toscaparser.api.elements.CapabilityTypeDef;
+import org.onap.sdc.toscaparser.api.elements.EntityType;
+import org.onap.sdc.toscaparser.api.elements.NodeType;
+import org.onap.sdc.toscaparser.api.elements.PropertyDef;
+import org.onap.sdc.toscaparser.api.elements.RelationshipType;
+import org.onap.sdc.toscaparser.api.elements.StatefulEntityType;
+import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder;
+
+import java.util.ArrayList;
+import java.util.LinkedHashMap;
+
+public class GetProperty extends Function {
+    // Get a property value of an entity defined in the same service template
+
+    // Arguments:
+
+    // * Node template name | SELF | HOST | SOURCE | TARGET.
+    // * Requirement or capability name (optional).
+    // * Property name.
+
+    // If requirement or capability name is specified, the behavior is as follows:
+    // The req or cap name is first looked up in the specified node template's
+    // requirements.
+    // If found, it would search for a matching capability
+    // of an other node template and get its property as specified in function
+    // arguments.
+    // Otherwise, the req or cap name would be looked up in the specified
+    // node template's capabilities and if found, it would return the property of
+    // the capability as specified in function arguments.
+
+    // Examples:
+
+    // * { get_property: [ mysql_server, port ] }
+    // * { get_property: [ SELF, db_port ] }
+    // * { get_property: [ SELF, database_endpoint, port ] }
+    // * { get_property: [ SELF, database_endpoint, port, 1 ] }
+
+
+    public GetProperty(TopologyTemplate ttpl, Object context, String name, ArrayList<Object> args) {
+        super(ttpl, context, name, args);
+    }
+
+    @Override
+    void validate() {
+        if (args.size() < 2) {
+            ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE167",
+                    "ValueError: Illegal arguments for function \"get_property\". Expected arguments: \"node-template-name\", \"req-or-cap\" (optional), \"property name.\""));
+            return;
+        }
+        if (args.size() == 2) {
+            Property foundProp = _findProperty((String) args.get(1));
+            if (foundProp == null) {
+                return;
+            }
+            Object prop = foundProp.getValue();
+            // Fixed: resolve values that are NOT already parsed Function
+            // instances, so nested functions inside the raw value get
+            // validated. The Python original reads
+            // "if not isinstance(prop, Function): get_function(...)";
+            // the previous Java port inverted the test.
+            if (!(prop instanceof Function)) {
+                getFunction(toscaTpl, context, prop, toscaTpl.getResolveGetInput());
+            }
+        } else if (args.size() >= 3) {
+            // do not use _find_property to avoid raise KeyError
+            // if the prop is not found
+            // First check if there is property with this name
+            NodeTemplate nodeTpl = _findNodeTemplate((String) args.get(0));
+            LinkedHashMap<String, Property> props;
+            if (nodeTpl != null) {
+                props = nodeTpl.getProperties();
+            } else {
+                props = new LinkedHashMap<>();
+            }
+            int index = 2;
+            Object propertyValue;
+            if (props.get(args.get(1)) != null) {
+                propertyValue = ((Property) props.get(args.get(1))).getValue();
+            } else {
+                index = 3;
+                // then check the req or caps
+                propertyValue = _findReqOrCapProperty((String) args.get(1), (String) args.get(2));
+            }
+
+            if (args.size() > index) {
+                // Walk the remaining arguments as nested index/attribute
+                // accessors. Fixed: the upper bound is args.size(), not
+                // args.size() - 1, otherwise the last accessor is silently
+                // dropped (Python original iterates self.args[index:]).
+                for (Object elem : args.subList(index, args.size())) {
+                    if (propertyValue instanceof ArrayList) {
+                        int intElem = (int) elem;
+                        propertyValue = _getIndexValue(propertyValue, intElem);
+                    } else {
+                        propertyValue = _getAttributeValue(propertyValue, (String) elem);
+                    }
+                }
+            }
+        }
+    }
+
+    // Finds the property either behind a requirement of the referenced node
+    // template (looked up on the requirement's target node) or, failing that,
+    // in one of the node template's own capabilities.
+    @SuppressWarnings("unchecked")
+    private Object _findReqOrCapProperty(String reqOrCap, String propertyName) {
+        NodeTemplate nodeTpl = _findNodeTemplate((String) args.get(0));
+        if (nodeTpl == null) {
+            return null;
+        }
+        // look for property in node template's requirements
+        for (RequirementAssignment req : nodeTpl.getRequirements().getAll()) {
+            String nodeName = req.getNodeTemplateName();
+            if (req.getName().equals(reqOrCap)) {
+                NodeTemplate nodeTemplate = _findNodeTemplate(nodeName);
+                if (nodeTemplate == null) {
+                    // _findNodeTemplate already reported the missing target
+                    return null;
+                }
+                return _getCapabilityProperty(nodeTemplate, req.getName(), propertyName, true);
+            }
+        }
+        // If requirement was not found, look in node template's capabilities
+        return _getCapabilityProperty(nodeTpl, reqOrCap, propertyName, true);
+    }
+
+    // Gets a node template capability property. When throwErrors is true a
+    // validation issue is reported for a missing capability or property;
+    // returns null when the property cannot be resolved.
+    private Object _getCapabilityProperty(NodeTemplate nodeTemplate,
+                                          String capabilityName,
+                                          String propertyName,
+                                          boolean throwErrors) {
+
+        Object property = null;
+        CapabilityAssignment cap = nodeTemplate.getCapabilities().getCapabilityByName(capabilityName);
+        if (cap != null) {
+            LinkedHashMap<String, Property> props = cap.getProperties();
+            if (props != null && props.get(propertyName) != null) {
+                property = ((Property) props.get(propertyName)).getValue();
+            }
+            if (property == null && throwErrors) {
+                ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE168", String.format(
+                        "KeyError: Property \"%s\" was not found in capability \"%s\" of node template \"%s\" referenced from node template \"%s\"",
+                        propertyName, capabilityName, nodeTemplate.getName(), ((NodeTemplate) context).getName())));
+            }
+            return property;
+        }
+        if (throwErrors) {
+            ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE169", String.format(
+                    "KeyError: Requirement/CapabilityAssignment \"%s\" referenced from node template \"%s\" was not found in node template \"%s\"",
+                    capabilityName, ((NodeTemplate) context).getName(), nodeTemplate.getName())));
+        }
+
+        return null;
+    }
+
+    // Finds a property directly on the referenced node template; reports a
+    // validation issue and returns null when it is absent.
+    private Property _findProperty(String propertyName) {
+        NodeTemplate nodeTpl = _findNodeTemplate((String) args.get(0));
+        if (nodeTpl == null) {
+            return null;
+        }
+        LinkedHashMap<String, Property> props = nodeTpl.getProperties();
+        Property found = props.get(propertyName);
+        if (found == null) {
+            ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE170", String.format(
+                    "KeyError: Property \"%s\" was not found in node template \"%s\"",
+                    propertyName, nodeTpl.getName())));
+        }
+        return found;
+    }
+
+    // Resolves a node template reference which may be an explicit template
+    // name or one of the keywords SELF, HOST, TARGET, SOURCE. Reports a
+    // validation issue and returns null when it cannot be resolved.
+    private NodeTemplate _findNodeTemplate(String nodeTemplateName) {
+        if (nodeTemplateName.equals(SELF)) {
+            return (NodeTemplate) context;
+        }
+        // enable the HOST value in the function
+        if (nodeTemplateName.equals(HOST)) {
+            NodeTemplate node = _findHostContainingProperty(null);
+            if (node == null) {
+                // Fixed: the format string previously had four %s placeholders
+                // but only three arguments, which made String.format throw a
+                // MissingFormatArgumentException instead of reporting the issue.
+                ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE171", String.format(
+                        "KeyError: Property \"%s\" was not found in capability \"%s\" referenced from node template \"%s\"",
+                        (String) args.get(2), (String) args.get(1), ((NodeTemplate) context).getName())));
+                return null;
+            }
+            return node;
+        }
+        if (nodeTemplateName.equals(TARGET)) {
+            if (!(((RelationshipTemplate) context).getTypeDefinition() instanceof RelationshipType)) {
+                ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE172",
+                        "KeyError: \"TARGET\" keyword can only be used in context to \"Relationships\" target node"));
+                return null;
+            }
+            return ((RelationshipTemplate) context).getTarget();
+        }
+        if (nodeTemplateName.equals(SOURCE)) {
+            if (!(((RelationshipTemplate) context).getTypeDefinition() instanceof RelationshipType)) {
+                ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE173",
+                        "KeyError: \"SOURCE\" keyword can only be used in context to \"Relationships\" target node"));
+                return null;
+            }
+            return ((RelationshipTemplate) context).getSource();
+        }
+        if (toscaTpl.getNodeTemplates() == null) {
+            return null;
+        }
+        for (NodeTemplate nodeTemplate : toscaTpl.getNodeTemplates()) {
+            if (nodeTemplate.getName().equals(nodeTemplateName)) {
+                return nodeTemplate;
+            }
+        }
+        ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE174", String.format(
+                "KeyError: Node template \"%s\" was not found. Referenced from Node Template \"%s\"",
+                nodeTemplateName, ((NodeTemplate) context).getName())));
+
+        return null;
+    }
+
+    // Returns value[index] when value is a list with a large enough size;
+    // otherwise reports a validation issue and returns null.
+    @SuppressWarnings("rawtypes")
+    private Object _getIndexValue(Object value, int index) {
+        if (value instanceof ArrayList) {
+            if (index < ((ArrayList) value).size()) {
+                return ((ArrayList) value).get(index);
+            } else {
+                ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE175", String.format(
+                        "KeyError: Property \"%s\" found in capability \"%s\" referenced from node template \"%s\" must have an element with index %d",
+                        args.get(2), args.get(1), ((NodeTemplate) context).getName(), index)));
+
+            }
+        } else {
+            ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE176", String.format(
+                    "KeyError: Property \"%s\" found in capability \"%s\" referenced from node template \"%s\" must be a list",
+                    args.get(2), args.get(1), ((NodeTemplate) context).getName())));
+        }
+        return null;
+    }
+
+    // Returns value[attribute] when value is a map that contains the key;
+    // otherwise reports a validation issue and returns null.
+    @SuppressWarnings("unchecked")
+    private Object _getAttributeValue(Object value, String attribute) {
+        if (value instanceof LinkedHashMap) {
+            Object ov = ((LinkedHashMap<String, Object>) value).get(attribute);
+            if (ov != null) {
+                return ov;
+            } else {
+                ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE177", String.format(
+                        "KeyError: Property \"%s\" found in capability \"%s\" referenced from node template \"%s\" must have an attribute named \"%s\"",
+                        args.get(2), args.get(1), ((NodeTemplate) context).getName(), attribute)));
+            }
+        } else {
+            ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE178", String.format(
+                    "KeyError: Property \"%s\" found in capability \"%s\" referenced from node template \"%s\" must be a dict",
+                    args.get(2), args.get(1), ((NodeTemplate) context).getName())));
+        }
+        return null;
+    }
+
+    // Add this functions similar to get_attribute case
+    // Walks HostedOn requirements recursively until a node whose type (or one
+    // of whose capabilities) declares the requested property is found.
+    private NodeTemplate _findHostContainingProperty(String nodeTemplateName) {
+        if (nodeTemplateName == null) {
+            nodeTemplateName = SELF;
+        }
+        NodeTemplate nodeTemplate = _findNodeTemplate(nodeTemplateName);
+        if (nodeTemplate == null) {
+            // reference could not be resolved; issue already reported
+            return null;
+        }
+        LinkedHashMap<String, Object> hostedOnRel = (LinkedHashMap<String, Object>)
+                EntityType.TOSCA_DEF.get(HOSTED_ON);
+        for (RequirementAssignment requirement : nodeTemplate.getRequirements().getAll()) {
+            String targetName = requirement.getNodeTemplateName();
+            NodeTemplate targetNode = _findNodeTemplate(targetName);
+            if (targetNode == null) {
+                // skip requirements whose target template does not exist
+                continue;
+            }
+            NodeType targetType = (NodeType) targetNode.getTypeDefinition();
+            for (CapabilityTypeDef capDef : targetType.getCapabilitiesObjects()) {
+                if (capDef.inheritsFrom((ArrayList<String>) hostedOnRel.get("valid_target_types"))) {
+                    if (_propertyExistsInType(targetType)) {
+                        return targetNode;
+                    }
+                    // If requirement was not found, look in node
+                    // template's capabilities
+                    if (args.size() > 2 &&
+                            _getCapabilityProperty(targetNode, (String) args.get(1), (String) args.get(2), false) != null) {
+                        return targetNode;
+                    }
+
+                    return _findHostContainingProperty(targetName);
+                }
+            }
+
+        }
+        return null;
+    }
+
+    // True when the type definition declares a property named args[1].
+    private boolean _propertyExistsInType(StatefulEntityType typeDefinition) {
+        LinkedHashMap<String, PropertyDef> propsDef = typeDefinition.getPropertiesDef();
+        return propsDef.keySet().contains((String) args.get(1));
+    }
+
+    @Override
+    public Object result() {
+        Object propertyValue;
+        if (args.size() >= 3) {
+            // First check if there is property with this name
+            NodeTemplate nodeTpl = _findNodeTemplate((String) args.get(0));
+            LinkedHashMap<String, Property> props;
+            if (nodeTpl != null) {
+                props = nodeTpl.getProperties();
+            } else {
+                props = new LinkedHashMap<>();
+            }
+            int index = 2;
+            if (props.get(args.get(1)) != null) {
+                propertyValue = ((Property) props.get(args.get(1))).getValue();
+            } else {
+                index = 3;
+                // then check the req or caps
+                propertyValue = _findReqOrCapProperty((String) args.get(1), (String) args.get(2));
+            }
+
+            if (args.size() > index) {
+                // Fixed: iterate to args.size() so the final accessor argument
+                // is applied as well (Python original: self.args[index:]).
+                for (Object elem : args.subList(index, args.size())) {
+                    if (propertyValue instanceof ArrayList) {
+                        int intElem = (int) elem;
+                        propertyValue = _getIndexValue(propertyValue, intElem);
+                    } else {
+                        propertyValue = _getAttributeValue(propertyValue, (String) elem);
+                    }
+                }
+            }
+        } else {
+            Property found = _findProperty((String) args.get(1));
+            if (found == null) {
+                // Fixed: _findProperty returns null (after reporting the issue)
+                // for a missing property; dereferencing it directly threw an NPE.
+                return null;
+            }
+            propertyValue = found.getValue();
+        }
+        if (propertyValue instanceof Function) {
+            return ((Function) propertyValue).result();
+        }
+        return getFunction(toscaTpl, context, propertyValue, toscaTpl.getResolveGetInput());
+    }
+
+    // First argument: the referenced node template name (or keyword).
+    public String getNodeTemplateName() {
+        return (String) args.get(0);
+    }
+
+    // The property name is the third argument when a req/cap name is present,
+    // otherwise the second.
+    public String getPropertyName() {
+        if (args.size() > 2) {
+            return (String) args.get(2);
+        }
+        return (String) args.get(1);
+    }
+
+    // The requirement/capability name, present only in the three-argument form.
+    public String getReqorCap() {
+        if (args.size() > 2) {
+            return (String) args.get(1);
+        }
+        return null;
+    }
+
+}
+
+/*python
+
+class GetProperty(Function):
+"""Get a property value of an entity defined in the same service template.
+
+Arguments:
+
+* Node template name | SELF | HOST | SOURCE | TARGET.
+* Requirement or capability name (optional).
+* Property name.
+
+If requirement or capability name is specified, the behavior is as follows:
+The req or cap name is first looked up in the specified node template's
+requirements.
+If found, it would search for a matching capability
+of an other node template and get its property as specified in function
+arguments.
+Otherwise, the req or cap name would be looked up in the specified
+node template's capabilities and if found, it would return the property of
+the capability as specified in function arguments.
+
+Examples:
+
+* { get_property: [ mysql_server, port ] }
+* { get_property: [ SELF, db_port ] }
+* { get_property: [ SELF, database_endpoint, port ] }
+* { get_property: [ SELF, database_endpoint, port, 1 ] }
+"""
+
+def validate(self):
+ if len(self.args) < 2:
+ ValidationIssueCollector.appendException(
+ ValueError(_(
+ 'Expected arguments: "node-template-name", "req-or-cap" '
+ '(optional), "property name".')))
+ return
+ if len(self.args) == 2:
+ found_prop = self._find_property(self.args[1])
+ if not found_prop:
+ return
+ prop = found_prop.value
+ if not isinstance(prop, Function):
+ get_function(self.tosca_tpl, self.context, prop)
+ elif len(self.args) >= 3:
+ # do not use _find_property to avoid raise KeyError
+ # if the prop is not found
+ # First check if there is property with this name
+ node_tpl = self._find_node_template(self.args[0])
+ props = node_tpl.get_properties() if node_tpl else []
+ index = 2
+ found = [props[self.args[1]]] if self.args[1] in props else []
+ if found:
+ property_value = found[0].value
+ else:
+ index = 3
+ # then check the req or caps
+ property_value = self._find_req_or_cap_property(self.args[1],
+ self.args[2])
+ if len(self.args) > index:
+ for elem in self.args[index:]:
+ if isinstance(property_value, list):
+ int_elem = int(elem)
+ property_value = self._get_index_value(property_value,
+ int_elem)
+ else:
+ property_value = self._get_attribute_value(
+ property_value,
+ elem)
+
+def _find_req_or_cap_property(self, req_or_cap, property_name):
+ node_tpl = self._find_node_template(self.args[0])
+ # Find property in node template's requirements
+ for r in node_tpl.requirements:
+ for req, node_name in r.items():
+ if req == req_or_cap:
+ node_template = self._find_node_template(node_name)
+ return self._get_capability_property(
+ node_template,
+ req,
+ property_name)
+ # If requirement was not found, look in node template's capabilities
+ return self._get_capability_property(node_tpl,
+ req_or_cap,
+ property_name)
+
+def _get_capability_property(self,
+ node_template,
+ capability_name,
+ property_name):
+ """Gets a node template capability property."""
+ caps = node_template.get_capabilities()
+ if caps and capability_name in caps.keys():
+ cap = caps[capability_name]
+ property = None
+ props = cap.get_properties()
+ if props and property_name in props.keys():
+ property = props[property_name].value
+ if not property:
+ ValidationIssueCollector.appendException(
+ KeyError(_('Property "%(prop)s" was not found in '
+ 'capability "%(cap)s" of node template '
+ '"%(ntpl1)s" referenced from node template '
+ '"%(ntpl2)s".') % {'prop': property_name,
+ 'cap': capability_name,
+ 'ntpl1': node_template.name,
+ 'ntpl2': self.context.name}))
+ return property
+ msg = _('Requirement/CapabilityAssignment "{0}" referenced from node template '
+ '"{1}" was not found in node template "{2}".').format(
+ capability_name,
+ self.context.name,
+ node_template.name)
+ ValidationIssueCollector.appendException(KeyError(msg))
+
+def _find_property(self, property_name):
+ node_tpl = self._find_node_template(self.args[0])
+ if not node_tpl:
+ return
+ props = node_tpl.get_properties()
+ found = [props[property_name]] if property_name in props else []
+ if len(found) == 0:
+ ValidationIssueCollector.appendException(
+ KeyError(_('Property "%(prop)s" was not found in node '
+ 'template "%(ntpl)s".') %
+ {'prop': property_name,
+ 'ntpl': node_tpl.name}))
+ return None
+ return found[0]
+
+def _find_node_template(self, node_template_name):
+ if node_template_name == SELF:
+ return self.context
+ # enable the HOST value in the function
+ if node_template_name == HOST:
+ return self._find_host_containing_property()
+ if node_template_name == TARGET:
+ if not isinstance(self.context.type_definition, RelationshipType):
+ ValidationIssueCollector.appendException(
+ KeyError(_('"TARGET" keyword can only be used in context'
+ ' to "Relationships" target node')))
+ return
+ return self.context.target
+ if node_template_name == SOURCE:
+ if not isinstance(self.context.type_definition, RelationshipType):
+ ValidationIssueCollector.appendException(
+ KeyError(_('"SOURCE" keyword can only be used in context'
+ ' to "Relationships" source node')))
+ return
+ return self.context.source
+ if not hasattr(self.tosca_tpl, 'nodetemplates'):
+ return
+ for node_template in self.tosca_tpl.nodetemplates:
+ if node_template.name == node_template_name:
+ return node_template
+ ValidationIssueCollector.appendException(
+ KeyError(_(
+ 'Node template "{0}" was not found.'
+ ).format(node_template_name)))
+
+def _get_index_value(self, value, index):
+ if isinstance(value, list):
+ if index < len(value):
+ return value[index]
+ else:
+ ValidationIssueCollector.appendException(
+ KeyError(_(
+ "Property '{0}' found in capability '{1}'"
+ " referenced from node template {2}"
+ " must have an element with index {3}.").
+ format(self.args[2],
+ self.args[1],
+ self.context.name,
+ index)))
+ else:
+ ValidationIssueCollector.appendException(
+ KeyError(_(
+ "Property '{0}' found in capability '{1}'"
+ " referenced from node template {2}"
+ " must be a list.").format(self.args[2],
+ self.args[1],
+ self.context.name)))
+
+def _get_attribute_value(self, value, attibute):
+ if isinstance(value, dict):
+ if attibute in value:
+ return value[attibute]
+ else:
+ ValidationIssueCollector.appendException(
+ KeyError(_(
+ "Property '{0}' found in capability '{1}'"
+ " referenced from node template {2}"
+ " must have an attribute named {3}.").
+ format(self.args[2],
+ self.args[1],
+ self.context.name,
+ attibute)))
+ else:
+ ValidationIssueCollector.appendException(
+ KeyError(_(
+ "Property '{0}' found in capability '{1}'"
+ " referenced from node template {2}"
+ " must be a dict.").format(self.args[2],
+ self.args[1],
+ self.context.name)))
+
+# Add this functions similar to get_attribute case
+def _find_host_containing_property(self, node_template_name=SELF):
+ node_template = self._find_node_template(node_template_name)
+ hosted_on_rel = EntityType.TOSCA_DEF[HOSTED_ON]
+ for r in node_template.requirements:
+ for requirement, target_name in r.items():
+ target_node = self._find_node_template(target_name)
+ target_type = target_node.type_definition
+ for capability in target_type.get_capabilities_objects():
+ if capability.type in hosted_on_rel['valid_target_types']:
+ if self._property_exists_in_type(target_type):
+ return target_node
+ return self._find_host_containing_property(
+ target_name)
+ return None
+
+def _property_exists_in_type(self, type_definition):
+ props_def = type_definition.get_properties_def()
+ found = [props_def[self.args[1]]] \
+ if self.args[1] in props_def else []
+ return len(found) == 1
+
+def result(self):
+ if len(self.args) >= 3:
+ # First check if there is property with this name
+ node_tpl = self._find_node_template(self.args[0])
+ props = node_tpl.get_properties() if node_tpl else []
+ index = 2
+ found = [props[self.args[1]]] if self.args[1] in props else []
+ if found:
+ property_value = found[0].value
+ else:
+ index = 3
+ # then check the req or caps
+ property_value = self._find_req_or_cap_property(self.args[1],
+ self.args[2])
+ if len(self.args) > index:
+ for elem in self.args[index:]:
+ if isinstance(property_value, list):
+ int_elem = int(elem)
+ property_value = self._get_index_value(property_value,
+ int_elem)
+ else:
+ property_value = self._get_attribute_value(
+ property_value,
+ elem)
+ else:
+ property_value = self._find_property(self.args[1]).value
+ if isinstance(property_value, Function):
+ return property_value.result()
+ return get_function(self.tosca_tpl,
+ self.context,
+ property_value)
+
+@property
+def node_template_name(self):
+ return self.args[0]
+
+@property
+def property_name(self):
+ if len(self.args) > 2:
+ return self.args[2]
+ return self.args[1]
+
+@property
+def req_or_cap(self):
+ if len(self.args) > 2:
+ return self.args[1]
+ return None
+*/
diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/functions/Token.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/functions/Token.java
new file mode 100644
index 0000000..240ce85
--- /dev/null
+++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/functions/Token.java
@@ -0,0 +1,130 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.sdc.toscaparser.api.functions;
+
+import org.onap.sdc.toscaparser.api.TopologyTemplate;
+import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue;
+
+import java.util.ArrayList;
+
+import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder;
+
+public class Token extends Function {
+
+ // The token function is used within a TOSCA service template on a string
+ // to parse out (tokenize) substrings separated by one or more token
+ // characters within a larger string.
+ //
+ // Arguments:
+ // * The composite string that contains one or more substrings separated
+ // by token characters.
+ // * The string that contains one or more token characters that separate
+ // substrings within the composite string.
+ // * The integer indicating the index of the substring to return from the
+ // composite string; the first substring is denoted by the '0' (zero) value.
+ //
+ // Example:
+ // [ get_attribute: [ my_server, data_endpoint, ip_address ], ':', 1 ]
+
+ public Token(TopologyTemplate ttpl, Object context, String name, ArrayList<Object> args) {
+ super(ttpl, context, name, args);
+ }
+
+ // Tokenization is deferred; the function object itself is the result.
+ @Override
+ public Object result() {
+ return this;
+ }
+
+ // Validates the argument list: at least three arguments, a single-char
+ // separator string as the second argument, and an integer index as the third.
+ @Override
+ void validate() {
+ if (args.size() < 3) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE180",
+ "ValueError: Invalid arguments for function \"token\". " +
+ "Expected at least three arguments"));
+ } else {
+ if (!(args.get(1) instanceof String) ||
+ ((String) args.get(1)).length() != 1) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE181",
+ "ValueError: Invalid arguments for function \"token\". " +
+ "Expected single char value as second argument"));
+ }
+ if (!(args.get(2) instanceof Integer)) {
+ // Bug fix: the JE182 message was missing the ". " separator between the
+ // two concatenated fragments, producing '..."token"Expected integer...'.
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE182",
+ "ValueError: Invalid arguments for function \"token\". " +
+ "Expected integer value as third argument"));
+ }
+ }
+ }
+
+}
+
+/*python
+
+class Token(Function):
+"""Validate the function and provide an instance of the function
+
+The token function is used within a TOSCA service template on a string to
+parse out (tokenize) substrings separated by one or more token characters
+within a larger string.
+
+
+Arguments:
+
+* The composite string that contains one or more substrings separated by
+ token characters.
+* The string that contains one or more token characters that separate
+ substrings within the composite string.
+* The integer indicates the index of the substring to return from the
+ composite string. Note that the first substring is denoted by using
+ the '0' (zero) integer value.
+
+Example:
+
+ [ get_attribute: [ my_server, data_endpoint, ip_address ], ':', 1 ]
+
+"""
+
+def validate(self):
+ if len(self.args) < 3:
+ ValidationIssueCollector.appendException(
+ ValueError(_('Invalid arguments for function "{0}". Expected '
+ 'at least three arguments.').format(TOKEN)))
+ else:
+ if not isinstance(self.args[1], str) or len(self.args[1]) != 1:
+ ValidationIssueCollector.appendException(
+ ValueError(_('Invalid arguments for function "{0}". '
+ 'Expected single char value as second '
+ 'argument.').format(TOKEN)))
+
+ if not isinstance(self.args[2], int):
+ ValidationIssueCollector.appendException(
+ ValueError(_('Invalid arguments for function "{0}". '
+ 'Expected integer value as third '
+ 'argument.').format(TOKEN)))
+
+def result(self):
+ return self
+*/
diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/parameters/Annotation.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/parameters/Annotation.java
new file mode 100644
index 0000000..a34ebb5
--- /dev/null
+++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/parameters/Annotation.java
@@ -0,0 +1,98 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.sdc.toscaparser.api.parameters;
+
+import org.onap.sdc.toscaparser.api.Property;
+import org.onap.sdc.toscaparser.api.elements.enums.ToscaElementNames;
+
+import java.util.ArrayList;
+import java.util.Map;
+import java.util.Optional;
+import java.util.stream.Collectors;
+
+public class Annotation {
+
+ private static final String HEAT = "HEAT";
+ private String name;
+ private String type;
+ private ArrayList<Property> properties;
+
+
+ public Annotation() {
+ }
+
+ // Builds an annotation from one map entry: the key is the annotation name,
+ // the value is a map carrying its "type" and "properties" fields.
+ @SuppressWarnings("unchecked")
+ public Annotation(Map.Entry<String, Object> annotationEntry) {
+ if (annotationEntry != null) {
+ name = annotationEntry.getKey();
+ Map<String, Object> annValue = (Map<String, Object>) annotationEntry.getValue();
+ type = (String) annValue.get(ToscaElementNames.TYPE.getName());
+ properties = fetchProperties((Map<String, Object>) annValue.get(ToscaElementNames.PROPERTIES.getName()));
+ }
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public void setName(String name) {
+ this.name = name;
+ }
+
+ public String getType() {
+ return type;
+ }
+
+ public void setType(String type) {
+ this.type = type;
+ }
+
+ public ArrayList<Property> getProperties() {
+ return properties;
+ }
+
+ public void setProperties(ArrayList<Property> properties) {
+ this.properties = properties;
+ }
+
+ // Converts the raw properties map into Property objects; returns null when
+ // the annotation declares no properties.
+ private ArrayList<Property> fetchProperties(Map<String, Object> properties) {
+ if (properties != null) {
+ // Bug fix: collect into an explicit ArrayList. Collectors.toList() makes
+ // no guarantee about the concrete List class, so the previous blind cast
+ // to ArrayList was fragile.
+ return properties.entrySet().stream()
+ .map(Property::new)
+ .collect(Collectors.toCollection(ArrayList::new));
+ }
+ return null;
+ }
+
+ // True when a "source_type" property exists and its value equals "HEAT".
+ public boolean isHeatSourceType() {
+ if (properties == null) {
+ return false;
+ }
+ Optional<Property> sourceType = properties.stream()
+ .filter(p -> p.getName().equals(ToscaElementNames.SOURCE_TYPE.getName()))
+ .findFirst();
+ if (!sourceType.isPresent()) {
+ return false;
+ }
+ // HEAT.equals(...) is null-safe and cannot throw the ClassCastException
+ // the previous (String) cast could for non-String property values.
+ return HEAT.equals(sourceType.get().getValue());
+ }
+
+}
diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/parameters/Input.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/parameters/Input.java
new file mode 100644
index 0000000..5d3ecb4
--- /dev/null
+++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/parameters/Input.java
@@ -0,0 +1,199 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.sdc.toscaparser.api.parameters;
+
+import org.onap.sdc.toscaparser.api.DataEntity;
+import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue;
+import org.onap.sdc.toscaparser.api.elements.EntityType;
+import org.onap.sdc.toscaparser.api.elements.constraints.Constraint;
+import org.onap.sdc.toscaparser.api.elements.constraints.Schema;
+import org.onap.sdc.toscaparser.api.elements.enums.ToscaElementNames;
+import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.LinkedHashMap;
+import java.util.Map;
+import java.util.stream.Collectors;
+
+public class Input {
+
+ private static final String TYPE = "type";
+ private static final String DESCRIPTION = "description";
+ private static final String DEFAULT = "default";
+ private static final String CONSTRAINTS = "constraints";
+ private static final String REQUIRED = "required";
+ private static final String STATUS = "status";
+ private static final String ENTRY_SCHEMA = "entry_schema";
+
+ public static final String INTEGER = "integer";
+ public static final String STRING = "string";
+ public static final String BOOLEAN = "boolean";
+ public static final String FLOAT = "float";
+ public static final String LIST = "list";
+ public static final String MAP = "map";
+ public static final String JSON = "json";
+
+ // Fields an input definition may legally contain.
+ private static String[] inputField = {
+ TYPE, DESCRIPTION, DEFAULT, CONSTRAINTS, REQUIRED, STATUS, ENTRY_SCHEMA
+ };
+
+ // Types validated directly, without consulting custom definitions.
+ private static String[] primitiveTypes = {
+ INTEGER, STRING, BOOLEAN, FLOAT, LIST, MAP, JSON
+ };
+
+ private String name;
+ private Schema schema;
+ private LinkedHashMap<String, Object> customDefs;
+ private Map<String, Annotation> annotations;
+
+ public Input() {
+ }
+
+ public Input(String name, LinkedHashMap<String, Object> schema, LinkedHashMap<String, Object> customDefinitions) {
+ this.name = name;
+ this.schema = new Schema(name, schema);
+ customDefs = customDefinitions;
+ }
+
+ // Parses the "annotations" section of the schema, keeping only annotations
+ // whose source type is HEAT, keyed by annotation name.
+ @SuppressWarnings("unchecked")
+ public void parseAnnotations() {
+ if (schema.getSchema() != null) {
+ LinkedHashMap<String, Object> annotations = (LinkedHashMap<String, Object>) schema.getSchema().get(ToscaElementNames.ANNOTATIONS.getName());
+ if (annotations != null) {
+ setAnnotations(annotations.entrySet().stream()
+ .map(Annotation::new)
+ .filter(Annotation::isHeatSourceType)
+ .collect(Collectors.toMap(Annotation::getName, a -> a)));
+ }
+ }
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public String getType() {
+ return schema.getType();
+ }
+
+ public String getDescription() {
+ return schema.getDescription();
+ }
+
+ public boolean isRequired() {
+ return schema.isRequired();
+ }
+
+ public Object getDefault() {
+ return schema.getDefault();
+ }
+
+ public ArrayList<Constraint> getConstraints() {
+ return schema.getConstraints();
+ }
+
+ // Validates the input definition and, when a value is supplied, the value
+ // itself against the declared type.
+ public void validate(Object value) {
+ validateField();
+ validateType(getType());
+ if (value != null) {
+ validateValue(value);
+ }
+ }
+
+ // Reports any schema key that is not one of the recognized input fields.
+ private void validateField() {
+ for (String key : schema.getSchema().keySet()) {
+ boolean bFound = false;
+ for (String ifld : inputField) {
+ if (key.equals(ifld)) {
+ bFound = true;
+ break;
+ }
+ }
+ if (!bFound) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE214", String.format(
+ "UnknownFieldError: Input \"%s\" contains unknown field \"%s\"",
+ name, key)));
+ }
+ }
+ }
+
+ // Verifies the declared type is either a known property type or present in
+ // the custom definitions.
+ private void validateType(String inputType) {
+ boolean bFound = false;
+ for (String pt : Schema.PROPERTY_TYPES) {
+ if (pt.equals(inputType)) {
+ bFound = true;
+ break;
+ }
+ }
+
+ // Null-guard: customDefs stays null when the no-arg constructor was used;
+ // the original dereferenced it unconditionally and could NPE.
+ if (!bFound && customDefs != null && customDefs.get(inputType) != null) {
+ bFound = true;
+ }
+
+ if (!bFound) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE215", String.format(
+ "ValueError: Invalid type \"%s\"", inputType)));
+ }
+ }
+
+ // Validates a concrete value against the declared type. The original also
+ // looked the type up in EntityType.TOSCA_DEF, but those results were never
+ // read (dead stores) and have been removed.
+ @SuppressWarnings("unchecked")
+ private void validateValue(Object value) {
+ // if it's one of the basic types DON'T look in customDefs
+ if (Arrays.asList(primitiveTypes).contains(getType())) {
+ DataEntity.validateDatatype(getType(), value, null, customDefs, null);
+ return;
+ }
+
+ // complex types carry their own definition in customDefs (null-guarded)
+ Object datatype = customDefs != null ? customDefs.get(getType()) : null;
+ if (datatype != null) {
+ DataEntity.validateDatatype(getType(), value, (LinkedHashMap<String, Object>) datatype, customDefs, null);
+ return;
+ }
+
+ DataEntity.validateDatatype(getType(), value, null, customDefs, null);
+ }
+
+ public Map<String, Annotation> getAnnotations() {
+ return annotations;
+ }
+
+ private void setAnnotations(Map<String, Annotation> annotations) {
+ this.annotations = annotations;
+ }
+
+ // Name kept as-is (typo included) to preserve the public API.
+ public void resetAnnotaions() {
+ annotations = null;
+ }
+
+ public LinkedHashMap<String, Object> getEntrySchema() {
+ return schema.getEntrySchema();
+ }
+
+}
diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/parameters/Output.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/parameters/Output.java
new file mode 100644
index 0000000..8ef82b3
--- /dev/null
+++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/parameters/Output.java
@@ -0,0 +1,129 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.sdc.toscaparser.api.parameters;
+
+import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue;
+import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder;
+
+import java.util.LinkedHashMap;
+
+public class Output {
+
+ private static final String DESCRIPTION = "description";
+ public static final String VALUE = "value";
+ private static final String[] OUTPUT_FIELD = {DESCRIPTION, VALUE};
+
+ private String name;
+ private LinkedHashMap<String, Object> attributes;
+
+ public Output(String name, LinkedHashMap<String, Object> attributes) {
+ this.name = name;
+ this.attributes = attributes;
+ }
+
+ public String getDescription() {
+ return (String) attributes.get(DESCRIPTION);
+ }
+
+ public Object getValue() {
+ return attributes.get(VALUE);
+ }
+
+ // Validates that this output definition is well formed.
+ public void validate() {
+ validateField();
+ }
+
+ private void validateField() {
+ if (attributes == null) {
+ //TODO wrong error message...
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE216", String.format(
+ "ValidationError: Output \"%s\" has wrong type. Expecting a dict",
+ name)));
+ // Bug fix: without this return the checks below dereferenced the null
+ // attributes map and threw NullPointerException.
+ return;
+ }
+
+ if (getValue() == null) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE217", String.format(
+ "MissingRequiredFieldError: Output \"%s\" is missing required \"%s\"",
+ name, VALUE)));
+ }
+ // any key other than "description"/"value" is unknown
+ for (String key : attributes.keySet()) {
+ boolean bFound = false;
+ for (String of : OUTPUT_FIELD) {
+ if (key.equals(of)) {
+ bFound = true;
+ break;
+ }
+ }
+ if (!bFound) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE218", String.format(
+ "UnknownFieldError: Output \"%s\" contains unknown field \"%s\"",
+ name, key)));
+ }
+ }
+ }
+
+ // getter/setter
+
+ public String getName() {
+ return name;
+ }
+
+ public void setAttr(String name, Object value) {
+ attributes.put(name, value);
+ }
+}
+
+/*python
+
+class Output(object):
+
+ OUTPUT_FIELD = (DESCRIPTION, VALUE) = ('description', 'value')
+
+ def __init__(self, name, attributes):
+ self.name = name
+ self.attributes = attributes
+
+ @property
+ def description(self):
+ return self.attributes.get(self.DESCRIPTION)
+
+ @property
+ def value(self):
+ return self.attributes.get(self.VALUE)
+
+ def validate(self):
+ self._validate_field()
+
+ def _validate_field(self):
+ if not isinstance(self.attributes, dict):
+ ValidationIssueCollector.appendException(
+ MissingRequiredFieldError(what='Output "%s"' % self.name,
+ required=self.VALUE))
+ if self.value is None:
+ ValidationIssueCollector.appendException(
+ MissingRequiredFieldError(what='Output "%s"' % self.name,
+ required=self.VALUE))
+ for name in self.attributes:
+ if name not in self.OUTPUT_FIELD:
+ ValidationIssueCollector.appendException(
+ UnknownFieldError(what='Output "%s"' % self.name,
+ field=name))
+*/
diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/prereq/CSAR.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/prereq/CSAR.java
new file mode 100644
index 0000000..4ada267
--- /dev/null
+++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/prereq/CSAR.java
@@ -0,0 +1,790 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.sdc.toscaparser.api.prereq;
+
+import org.onap.sdc.toscaparser.api.ImportsLoader;
+import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue;
+import org.onap.sdc.toscaparser.api.utils.ThreadLocalsHolder;
+import org.onap.sdc.toscaparser.api.utils.UrlUtils;
+
+import java.io.BufferedOutputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.RandomAccessFile;
+import java.net.URL;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.nio.file.StandardCopyOption;
+import java.util.*;
+import java.util.zip.ZipEntry;
+import java.util.zip.ZipFile;
+import java.util.zip.ZipInputStream;
+
+import org.onap.sdc.toscaparser.api.common.JToscaException;
+import org.onap.sdc.toscaparser.api.utils.JToscaErrorCodes;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.yaml.snakeyaml.Yaml;
+
+public class CSAR {
+
+ private static Logger log = LoggerFactory.getLogger(CSAR.class.getName());
+ // Metadata files expected inside the archive, parsed in this order.
+ private static final ArrayList<String> META_PROPERTIES_FILES = new ArrayList<>(Arrays.asList("TOSCA-Metadata/TOSCA.meta", "csar.meta"));
+
+ private String path; // file path or URL of the CSAR
+ private boolean isFile; // true when 'path' is a local file, false for a URL
+ private boolean isValidated; // set once validate() has run
+ private boolean errorCaught; // latched when a validation step fails
+ private String csar; // local path of the archive actually read
+ private String tempDir; // unpack directory created by decompress()
+ // private Metadata metaData;
+ private File tempFile; // temp copy when the CSAR was downloaded from a URL
+ private LinkedHashMap<String, LinkedHashMap<String, Object>> metaProperties; // parsed meta files keyed by file name
+
+ // Creates a handle for the CSAR at the given location.
+ // csarPath is a local file path when aFile is true, otherwise a URL.
+ public CSAR(String csarPath, boolean aFile) {
+ this.path = csarPath;
+ this.isFile = aFile;
+ this.isValidated = false;
+ this.errorCaught = false;
+ this.csar = null;
+ this.tempDir = null;
+ this.tempFile = null;
+ this.metaProperties = new LinkedHashMap<>();
+ }
+
+ // Validates the CSAR: the path must be an existing file (URLs are downloaded
+ // but currently rejected), the metadata must parse, and external references
+ // in the main template must resolve. Returns false and records validation
+ // issues on failure.
+ public boolean validate() throws JToscaException {
+ isValidated = true;
+
+ //validate that the file or URL exists
+
+ if (isFile) {
+ File f = new File(path);
+ if (!f.isFile()) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE220", String.format("\"%s\" is not a file", path)));
+ return false;
+ } else {
+ this.csar = path;
+ }
+ } else {
+ if (!UrlUtils.validateUrl(path)) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE221", String.format("ImportError: \"%s\" does not exist", path)));
+ return false;
+ }
+ // get it to a local file
+ try {
+ // Bug fix: assign the field (the original declared a shadowing local),
+ // so cleanup() can actually delete the downloaded temp file.
+ tempFile = File.createTempFile("csartmp", ".csar");
+ Path ptf = Paths.get(tempFile.getPath());
+ URL webfile = new URL(path);
+ // try-with-resources: the original never closed the download stream.
+ try (InputStream in = webfile.openStream()) {
+ Files.copy(in, ptf, StandardCopyOption.REPLACE_EXISTING);
+ }
+ } catch (Exception e) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE222", "ImportError: failed to load CSAR from " + path));
+ return false;
+ }
+
+ log.debug("CSAR - validate - currently only files are supported");
+ return false;
+ }
+
+ _parseAndValidateMetaProperties();
+
+ if (errorCaught) {
+ return false;
+ }
+
+ // validate that external references in the main template actually exist and are accessible
+ _validateExternalReferences();
+
+ return !errorCaught;
+
+ }
+
+ // Validates that the CSAR is a real zip archive containing
+ // "TOSCA-Metadata/TOSCA.meta", parses each expected metadata file as YAML
+ // into metaProperties, and checks that the "Entry-Definitions" value names
+ // an entry that exists in the archive. Structural failures are thrown as
+ // JToscaException; other errors are collected and latched via errorCaught.
+ private void _parseAndValidateMetaProperties() throws JToscaException {
+
+ ZipFile zf = null;
+
+ try {
+
+ // validate that it is a valid zip file
+ RandomAccessFile raf = new RandomAccessFile(csar, "r");
+ long n = raf.readInt();
+ raf.close();
+ // check if Zip's magic number
+ if (n != 0x504B0304) {
+ String errorString = String.format("\"%s\" is not a valid zip file", csar);
+ log.error(errorString);
+ throw new JToscaException(errorString, JToscaErrorCodes.INVALID_CSAR_FORMAT.getValue());
+ }
+
+ // validate that it contains the metadata file in the correct location
+ zf = new ZipFile(csar);
+ ZipEntry ze = zf.getEntry("TOSCA-Metadata/TOSCA.meta");
+ if (ze == null) {
+
+ String errorString = String.format(
+ "\"%s\" is not a valid CSAR as it does not contain the " +
+ "required file \"TOSCA.meta\" in the folder \"TOSCA-Metadata\"", csar);
+ log.error(errorString);
+ throw new JToscaException(errorString, JToscaErrorCodes.MISSING_META_FILE.getValue());
+ }
+
+ //Going over expected metadata files and parsing them
+ for (String metaFile : META_PROPERTIES_FILES) {
+
+ // NOTE(review): only the first 4096 bytes of each meta file are read;
+ // a larger meta file would be silently truncated — confirm intended.
+ byte ba[] = new byte[4096];
+ ze = zf.getEntry(metaFile);
+ if (ze != null) {
+ // NOTE(review): this stream is not closed explicitly; it is released
+ // when the ZipFile is closed at the end of the method.
+ InputStream inputStream = zf.getInputStream(ze);
+ n = inputStream.read(ba, 0, 4096);
+ String md = new String(ba);
+ md = md.substring(0, (int) n);
+
+ String errorString = String.format(
+ "The file \"%s\" in the" +
+ " CSAR \"%s\" does not contain valid YAML content", ze.getName(), csar);
+
+ try {
+ Yaml yaml = new Yaml();
+ Object mdo = yaml.load(md);
+ // the meta file must parse to a map, not a scalar or list
+ if (!(mdo instanceof LinkedHashMap)) {
+ log.error(errorString);
+ throw new JToscaException(errorString, JToscaErrorCodes.INVALID_META_YAML_CONTENT.getValue());
+ }
+
+ // key metaProperties by the bare file name (strip directories)
+ String[] split = ze.getName().split("/");
+ String fileName = split[split.length - 1];
+
+ // first parse wins; later files never overwrite an existing entry
+ if (!metaProperties.containsKey(fileName)) {
+ metaProperties.put(fileName, (LinkedHashMap<String, Object>) mdo);
+ }
+ } catch (Exception e) {
+ log.error(errorString);
+ throw new JToscaException(errorString, JToscaErrorCodes.INVALID_META_YAML_CONTENT.getValue());
+ }
+ }
+ }
+
+ // verify it has "Entry-Definition"
+ String edf = _getMetadata("Entry-Definitions");
+ if (edf == null) {
+ String errorString = String.format(
+ "The CSAR \"%s\" is missing the required metadata " +
+ "\"Entry-Definitions\" in \"TOSCA-Metadata/TOSCA.meta\"", csar);
+ log.error(errorString);
+ throw new JToscaException(errorString, JToscaErrorCodes.ENTRY_DEFINITION_NOT_DEFINED.getValue());
+ }
+
+ //validate that "Entry-Definitions' metadata value points to an existing file in the CSAR
+ boolean foundEDF = false;
+ Enumeration<? extends ZipEntry> entries = zf.entries();
+ while (entries.hasMoreElements()) {
+ ze = entries.nextElement();
+ if (ze.getName().equals(edf)) {
+ foundEDF = true;
+ break;
+ }
+ }
+ if (!foundEDF) {
+ String errorString = String.format(
+ "The \"Entry-Definitions\" file defined in the CSAR \"%s\" does not exist", csar);
+ log.error(errorString);
+ throw new JToscaException(errorString, JToscaErrorCodes.MISSING_ENTRY_DEFINITION_FILE.getValue());
+ }
+ } catch (JToscaException e) {
+ //ThreadLocalsHolder.getCollector().appendCriticalException(e.getMessage());
+ throw e;
+ } catch (Exception e) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE223", "ValidationError: " + e.getMessage()));
+ errorCaught = true;
+ }
+
+ try {
+ if (zf != null) {
+ zf.close();
+ }
+ } catch (IOException e) {
+ // best effort: close failures are deliberately ignored
+ }
+ }
+
+ // Best-effort removal of the temporary CSAR file downloaded from a URL.
+ public void cleanup() {
+ try {
+ if (tempFile == null) {
+ return;
+ }
+ tempFile.delete();
+ } catch (Exception e) {
+ // best effort: deletion failures are deliberately ignored
+ }
+ }
+
+ // Reads one key from the parsed TOSCA.meta map, validating the archive first
+ // if needed. Returns null when the key — or the whole map — is absent.
+ private String _getMetadata(String key) throws JToscaException {
+ if (!isValidated) {
+ validate();
+ }
+ // Null-guard: when validation failed, TOSCA.meta may never have been parsed;
+ // the original dereferenced the map unconditionally and could NPE.
+ LinkedHashMap<String, Object> meta = _getMetaProperty("TOSCA.meta");
+ if (meta == null) {
+ return null;
+ }
+ Object value = meta.get(key);
+ return value != null ? value.toString() : null;
+ }
+
+ // "Created-By" metadata value from TOSCA.meta, or null when absent.
+ public String getAuthor() throws JToscaException {
+ return _getMetadata("Created-By");
+ }
+
+ // "CSAR-Version" metadata value from TOSCA.meta, or null when absent.
+ public String getVersion() throws JToscaException {
+ return _getMetadata("CSAR-Version");
+ }
+
+ // All parsed metadata maps, keyed by meta file name (e.g. "TOSCA.meta").
+ public LinkedHashMap<String, LinkedHashMap<String, Object>> getMetaProperties() {
+ return metaProperties;
+ }
+
+ // Parsed key/value map for the given meta file, or null when that file was
+ // not found in the archive.
+ private LinkedHashMap<String, Object> _getMetaProperty(String propertiesFile) {
+ return metaProperties.get(propertiesFile);
+ }
+
+ // Returns the "Entry-Definitions" template path when such an entry exists in
+ // the archive, otherwise null.
+ public String getMainTemplate() throws JToscaException {
+ String entryDef = _getMetadata("Entry-Definitions");
+ boolean ok = false;
+ // try-with-resources: the original leaked the ZipFile when getEntry threw.
+ try (ZipFile zf = new ZipFile(path)) {
+ ok = (zf.getEntry(entryDef) != null);
+ } catch (IOException e) {
+ if (!ok) {
+ log.error("CSAR - getMainTemplate - failed to open {}", path);
+ }
+ }
+ if (ok) {
+ return entryDef;
+ } else {
+ return null;
+ }
+ }
+
+ // Parses the main (Entry-Definitions) template from the unpacked archive and
+ // returns it as a map, or null when it is missing or not valid YAML.
+ @SuppressWarnings("unchecked")
+ public LinkedHashMap<String, Object> getMainTemplateYaml() throws JToscaException {
+ String entryDef = getMainTemplate();
+ // Bug fix: test the entry name itself. The original concatenated first and
+ // null-checked the concatenation, which is never null (it becomes "...null").
+ if (entryDef != null) {
+ String mainTemplate = tempDir + File.separator + entryDef;
+ try (InputStream input = new FileInputStream(new File(mainTemplate))) {
+ Yaml yaml = new Yaml();
+ Object data = yaml.load(input);
+ if (!(data instanceof LinkedHashMap)) {
+ throw new IOException();
+ }
+ return (LinkedHashMap<String, Object>) data;
+ } catch (Exception e) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE224", String.format(
+ "The file \"%s\" in the CSAR \"%s\" does not " +
+ "contain valid TOSCA YAML content",
+ mainTemplate, csar)));
+ }
+ }
+ return null;
+ }
+
+ // Returns the "Description" metadata; when absent, lazily copies the main
+ // template's top-level "description" into the TOSCA.meta map and re-reads it.
+ public String getDescription() throws JToscaException {
+ String desc = _getMetadata("Description");
+ if (desc != null) {
+ return desc;
+ }
+
+ // NOTE(review): assumes "TOSCA.meta" was parsed and getMainTemplateYaml()
+ // returns non-null; either being absent would NPE here — confirm upstream.
+ Map<String, Object> metaData = metaProperties.get("TOSCA.meta");
+ metaData.put("Description", getMainTemplateYaml().get("description"));
+ return _getMetadata("Description");
+ }
+
+ // Directory where the archive was unpacked by decompress(); null until then.
+ public String getTempDir() {
+ return tempDir;
+ }
+
+ // Validates the archive (if not already validated) and unpacks it into a
+ // fresh temporary directory; subsequent calls reuse the existing directory.
+ public void decompress() throws IOException, JToscaException {
+ if (!isValidated) {
+ validate();
+ }
+
+ if (tempDir == null || tempDir.isEmpty()) {
+ tempDir = Files.createTempDirectory("JTP").toString();
+ unzip(path, tempDir);
+ }
+ }
+
+ // Unpacks the archive and verifies every external reference in the main
+ // template (imports, artifact files, interface implementations) resolves to
+ // an accessible URL or an existing file inside the archive. Failures are
+ // recorded via errorCaught; the unpack directory is removed on exit.
+ private void _validateExternalReferences() throws JToscaException {
+ // Extracts files referenced in the main template
+ // These references are currently supported:
+ // * imports
+ // * interface implementations
+ // * artifacts
+ try {
+ decompress();
+ String mainTplFile = getMainTemplate();
+ if (mainTplFile == null) {
+ return;
+ }
+
+ // NOTE(review): getMainTemplateYaml() can return null (invalid YAML);
+ // the dereference below would then NPE — confirm this is acceptable
+ // given the surrounding catch only handles IOException.
+ LinkedHashMap<String, Object> mainTpl = getMainTemplateYaml();
+ if (mainTpl.get("imports") != null) {
+ // this loads the imports
+ // NOTE(review): the ImportsLoader constructor performs the load and
+ // records validation issues as a side effect; 'il' itself is unused.
+ ImportsLoader il = new ImportsLoader((ArrayList<Object>) mainTpl.get("imports"),
+ tempDir + File.separator + mainTplFile,
+ (Object) null,
+ (LinkedHashMap<String, Object>) null);
+ }
+
+ if (mainTpl.get("topology_template") != null) {
+ LinkedHashMap<String, Object> topologyTemplate =
+ (LinkedHashMap<String, Object>) mainTpl.get("topology_template");
+
+ if (topologyTemplate.get("node_templates") != null) {
+ LinkedHashMap<String, Object> nodeTemplates =
+ (LinkedHashMap<String, Object>) topologyTemplate.get("node_templates");
+ for (String nodeTemplateKey : nodeTemplates.keySet()) {
+ LinkedHashMap<String, Object> nodeTemplate =
+ (LinkedHashMap<String, Object>) nodeTemplates.get(nodeTemplateKey);
+ // artifacts: either a bare path string or a map with a "file" key
+ if (nodeTemplate.get("artifacts") != null) {
+ LinkedHashMap<String, Object> artifacts =
+ (LinkedHashMap<String, Object>) nodeTemplate.get("artifacts");
+ for (String artifactKey : artifacts.keySet()) {
+ Object artifact = artifacts.get(artifactKey);
+ if (artifact instanceof String) {
+ _validateExternalReference(mainTplFile, (String) artifact, true);
+ } else if (artifact instanceof LinkedHashMap) {
+ String file = (String) ((LinkedHashMap<String, Object>) artifact).get("file");
+ if (file != null) {
+ _validateExternalReference(mainTplFile, file, true);
+ }
+ } else {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE225", String.format(
+ "ValueError: Unexpected artifact definition for \"%s\"",
+ artifactKey)));
+ errorCaught = true;
+ }
+ }
+ }
+ // interface operations: either a bare script path string or a map
+ // with an "implementation" key
+ if (nodeTemplate.get("interfaces") != null) {
+ LinkedHashMap<String, Object> interfaces =
+ (LinkedHashMap<String, Object>) nodeTemplate.get("interfaces");
+ for (String interfaceKey : interfaces.keySet()) {
+ LinkedHashMap<String, Object> _interface =
+ (LinkedHashMap<String, Object>) interfaces.get(interfaceKey);
+ for (String operationKey : _interface.keySet()) {
+ Object operation = _interface.get(operationKey);
+ if (operation instanceof String) {
+ _validateExternalReference(mainTplFile, (String) operation, false);
+ } else if (operation instanceof LinkedHashMap) {
+ String imp = (String) ((LinkedHashMap<String, Object>) operation).get("implementation");
+ if (imp != null) {
+ _validateExternalReference(mainTplFile, imp, true);
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ } catch (IOException e) {
+ errorCaught = true;
+ } finally {
+ // delete tempDir (only here?!?)
+ // the unpacked archive is removed here, so tempDir is only valid for
+ // the duration of this call
+ File fdir = new File(tempDir);
+ deleteDir(fdir);
+ tempDir = null;
+ }
+ }
+
+ public static void deleteDir(File fdir) {
+ try {
+ if (fdir.isDirectory()) {
+ for (File c : fdir.listFiles())
+ deleteDir(c);
+ }
+ fdir.delete();
+ } catch (Exception e) {
+ }
+ }
+
+ private void _validateExternalReference(String tplFile, String resourceFile, boolean raiseExc) {
+ // Verify that the external resource exists
+
+ // If resource_file is a URL verify that the URL is valid.
+ // If resource_file is a relative path verify that the path is valid
+ // considering base folder (self.temp_dir) and tpl_file.
+ // Note that in a CSAR resource_file cannot be an absolute path.
+ if (UrlUtils.validateUrl(resourceFile)) {
+ String msg = String.format("URLException: The resource at \"%s\" cannot be accessed", resourceFile);
+ try {
+ if (UrlUtils.isUrlAccessible(resourceFile)) {
+ return;
+ } else {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE226", msg));
+ errorCaught = true;
+ }
+ } catch (Exception e) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE227", msg));
+ }
+ }
+
+ String dirPath = Paths.get(tplFile).getParent().toString();
+ String filePath = tempDir + File.separator + dirPath + File.separator + resourceFile;
+ File f = new File(filePath);
+ if (f.isFile()) {
+ return;
+ }
+
+ if (raiseExc) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE228", String.format(
+ "ValueError: The resource \"%s\" does not exist", resourceFile)));
+ }
+ errorCaught = true;
+ }
+
+ private void unzip(String zipFilePath, String destDirectory) throws IOException {
+ File destDir = new File(destDirectory);
+ if (!destDir.exists()) {
+ destDir.mkdir();
+ }
+
+ try (ZipInputStream zipIn = new ZipInputStream(new FileInputStream(zipFilePath));) {
+ ZipEntry entry = zipIn.getNextEntry();
+ // iterates over entries in the zip file
+ while (entry != null) {
+ // create all directories needed for nested items
+ String[] parts = entry.getName().split("/");
+ String s = destDirectory + File.separator;
+ for (int i = 0; i < parts.length - 1; i++) {
+ s += parts[i];
+ File idir = new File(s);
+ if (!idir.exists()) {
+ idir.mkdir();
+ }
+ s += File.separator;
+ }
+ String filePath = destDirectory + File.separator + entry.getName();
+ if (!entry.isDirectory()) {
+ // if the entry is a file, extracts it
+ extractFile(zipIn, filePath);
+ } else {
+ // if the entry is a directory, make the directory
+ File dir = new File(filePath);
+ dir.mkdir();
+ }
+ zipIn.closeEntry();
+ entry = zipIn.getNextEntry();
+ }
+ }
+ }
+
+ /**
+ * Extracts a zip entry (file entry)
+ *
+ * @param zipIn
+ * @param filePath
+ * @throws IOException
+ */
+ private static final int BUFFER_SIZE = 4096;
+
+ private void extractFile(ZipInputStream zipIn, String filePath) throws IOException {
+ //BufferedOutputStream bos = new BufferedOutputStream(new FileOutputStream(filePath));
+ try (FileOutputStream fos = new FileOutputStream(filePath);
+ BufferedOutputStream bos = new BufferedOutputStream(fos);) {
+ byte[] bytesIn = new byte[BUFFER_SIZE];
+ int read = 0;
+ while ((read = zipIn.read(bytesIn)) != -1) {
+ bos.write(bytesIn, 0, read);
+ }
+ }
+ }
+
+}
+
+/*python
+
+from toscaparser.common.exception import ValidationIssueCollector
+from toscaparser.common.exception import URLException
+from toscaparser.common.exception import ValidationError
+from toscaparser.imports import ImportsLoader
+from toscaparser.utils.gettextutils import _
+from toscaparser.utils.urlutils import UrlUtils
+
+try: # Python 2.x
+ from BytesIO import BytesIO
+except ImportError: # Python 3.x
+ from io import BytesIO
+
+
+class CSAR(object):
+
+ def __init__(self, csar_file, a_file=True):
+ self.path = csar_file
+ self.a_file = a_file
+ self.is_validated = False
+ self.error_caught = False
+ self.csar = None
+ self.temp_dir = None
+
+ def validate(self):
+ """Validate the provided CSAR file."""
+
+ self.is_validated = True
+
+ # validate that the file or URL exists
+ missing_err_msg = (_('"%s" does not exist.') % self.path)
+ if self.a_file:
+ if not os.path.isfile(self.path):
+ ValidationIssueCollector.appendException(
+ ValidationError(message=missing_err_msg))
+ return False
+ else:
+ self.csar = self.path
+ else: # a URL
+ if not UrlUtils.validate_url(self.path):
+ ValidationIssueCollector.appendException(
+ ValidationError(message=missing_err_msg))
+ return False
+ else:
+ response = requests.get(self.path)
+ self.csar = BytesIO(response.content)
+
+ # validate that it is a valid zip file
+ if not zipfile.is_zipfile(self.csar):
+ err_msg = (_('"%s" is not a valid zip file.') % self.path)
+ ValidationIssueCollector.appendException(
+ ValidationError(message=err_msg))
+ return False
+
+ # validate that it contains the metadata file in the correct location
+ self.zfile = zipfile.ZipFile(self.csar, 'r')
+ filelist = self.zfile.namelist()
+ if 'TOSCA-Metadata/TOSCA.meta' not in filelist:
+ err_msg = (_('"%s" is not a valid CSAR as it does not contain the '
+ 'required file "TOSCA.meta" in the folder '
+ '"TOSCA-Metadata".') % self.path)
+ ValidationIssueCollector.appendException(
+ ValidationError(message=err_msg))
+ return False
+
+ # validate that 'Entry-Definitions' property exists in TOSCA.meta
+ data = self.zfile.read('TOSCA-Metadata/TOSCA.meta')
+ invalid_yaml_err_msg = (_('The file "TOSCA-Metadata/TOSCA.meta" in '
+ 'the CSAR "%s" does not contain valid YAML '
+ 'content.') % self.path)
+ try:
+ meta = yaml.load(data)
+ if type(meta) is dict:
+ self.metadata = meta
+ else:
+ ValidationIssueCollector.appendException(
+ ValidationError(message=invalid_yaml_err_msg))
+ return False
+ except yaml.YAMLError:
+ ValidationIssueCollector.appendException(
+ ValidationError(message=invalid_yaml_err_msg))
+ return False
+
+ if 'Entry-Definitions' not in self.metadata:
+ err_msg = (_('The CSAR "%s" is missing the required metadata '
+ '"Entry-Definitions" in '
+ '"TOSCA-Metadata/TOSCA.meta".')
+ % self.path)
+ ValidationIssueCollector.appendException(
+ ValidationError(message=err_msg))
+ return False
+
+ # validate that 'Entry-Definitions' metadata value points to an
+ # existing file in the CSAR
+ entry = self.metadata.get('Entry-Definitions')
+ if entry and entry not in filelist:
+ err_msg = (_('The "Entry-Definitions" file defined in the '
+ 'CSAR "%s" does not exist.') % self.path)
+ ValidationIssueCollector.appendException(
+ ValidationError(message=err_msg))
+ return False
+
+ # validate that external references in the main template actually
+ # exist and are accessible
+ self._validate_external_references()
+ return not self.error_caught
+
+ def get_metadata(self):
+ """Return the metadata dictionary."""
+
+ # validate the csar if not already validated
+ if not self.is_validated:
+ self.validate()
+
+ # return a copy to avoid changes overwrite the original
+ return dict(self.metadata) if self.metadata else None
+
+ def _get_metadata(self, key):
+ if not self.is_validated:
+ self.validate()
+ return self.metadata.get(key)
+
+ def get_author(self):
+ return self._get_metadata('Created-By')
+
+ def get_version(self):
+ return self._get_metadata('CSAR-Version')
+
+ def get_main_template(self):
+ entry_def = self._get_metadata('Entry-Definitions')
+ if entry_def in self.zfile.namelist():
+ return entry_def
+
+ def get_main_template_yaml(self):
+ main_template = self.get_main_template()
+ if main_template:
+ data = self.zfile.read(main_template)
+ invalid_tosca_yaml_err_msg = (
+ _('The file "%(template)s" in the CSAR "%(csar)s" does not '
+ 'contain valid TOSCA YAML content.') %
+ {'template': main_template, 'csar': self.path})
+ try:
+ tosca_yaml = yaml.load(data)
+ if type(tosca_yaml) is not dict:
+ ValidationIssueCollector.appendException(
+ ValidationError(message=invalid_tosca_yaml_err_msg))
+ return tosca_yaml
+ except Exception:
+ ValidationIssueCollector.appendException(
+ ValidationError(message=invalid_tosca_yaml_err_msg))
+
+ def get_description(self):
+ desc = self._get_metadata('Description')
+ if desc is not None:
+ return desc
+
+ self.metadata['Description'] = \
+ self.get_main_template_yaml().get('description')
+ return self.metadata['Description']
+
+ def decompress(self):
+ if not self.is_validated:
+ self.validate()
+ self.temp_dir = tempfile.NamedTemporaryFile().name
+ with zipfile.ZipFile(self.csar, "r") as zf:
+ zf.extractall(self.temp_dir)
+
+ def _validate_external_references(self):
+ """Extracts files referenced in the main template
+
+ These references are currently supported:
+ * imports
+ * interface implementations
+ * artifacts
+ """
+ try:
+ self.decompress()
+ main_tpl_file = self.get_main_template()
+ if not main_tpl_file:
+ return
+ main_tpl = self.get_main_template_yaml()
+
+ if 'imports' in main_tpl:
+ ImportsLoader(main_tpl['imports'],
+ os.path.join(self.temp_dir, main_tpl_file))
+
+ if 'topology_template' in main_tpl:
+ topology_template = main_tpl['topology_template']
+
+ if 'node_templates' in topology_template:
+ node_templates = topology_template['node_templates']
+
+ for node_template_key in node_templates:
+ node_template = node_templates[node_template_key]
+ if 'artifacts' in node_template:
+ artifacts = node_template['artifacts']
+ for artifact_key in artifacts:
+ artifact = artifacts[artifact_key]
+ if isinstance(artifact, six.string_types):
+ self._validate_external_reference(
+ main_tpl_file,
+ artifact)
+ elif isinstance(artifact, dict):
+ if 'file' in artifact:
+ self._validate_external_reference(
+ main_tpl_file,
+ artifact['file'])
+ else:
+ ValidationIssueCollector.appendException(
+ ValueError(_('Unexpected artifact '
+ 'definition for "%s".')
+ % artifact_key))
+ self.error_caught = True
+ if 'interfaces' in node_template:
+ interfaces = node_template['interfaces']
+ for interface_key in interfaces:
+ interface = interfaces[interface_key]
+ for opertation_key in interface:
+ operation = interface[opertation_key]
+ if isinstance(operation, six.string_types):
+ self._validate_external_reference(
+ main_tpl_file,
+ operation,
+ False)
+ elif isinstance(operation, dict):
+ if 'implementation' in operation:
+ self._validate_external_reference(
+ main_tpl_file,
+ operation['implementation'])
+ finally:
+ if self.temp_dir:
+ shutil.rmtree(self.temp_dir)
+
+ def _validate_external_reference(self, tpl_file, resource_file,
+ raise_exc=True):
+ """Verify that the external resource exists
+
+ If resource_file is a URL verify that the URL is valid.
+ If resource_file is a relative path verify that the path is valid
+ considering base folder (self.temp_dir) and tpl_file.
+ Note that in a CSAR resource_file cannot be an absolute path.
+ """
+ if UrlUtils.validate_url(resource_file):
+ msg = (_('The resource at "%s" cannot be accessed.') %
+ resource_file)
+ try:
+ if UrlUtils.url_accessible(resource_file):
+ return
+ else:
+ ValidationIssueCollector.appendException(
+ URLException(what=msg))
+ self.error_caught = True
+ except Exception:
+ ValidationIssueCollector.appendException(
+ URLException(what=msg))
+ self.error_caught = True
+
+ if os.path.isfile(os.path.join(self.temp_dir,
+ os.path.dirname(tpl_file),
+ resource_file)):
+ return
+
+ if raise_exc:
+ ValidationIssueCollector.appendException(
+ ValueError(_('The resource "%s" does not exist.')
+ % resource_file))
+ self.error_caught = True
+*/
+
+
diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/utils/CopyUtils.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/utils/CopyUtils.java
new file mode 100644
index 0000000..237b738
--- /dev/null
+++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/utils/CopyUtils.java
@@ -0,0 +1,50 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.sdc.toscaparser.api.utils;
+
+import java.util.ArrayList;
+import java.util.LinkedHashMap;
+import java.util.Map;
+
public class CopyUtils {

    private CopyUtils() {
        // static utility class - no instances
    }

    /**
     * Returns a shallow copy of a parsed-YAML container.
     *
     * @param src a LinkedHashMap or an ArrayList to copy
     * @return a new container holding the same entries/elements (the values
     *         themselves are shared, not cloned), or null when src is neither
     *         a LinkedHashMap nor an ArrayList
     */
    @SuppressWarnings("unchecked")
    public static Object copyLhmOrAl(Object src) {
        if (src instanceof LinkedHashMap) {
            // copy constructor preserves insertion order
            return new LinkedHashMap<>((Map<String, Object>) src);
        } else if (src instanceof ArrayList) {
            return new ArrayList<>((ArrayList<Object>) src);
        } else {
            return null;
        }
    }
}
diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/utils/DumpUtils.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/utils/DumpUtils.java
new file mode 100644
index 0000000..158a3e1
--- /dev/null
+++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/utils/DumpUtils.java
@@ -0,0 +1,68 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.sdc.toscaparser.api.utils;
+
+import java.util.ArrayList;
+import java.util.LinkedHashMap;
+import java.util.Map;
+
public class DumpUtils {

    private DumpUtils() {
        // static utility class - no instances
    }

    /**
     * Recursively pretty-prints a parsed-YAML object tree to stdout.
     * Handles LinkedHashMap, ArrayList, String, Integer, Boolean and Double
     * nodes; anything else is flagged as an unexpected type.
     *
     * @param yo    node to dump (may be null)
     * @param level current indentation depth in spaces
     */
    @SuppressWarnings("unchecked")
    private static void dumpYaml(Object yo, int level) {
        try {
            if (yo == null) {
                System.out.println("<null>");
                return;
            }
            // bug fix: build the indent for any depth; the previous
            // indent.substring(0, level) threw once level exceeded the
            // literal's length and was masked by the catch below
            String pad = new String(new char[level]).replace('\0', ' ');
            String cname = yo.getClass().getSimpleName();
            System.out.print(cname);
            if (cname.equals("LinkedHashMap")) {
                LinkedHashMap<String, Object> lhm = (LinkedHashMap<String, Object>) yo;
                System.out.println();
                for (Map.Entry<String, Object> me : lhm.entrySet()) {
                    System.out.print(pad + me.getKey() + ": ");
                    dumpYaml(me.getValue(), level + 2);
                }
            } else if (cname.equals("ArrayList")) {
                ArrayList<Object> al = (ArrayList<Object>) yo;
                System.out.println();
                for (int i = 0; i < al.size(); i++) {
                    System.out.format("%s[%d] ", pad, i);
                    dumpYaml(al.get(i), level + 2);
                }
            } else if (cname.equals("String")) {
                System.out.println(" ==> \"" + (String) yo + "\"");
            } else if (cname.equals("Integer")) {
                System.out.println(" ==> " + (int) yo);
            } else if (cname.equals("Boolean")) {
                System.out.println(" ==> " + (boolean) yo);
            } else if (cname.equals("Double")) {
                System.out.println(" ==> " + (double) yo);
            } else {
                System.out.println(" !! unexpected type");
            }
        } catch (Exception e) {
            System.out.println("Exception!! " + e.getMessage());
        }
    }
}
diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/utils/JToscaErrorCodes.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/utils/JToscaErrorCodes.java
new file mode 100644
index 0000000..3849ce0
--- /dev/null
+++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/utils/JToscaErrorCodes.java
@@ -0,0 +1,52 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.sdc.toscaparser.api.utils;
+
+
/**
 * Error codes reported by the JTosca CSAR/template validation layer.
 */
public enum JToscaErrorCodes {
    MISSING_META_FILE("JE1001"),
    INVALID_META_YAML_CONTENT("JE1002"),
    ENTRY_DEFINITION_NOT_DEFINED("JE1003"),
    MISSING_ENTRY_DEFINITION_FILE("JE1004"),
    GENERAL_ERROR("JE1005"),
    PATH_NOT_VALID("JE1006"),
    CSAR_TOSCA_VALIDATION_ERROR("JE1007"),
    INVALID_CSAR_FORMAT("JE1008");

    // immutable wire code, e.g. "JE1001"
    private final String value;

    JToscaErrorCodes(String value) {
        this.value = value;
    }

    /**
     * @return the string code associated with this error constant
     */
    public String getValue() {
        return value;
    }

    /**
     * Looks up an error constant by its string code.
     *
     * @param code string code such as "JE1007"
     * @return the matching constant, or null when no constant carries the code
     */
    public static JToscaErrorCodes getByCode(String code) {
        for (JToscaErrorCodes v : values()) {
            if (v.getValue().equals(code)) {
                return v;
            }
        }
        return null;
    }
}
diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/utils/TOSCAVersionProperty.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/utils/TOSCAVersionProperty.java
new file mode 100644
index 0000000..a753d62
--- /dev/null
+++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/utils/TOSCAVersionProperty.java
@@ -0,0 +1,209 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.sdc.toscaparser.api.utils;
+
+import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue;
+
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+// test with functions/test_concat.yaml
+public class TOSCAVersionProperty {
+
+ private String version;
+
+ private static final String VERSION_RE =
+ "^(?<gMajorVersion>([0-9][0-9]*))"
+ + "(\\.(?<gMinorVersion>([0-9][0-9]*)))?"
+ + "(\\.(?<gFixVersion>([0-9][0-9]*)))?"
+ + "(\\.(?<gQualifier>([0-9A-Za-z]+)))?"
+ + "(\\-(?<gBuildVersion>[0-9])*)?$";
+
+ private String minorVersion = null;
+ private String majorVersion = null;
+ private String fixVersion = null;
+ private String qualifier = null;
+ private String buildVersion = null;
+
+
+ public TOSCAVersionProperty(String version) {
+
+ if (version.equals("0") || version.equals("0.0") || version.equals("0.0.0")) {
+ return;
+ }
+
+ Pattern pattern = Pattern.compile(VERSION_RE);
+ Matcher matcher = pattern.matcher(version);
+ if (!matcher.find()) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(
+ new JToscaValidationIssue(
+ "JE252",
+ "InvalidTOSCAVersionPropertyException: "
+ + "Value of TOSCA version property \"" + version + "\" is invalid"
+ ));
+ return;
+ }
+ minorVersion = matcher.group("gMinorVersion");
+ majorVersion = matcher.group("gMajorVersion");
+ fixVersion = matcher.group("gFixVersion");
+ qualifier = validateQualifier(matcher.group("gQualifier"));
+ buildVersion = validateBuild(matcher.group("gBuildVersion"));
+ validateMajorVersion(majorVersion);
+
+ this.version = version;
+
+ }
+
+ private String validateMajorVersion(String value) {
+ // Validate major version
+
+ // Checks if only major version is provided and assumes
+ // minor version as 0.
+ // Eg: If version = 18, then it returns version = '18.0'
+
+ if (minorVersion == null && buildVersion == null && !value.equals("0")) {
+ //log.warning(_('Minor version assumed "0".'))
+ version = version + "0";
+ }
+ return value;
+ }
+
+ private String validateQualifier(String value) {
+ // Validate qualifier
+
+ // TOSCA version is invalid if a qualifier is present without the
+ // fix version or with all of major, minor and fix version 0s.
+
+ // For example, the following versions are invalid
+ // 18.0.abc
+ // 0.0.0.abc
+
+ if ((fixVersion == null && value != null) || (minorVersion.equals("0") && majorVersion.equals("0")
+ && fixVersion.equals("0") && value != null)) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(
+ new JToscaValidationIssue(
+ "JE253",
+ "InvalidTOSCAVersionPropertyException: Value of TOSCA version property \""
+ + version
+ + "\" is invalid"
+ ));
+ }
+ return value;
+ }
+
+ private String validateBuild(String value) {
+ // Validate build version
+
+ // TOSCA version is invalid if build version is present without the qualifier.
+ // Eg: version = 18.0.0-1 is invalid.
+
+ if (qualifier == null && value != null) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(
+ new JToscaValidationIssue(
+ "JE254",
+ "InvalidTOSCAVersionPropertyException: "
+ + "Value of TOSCA version property \"" + version + "\" is invalid"
+ )
+ );
+ }
+ return value;
+ }
+
+ public Object getVersion() {
+ return version;
+ }
+
+}
+
+/*python
+
+class TOSCAVersionProperty(object):
+
+ VERSION_RE = re.compile('^(?P<major_version>([0-9][0-9]*))'
+ '(\.(?P<minor_version>([0-9][0-9]*)))?'
+ '(\.(?P<fix_version>([0-9][0-9]*)))?'
+ '(\.(?P<qualifier>([0-9A-Za-z]+)))?'
+ '(\-(?P<build_version>[0-9])*)?$')
+
+ def __init__(self, version):
+ self.version = str(version)
+ match = self.VERSION_RE.match(self.version)
+ if not match:
+ ValidationIssueCollector.appendException(
+ InvalidTOSCAVersionPropertyException(what=(self.version)))
+ return
+ ver = match.groupdict()
+ if self.version in ['0', '0.0', '0.0.0']:
+ log.warning(_('Version assumed as not provided'))
+ self.version = None
+ self.minor_version = ver['minor_version']
+ self.major_version = ver['major_version']
+ self.fix_version = ver['fix_version']
+ self.qualifier = self._validate_qualifier(ver['qualifier'])
+ self.build_version = self._validate_build(ver['build_version'])
+ self._validate_major_version(self.major_version)
+
+ def _validate_major_version(self, value):
+ """Validate major version
+
+ Checks if only major version is provided and assumes
+ minor version as 0.
+ Eg: If version = 18, then it returns version = '18.0'
+ """
+
+ if self.minor_version is None and self.build_version is None and \
+ value != '0':
+ log.warning(_('Minor version assumed "0".'))
+ self.version = '.'.join([value, '0'])
+ return value
+
+ def _validate_qualifier(self, value):
+ """Validate qualifier
+
+ TOSCA version is invalid if a qualifier is present without the
+ fix version or with all of major, minor and fix version 0s.
+
+ For example, the following versions are invalid
+ 18.0.abc
+ 0.0.0.abc
+ """
+ if (self.fix_version is None and value) or \
+ (self.minor_version == self.major_version ==
+ self.fix_version == '0' and value):
+ ValidationIssueCollector.appendException(
+ InvalidTOSCAVersionPropertyException(what=(self.version)))
+ return value
+
+ def _validate_build(self, value):
+ """Validate build version
+
+ TOSCA version is invalid if build version is present without the
+ qualifier.
+ Eg: version = 18.0.0-1 is invalid.
+ """
+ if not self.qualifier and value:
+ ValidationIssueCollector.appendException(
+ InvalidTOSCAVersionPropertyException(what=(self.version)))
+ return value
+
+ def get_version(self):
+ return self.version
+*/
diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/utils/ThreadLocalsHolder.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/utils/ThreadLocalsHolder.java
new file mode 100644
index 0000000..4c4581b
--- /dev/null
+++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/utils/ThreadLocalsHolder.java
@@ -0,0 +1,45 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.sdc.toscaparser.api.utils;
+
+import org.onap.sdc.toscaparser.api.common.ValidationIssueCollector;
+
+public class ThreadLocalsHolder {
+
+ private static final ThreadLocal<ValidationIssueCollector> EXCEPTION_COLLECTOR_THREAD_LOCAL = new ThreadLocal<>();
+
+ private ThreadLocalsHolder() {
+ }
+
+ public static ValidationIssueCollector getCollector() {
+ return EXCEPTION_COLLECTOR_THREAD_LOCAL.get();
+ }
+
+ public static void setCollector(ValidationIssueCollector validationIssueCollector) {
+ cleanup();
+ EXCEPTION_COLLECTOR_THREAD_LOCAL.set(validationIssueCollector);
+ }
+
+ public static void cleanup() {
+ EXCEPTION_COLLECTOR_THREAD_LOCAL.remove();
+ }
+
+}
diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/utils/UrlUtils.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/utils/UrlUtils.java
new file mode 100644
index 0000000..d081d91
--- /dev/null
+++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/utils/UrlUtils.java
@@ -0,0 +1,145 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.sdc.toscaparser.api.utils;
+
+import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue;
+
+import java.io.IOException;
+import java.net.HttpURLConnection;
+import java.net.MalformedURLException;
+import java.net.URL;
+
+public class UrlUtils {
+
+ private static final int HTTP_STATUS_OK = 200;
+
+ private UrlUtils() {
+ }
+
+ public static boolean validateUrl(String sUrl) {
+ // Validates whether the given path is a URL or not
+
+ // If the given path includes a scheme (http, https, ftp, ...) and a net
+ // location (a domain name such as www.github.com) it is validated as a URL
+ try {
+ URL url = new URL(sUrl);
+ if (url.getProtocol().equals("file")) {
+ return true;
+ }
+ return url.getAuthority() != null;
+ } catch (MalformedURLException e) {
+ return false;
+ }
+ }
+
+ public static String joinUrl(String sUrl, String relativePath) {
+ // Builds a new URL from the given URL and the relative path
+
+ // Example:
+ // url: http://www.githib.com/openstack/heat
+ // relative_path: heat-translator
+ // - joined: http://www.githib.com/openstack/heat-translator
+ if (!validateUrl(sUrl)) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE255", String.format(
+ "ValueError: The URL \"%s\" is malformed", sUrl)));
+ }
+ try {
+ URL base = new URL(sUrl);
+ return (new URL(base, relativePath)).toString();
+ } catch (MalformedURLException e) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE256", String.format(
+ "ValueError: Joining URL \"%s\" and relative path \"%s\" caused an exception", sUrl, relativePath)));
+ return sUrl;
+ }
+ }
+
+ public static boolean isUrlAccessible(String sUrl) {
+ // Validates whether the given URL is accessible
+
+ // Returns true if the get call returns a 200 response code.
+ // Otherwise, returns false.
+ try {
+ HttpURLConnection connection = (HttpURLConnection) new URL(sUrl).openConnection();
+ connection.setRequestMethod("HEAD");
+ int responseCode = connection.getResponseCode();
+ return responseCode == HTTP_STATUS_OK;
+ } catch (IOException e) {
+ return false;
+ }
+ }
+
+}
+
+/*python
+
+from six.moves.urllib.parse import urljoin
+from six.moves.urllib.parse import urlparse
+from toscaparser.common.exception import ValidationIssueCollector
+from toscaparser.utils.gettextutils import _
+
+try:
+ # Python 3.x
+ import urllib.request as urllib2
+except ImportError:
+ # Python 2.x
+ import urllib2
+
+
+class UrlUtils(object):
+
+ @staticmethod
+ def validate_url(path):
+ """Validates whether the given path is a URL or not.
+
+ If the given path includes a scheme (http, https, ftp, ...) and a net
+ location (a domain name such as www.github.com) it is validated as a
+ URL.
+ """
+ parsed = urlparse(path)
+ if parsed.scheme == 'file':
+ # If the url uses the file scheme netloc will be ""
+ return True
+ else:
+ return bool(parsed.scheme) and bool(parsed.netloc)
+
+ @staticmethod
+ def join_url(url, relative_path):
+ """Builds a new URL from the given URL and the relative path.
+
+ Example:
+ url: http://www.githib.com/openstack/heat
+ relative_path: heat-translator
+ - joined: http://www.githib.com/openstack/heat-translator
+ """
+ if not UrlUtils.validate_url(url):
+ ValidationIssueCollector.appendException(
+ ValueError(_('"%s" is not a valid URL.') % url))
+ return urljoin(url, relative_path)
+
+ @staticmethod
+ def url_accessible(url):
+ """Validates whether the given URL is accessible.
+
+ Returns true if the get call returns a 200 response code.
+ Otherwise, returns false.
+ """
+ return urllib2.urlopen(url).getcode() == 200
+*/
diff --git a/jtosca/src/main/java/org/onap/sdc/toscaparser/api/utils/ValidateUtils.java b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/utils/ValidateUtils.java
new file mode 100644
index 0000000..b90d882
--- /dev/null
+++ b/jtosca/src/main/java/org/onap/sdc/toscaparser/api/utils/ValidateUtils.java
@@ -0,0 +1,439 @@
+/*-
+ * ============LICENSE_START=======================================================
+ * SDC
+ * ================================================================================
+ * Copyright (C) 2019 AT&T Intellectual Property. All rights reserved.
+ * ================================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * ============LICENSE_END=========================================================
+ */
+
+package org.onap.sdc.toscaparser.api.utils;
+
+import org.onap.sdc.toscaparser.api.common.JToscaValidationIssue;
+
import java.util.ArrayList;
import java.util.Date;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
+
+public class ValidateUtils {
+
+ private static final String RANGE_UNBOUNDED = "UNBOUNDED";
+
+ private ValidateUtils() {
+ }
+
+ public static Object strToNum(Object value) {
+ // Convert a string representation of a number into a numeric type
+ // TODO(TBD) we should not allow numeric values in, input should be str
+ if (value instanceof Number) {
+ return value;
+ }
+ try {
+ return Integer.parseInt((String) value);
+ } catch (NumberFormatException e) {
+ }
+ try {
+ return Float.parseFloat((String) value);
+ } catch (Exception e) {
+ }
+ return null;
+ }
+
+ public static Object validateNumeric(Object value) {
+ if (value != null) {
+ if (!(value instanceof Number)) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE257", String.format(
+ "ValueError: \"%s\" is not a numeric", value.toString())));
+ }
+ }
+ return value;
+ }
+
+ public static Object validateInteger(Object value) {
+ if (value != null) {
+ if (!(value instanceof Integer)) {
+ // allow "true" and "false"
+ if (value instanceof Boolean) {
+ return (Boolean) value ? 1 : 0;
+ }
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE258", String.format(
+ "ValueError: \"%s\" is not an integer", value.toString())));
+ }
+ }
+ return value;
+ }
+
+ public static Object validateFloat(Object value) {
+ if (value != null) {
+ if (!(value instanceof Float || value instanceof Double)) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE259", String.format(
+ "ValueError: \"%s\" is not a float", value.toString())));
+ }
+ }
+ return value;
+ }
+
+ public static Object validateString(Object value) {
+ if (value != null) {
+ if (!(value instanceof String)) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE260", String.format(
+ "ValueError: \'%s\' is not a string", value.toString())));
+ }
+ }
+ return value;
+ }
+
+ public static Object validateList(Object value) {
+ if (value != null) {
+ if (!(value instanceof ArrayList)) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE261", String.format(
+ "ValueError: \"%s\" is not a list", value.toString())));
+ }
+ }
+ return value;
+ }
+
+
+ @SuppressWarnings("unchecked")
+ public static Object validateRange(Object range) {
+ // list class check
+ validateList(range);
+ // validate range list has a min and max
+ if (range instanceof ArrayList && ((ArrayList<Object>) range).size() != 2) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE262", String.format(
+ "ValueError: \"%s\" is not a valid range", range.toString())));
+ // too dangerous to continue...
+ return range;
+ }
+ // validate min and max are numerics or the keyword UNBOUNDED
+ boolean minTest = false;
+ boolean maxTest = false;
+ Object r0 = ((ArrayList<Object>) range).get(0);
+ Object r1 = ((ArrayList<Object>) range).get(1);
+
+ if (!(r0 instanceof Integer) && !(r0 instanceof Float)
+ || !(r1 instanceof Integer) && !(r1 instanceof Float)) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE263", String.format(
+ "ValueError: \"%s\" is not a valid range", range.toString())));
+ // too dangerous to continue...
+ return range;
+ }
+
+ Float min = 0.0F;
+ Float max = 0.0F;
+ if (r0 instanceof String && ((String) r0).equals(RANGE_UNBOUNDED)) {
+ minTest = true;
+ } else {
+ min = r0 instanceof Integer ? ((Integer) r0).floatValue() : (Float) r0;
+ }
+ if (r1 instanceof String && ((String) r1).equals(RANGE_UNBOUNDED)) {
+ maxTest = true;
+ } else {
+ max = r1 instanceof Integer ? ((Integer) r1).floatValue() : (Float) r1;
+ }
+
+ // validate the max > min (account for UNBOUNDED)
+ if (!minTest && !maxTest) {
+ // Note: min == max is allowed
+ if (min > max) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE264", String.format(
+ "ValueError:\"%s\" is not a valid range", range.toString())));
+ }
+ }
+ return range;
+ }
+
+ @SuppressWarnings("unchecked")
+ public static Object validateValueInRange(Object value, Object range, String propName) {
+ // verify all 3 are numeric and convert to Floats
+ if (!(value instanceof Integer || value instanceof Float)) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE265", String.format(
+ "ValueError: validateInRange: \"%s\" is not a number", range.toString())));
+ return value;
+ }
+ Float fval = value instanceof Integer ? ((Integer) value).floatValue() : (Float) value;
+
+ //////////////////////////
+ //"validateRange(range);"
+ //////////////////////////
+ // better safe than sorry...
+ // validate that range list has a min and max
+ if (range instanceof ArrayList && ((ArrayList<Object>) range).size() != 2) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE266", String.format(
+ "ValueError: \"%s\" is not a valid range", range.toString())));
+ // too dangerous to continue...
+ return value;
+ }
+ // validate min and max are numerics or the keyword UNBOUNDED
+ boolean minTest = false;
+ boolean maxTest = false;
+ Object r0 = ((ArrayList<Object>) range).get(0);
+ Object r1 = ((ArrayList<Object>) range).get(1);
+
+ if (!(r0 instanceof Integer) && !(r0 instanceof Float)
+ || !(r1 instanceof Integer) && !(r1 instanceof Float)) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE267", String.format(
+ "ValueError: \"%s\" is not a valid range", range.toString())));
+ // too dangerous to continue...
+ return value;
+ }
+
+ Float min = 0.0F;
+ Float max = 0.0F;
+ if (r0 instanceof String && ((String) r0).equals(RANGE_UNBOUNDED)) {
+ minTest = true;
+ } else {
+ min = r0 instanceof Integer ? ((Integer) r0).floatValue() : (Float) r0;
+ }
+ if (r1 instanceof String && ((String) r1).equals(RANGE_UNBOUNDED)) {
+ maxTest = true;
+ } else {
+ max = r1 instanceof Integer ? ((Integer) r1).floatValue() : (Float) r1;
+ }
+
+ // validate the max > min (account for UNBOUNDED)
+ if (!minTest && !maxTest) {
+ // Note: min == max is allowed
+ if (min > max) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE268", String.format(
+ "ValueError:\"%s\" is not a valid range", range.toString())));
+ }
+ }
+ // finally...
+ boolean bError = false;
+ //Note: value is valid if equal to min
+ if (!minTest) {
+ if (fval < min) {
+ bError = true;
+ }
+ }
+ // Note: value is valid if equal to max
+ if (!maxTest) {
+ if (fval > max) {
+ bError = true;
+ }
+ }
+ if (bError) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE269", String.format(
+ "RangeValueError: Property \"%s\", \"%s\" not in range [\"%s\" - \"%s\"",
+ propName, value.toString(), r0.toString(), r1.toString())));
+ }
+ return value;
+ }
+
+ public static Object validateMap(Object ob) {
+ if (ob != null) {
+ if (!(ob instanceof LinkedHashMap)) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE270", String.format(
+ "ValueError\"%s\" is not a map.", ob.toString())));
+ }
+ }
+ return ob;
+ }
+
+ public static Object validateBoolean(Object value) {
+ if (value != null) {
+ if (value instanceof Boolean) {
+ return value;
+ }
+ if (value instanceof String) {
+ String normalized = ((String) value).toLowerCase();
+ if (normalized.equals("true") || normalized.equals("false")) {
+ return normalized.equals("true");
+ }
+ }
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE271", String.format(
+ "ValueError: \"%s\" is not a boolean", value.toString())));
+ }
+ return value;
+ }
+
+ public static Object validateTimestamp(Object value) {
+
+ /*
+ try:
+ # Note: we must return our own exception message
+ # as dateutil's parser returns different types / values on
+ # different systems. OSX, for example, returns a tuple
+ # containing a different error message than Linux
+ dateutil.parser.parse(value)
+ except Exception as e:
+ original_err_msg = str(e)
+ log.error(original_err_msg)
+ ValidationIssueCollector.appendException(
+ ValueError(_('"%(val)s" is not a valid timestamp. "%(msg)s"') %
+ {'val': value, 'msg': original_err_msg}))
+ */
+ // timestamps are loaded as Date objects by the YAML parser
+ if (value != null) {
+ if (!(value instanceof Date)) {
+ ThreadLocalsHolder.getCollector().appendValidationIssue(new JToscaValidationIssue("JE272", String.format(
+ "ValueError: \"%s\" is not a valid timestamp",
+ value.toString())));
+
+ }
+ }
+ return value;
+ }
+
+}
+
+/*python
+
+from toscaparser.elements import constraints
+from toscaparser.common.exception import ValidationIssueCollector
+from toscaparser.common.exception import InvalidTOSCAVersionPropertyException
+from toscaparser.common.exception import RangeValueError
+from toscaparser.utils.gettextutils import _
+
+log = logging.getLogger('tosca')
+
+RANGE_UNBOUNDED = 'UNBOUNDED'
+
+
+def str_to_num(value):
+ '''Convert a string representation of a number into a numeric type.'''
+ # tODO(TBD) we should not allow numeric values in, input should be str
+ if isinstance(value, numbers.Number):
+ return value
+ try:
+ return int(value)
+ except ValueError:
+ return float(value)
+
+
+def validate_numeric(value):
+ if not isinstance(value, numbers.Number):
+ ValidationIssueCollector.appendException(
+ ValueError(_('"%s" is not a numeric.') % value))
+ return value
+
+
+def validate_integer(value):
+ if not isinstance(value, int):
+ try:
+ value = int(value)
+ except Exception:
+ ValidationIssueCollector.appendException(
+ ValueError(_('"%s" is not an integer.') % value))
+ return value
+
+
+def validate_float(value):
+ if not isinstance(value, float):
+ ValidationIssueCollector.appendException(
+ ValueError(_('"%s" is not a float.') % value))
+ return value
+
+
+def validate_string(value):
+ if not isinstance(value, six.string_types):
+ ValidationIssueCollector.appendException(
+ ValueError(_('"%s" is not a string.') % value))
+ return value
+
+
+def validate_list(value):
+ if not isinstance(value, list):
+ ValidationIssueCollector.appendException(
+ ValueError(_('"%s" is not a list.') % value))
+ return value
+
+
+def validate_range(range):
+ # list class check
+ validate_list(range)
+ # validate range list has a min and max
+ if len(range) != 2:
+ ValidationIssueCollector.appendException(
+ ValueError(_('"%s" is not a valid range.') % range))
+ # validate min and max are numerics or the keyword UNBOUNDED
+ min_test = max_test = False
+ if not range[0] == RANGE_UNBOUNDED:
+ min = validate_numeric(range[0])
+ else:
+ min_test = True
+ if not range[1] == RANGE_UNBOUNDED:
+ max = validate_numeric(range[1])
+ else:
+ max_test = True
+ # validate the max > min (account for UNBOUNDED)
+ if not min_test and not max_test:
+ # Note: min == max is allowed
+ if min > max:
+ ValidationIssueCollector.appendException(
+ ValueError(_('"%s" is not a valid range.') % range))
+
+ return range
+
+
+def validate_value_in_range(value, range, prop_name):
+ validate_numeric(value)
+ validate_range(range)
+
+ # Note: value is valid if equal to min
+ if range[0] != RANGE_UNBOUNDED:
+ if value < range[0]:
+ ValidationIssueCollector.appendException(
+ RangeValueError(pname=prop_name,
+ pvalue=value,
+ vmin=range[0],
+ vmax=range[1]))
+ # Note: value is valid if equal to max
+ if range[1] != RANGE_UNBOUNDED:
+ if value > range[1]:
+ ValidationIssueCollector.appendException(
+ RangeValueError(pname=prop_name,
+ pvalue=value,
+ vmin=range[0],
+ vmax=range[1]))
+ return value
+
+
+def validate_map(value):
+ if not isinstance(value, collections.Mapping):
+ ValidationIssueCollector.appendException(
+ ValueError(_('"%s" is not a map.') % value))
+ return value
+
+
+def validate_boolean(value):
+ if isinstance(value, bool):
+ return value
+
+ if isinstance(value, str):
+ normalised = value.lower()
+ if normalised in ['true', 'false']:
+ return normalised == 'true'
+
+ ValidationIssueCollector.appendException(
+ ValueError(_('"%s" is not a boolean.') % value))
+
+
+def validate_timestamp(value):
+ try:
+ # Note: we must return our own exception message
+ # as dateutil's parser returns different types / values on
+ # different systems. OSX, for example, returns a tuple
+ # containing a different error message than Linux
+ dateutil.parser.parse(value)
+ except Exception as e:
+ original_err_msg = str(e)
+ log.error(original_err_msg)
+ ValidationIssueCollector.appendException(
+ ValueError(_('"%(val)s" is not a valid timestamp. "%(msg)s"') %
+ {'val': value, 'msg': original_err_msg}))
+ return
+
+*/
diff --git a/jtosca/src/main/resources/TOSCA_definition_1_0.yaml b/jtosca/src/main/resources/TOSCA_definition_1_0.yaml
new file mode 100644
index 0000000..d80ed17
--- /dev/null
+++ b/jtosca/src/main/resources/TOSCA_definition_1_0.yaml
@@ -0,0 +1,971 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+##########################################################################
+# The content of this file reflects TOSCA Simple Profile in YAML version
+# 1.0.0. It describes the definition for TOSCA types including Node Type,
+# Relationship Type, CapabilityAssignment Type and Interfaces.
+##########################################################################
+tosca_definitions_version: tosca_simple_yaml_1_0
+
+##########################################################################
+# Node Type.
+# A Node Type is a reusable entity that defines the type of one or more
+# Node Templates.
+##########################################################################
+node_types:
+ tosca.nodes.Root:
+ description: >
+ The TOSCA root node all other TOSCA base node types derive from.
+ attributes:
+ tosca_id:
+ type: string
+ tosca_name:
+ type: string
+ state:
+ type: string
+ capabilities:
+ feature:
+ type: tosca.capabilities.Node
+ requirements:
+ - dependency:
+ capability: tosca.capabilities.Node
+ node: tosca.nodes.Root
+ relationship: tosca.relationships.DependsOn
+ occurrences: [ 0, UNBOUNDED ]
+ interfaces:
+ Standard:
+ type: tosca.interfaces.node.lifecycle.Standard
+
+ tosca.nodes.Compute:
+ derived_from: tosca.nodes.Root
+ attributes:
+ private_address:
+ type: string
+ public_address:
+ type: string
+ networks:
+ type: map
+ entry_schema:
+ type: tosca.datatypes.network.NetworkInfo
+ ports:
+ type: map
+ entry_schema:
+ type: tosca.datatypes.network.PortInfo
+ capabilities:
+ host:
+ type: tosca.capabilities.Container
+ binding:
+ type: tosca.capabilities.network.Bindable
+ os:
+ type: tosca.capabilities.OperatingSystem
+ scalable:
+ type: tosca.capabilities.Scalable
+ endpoint:
+ type: tosca.capabilities.Endpoint.Admin
+ requirements:
+ - local_storage:
+ capability: tosca.capabilities.Attachment
+ node: tosca.nodes.BlockStorage
+ relationship: tosca.relationships.AttachesTo
+ occurrences: [0, UNBOUNDED]
+
+ tosca.nodes.SoftwareComponent:
+ derived_from: tosca.nodes.Root
+ properties:
+ # domain-specific software component version
+ component_version:
+ type: version
+ required: false
+ description: >
+ Software component version.
+ admin_credential:
+ type: tosca.datatypes.Credential
+ required: false
+ requirements:
+ - host:
+ capability: tosca.capabilities.Container
+ node: tosca.nodes.Compute
+ relationship: tosca.relationships.HostedOn
+
+ tosca.nodes.DBMS:
+ derived_from: tosca.nodes.SoftwareComponent
+ properties:
+ port:
+ required: false
+ type: integer
+ description: >
+ The port the DBMS service will listen to for data and requests.
+ root_password:
+ required: false
+ type: string
+ description: >
+ The root password for the DBMS service.
+ capabilities:
+ host:
+ type: tosca.capabilities.Container
+ valid_source_types: [tosca.nodes.Database]
+
+ tosca.nodes.Database:
+ derived_from: tosca.nodes.Root
+ properties:
+ user:
+ required: false
+ type: string
+ description: >
+ User account name for DB administration
+ port:
+ required: false
+ type: integer
+ description: >
+ The port the database service will use to listen for incoming data and
+ requests.
+ name:
+ required: false
+ type: string
+ description: >
+ The name of the database.
+ password:
+ required: false
+ type: string
+ description: >
+ The password for the DB user account
+ requirements:
+ - host:
+ capability: tosca.capabilities.Container
+ node: tosca.nodes.DBMS
+ relationship: tosca.relationships.HostedOn
+ capabilities:
+ database_endpoint:
+ type: tosca.capabilities.Endpoint.Database
+
+ tosca.nodes.WebServer:
+ derived_from: tosca.nodes.SoftwareComponent
+ capabilities:
+ data_endpoint:
+ type: tosca.capabilities.Endpoint
+ admin_endpoint:
+ type: tosca.capabilities.Endpoint.Admin
+ host:
+ type: tosca.capabilities.Container
+ valid_source_types: [tosca.nodes.WebApplication]
+
+ tosca.nodes.WebApplication:
+ derived_from: tosca.nodes.Root
+ properties:
+ context_root:
+ type: string
+ required: false
+ requirements:
+ - host:
+ capability: tosca.capabilities.Container
+ node: tosca.nodes.WebServer
+ relationship: tosca.relationships.HostedOn
+ capabilities:
+ app_endpoint:
+ type: tosca.capabilities.Endpoint
+
+ tosca.nodes.BlockStorage:
+ derived_from: tosca.nodes.Root
+ properties:
+ size:
+ type: scalar-unit.size
+ constraints:
+ - greater_or_equal: 1 MB
+ volume_id:
+ type: string
+ required: false
+ snapshot_id:
+ type: string
+ required: false
+ attributes:
+ volume_id:
+ type: string
+ capabilities:
+ attachment:
+ type: tosca.capabilities.Attachment
+
+ tosca.nodes.network.Network:
+ derived_from: tosca.nodes.Root
+ description: >
+ The TOSCA Network node represents a simple, logical network service.
+ properties:
+ ip_version:
+ type: integer
+ required: false
+ default: 4
+ constraints:
+ - valid_values: [ 4, 6 ]
+ description: >
+ The IP version of the requested network. Valid values are 4 for ipv4
+ or 6 for ipv6.
+ cidr:
+ type: string
+ required: false
+ description: >
+ The cidr block of the requested network.
+ start_ip:
+ type: string
+ required: false
+ description: >
+ The IP address to be used as the start of a pool of addresses within
+ the full IP range derived from the cidr block.
+ end_ip:
+ type: string
+ required: false
+ description: >
+ The IP address to be used as the end of a pool of addresses within
+ the full IP range derived from the cidr block.
+ gateway_ip:
+ type: string
+ required: false
+ description: >
+ The gateway IP address.
+ network_name:
+ type: string
+ required: false
+ description: >
+ An identifier that represents an existing Network instance in the
+ underlying cloud infrastructure or can be used as the name of the
+ newly created network. If network_name is provided and no other
+ properties are provided (with exception of network_id), then an
+ existing network instance will be used. If network_name is provided
+ alongside with more properties then a new network with this name will
+ be created.
+ network_id:
+ type: string
+ required: false
+ description: >
+ An identifier that represents an existing Network instance in the
+ underlying cloud infrastructure. This property is mutually exclusive
+ with all other properties except network_name. This can be used alone
+ or together with network_name to identify an existing network.
+ segmentation_id:
+ type: string
+ required: false
+ description: >
+ A segmentation identifier in the underlying cloud infrastructure.
+ E.g. VLAN ID, GRE tunnel ID, etc..
+ network_type:
+ type: string
+ required: false
+ description: >
+ It specifies the nature of the physical network in the underlying
+ cloud infrastructure. Examples are flat, vlan, gre or vxlan.
+ For flat and vlan types, physical_network should be provided too.
+ physical_network:
+ type: string
+ required: false
+ description: >
+ It identifies the physical network on top of which the network is
+ implemented, e.g. physnet1. This property is required if network_type
+ is flat or vlan.
+ dhcp_enabled:
+ type: boolean
+ required: false
+ default: true
+ description: >
+ Indicates should DHCP service be enabled on the network or not.
+ capabilities:
+ link:
+ type: tosca.capabilities.network.Linkable
+
+ tosca.nodes.network.Port:
+ derived_from: tosca.nodes.Root
+ description: >
+ The TOSCA Port node represents a logical entity that associates between
+ Compute and Network normative types. The Port node type effectively
+ represents a single virtual NIC on the Compute node instance.
+ properties:
+ ip_address:
+ type: string
+ required: false
+ description: >
+ Allow the user to set a static IP.
+ order:
+ type: integer
+ required: false
+ default: 0
+ constraints:
+ - greater_or_equal: 0
+ description: >
+ The order of the NIC on the compute instance (e.g. eth2).
+ is_default:
+ type: boolean
+ required: false
+ default: false
+ description: >
+ If is_default=true this port will be used for the default gateway
+ route. Only one port that is associated to single compute node can
+ set as is_default=true.
+ ip_range_start:
+ type: string
+ required: false
+ description: >
+ Defines the starting IP of a range to be allocated for the compute
+ instances that are associated with this Port.
+ ip_range_end:
+ type: string
+ required: false
+ description: >
+ Defines the ending IP of a range to be allocated for the compute
+ instances that are associated with this Port.
+ attributes:
+ ip_address:
+ type: string
+ requirements:
+ - binding:
+ description: >
+ Binding requirement expresses the relationship between Port and
+ Compute nodes. Effectively it indicates that the Port will be
+ attached to specific Compute node instance
+ capability: tosca.capabilities.network.Bindable
+ relationship: tosca.relationships.network.BindsTo
+ node: tosca.nodes.Compute
+ - link:
+ description: >
+ Link requirement expresses the relationship between Port and Network
+ nodes. It indicates which network this port will connect to.
+ capability: tosca.capabilities.network.Linkable
+ relationship: tosca.relationships.network.LinksTo
+ node: tosca.nodes.network.Network
+
+ tosca.nodes.network.FloatingIP:
+ derived_from: tosca.nodes.Root
+ description: >
+ The TOSCA FloatingIP node represents a floating IP that can associate to a Port.
+ properties:
+ floating_network:
+ type: string
+ required: true
+ floating_ip_address:
+ type: string
+ required: false
+ port_id:
+ type: string
+ required: false
+ requirements:
+ - link:
+ capability: tosca.capabilities.network.Linkable
+ relationship: tosca.relationships.network.LinksTo
+ node: tosca.nodes.network.Port
+
+ tosca.nodes.ObjectStorage:
+ derived_from: tosca.nodes.Root
+ description: >
+ The TOSCA ObjectStorage node represents storage that provides the ability
+ to store data as objects (or BLOBs of data) without consideration for the
+ underlying filesystem or devices
+ properties:
+ name:
+ type: string
+ required: true
+ description: >
+ The logical name of the object store (or container).
+ size:
+ type: scalar-unit.size
+ required: false
+ constraints:
+ - greater_or_equal: 0 GB
+ description: >
+ The requested initial storage size.
+ maxsize:
+ type: scalar-unit.size
+ required: false
+ constraints:
+ - greater_or_equal: 0 GB
+ description: >
+ The requested maximum storage size.
+ capabilities:
+ storage_endpoint:
+ type: tosca.capabilities.Endpoint
+
+ tosca.nodes.LoadBalancer:
+ derived_from: tosca.nodes.Root
+ properties:
+ algorithm:
+ type: string
+ required: false
+ status: experimental
+ capabilities:
+ client:
+ type: tosca.capabilities.Endpoint.Public
+ occurrences: [0, UNBOUNDED]
+ description: the Floating (IP) client’s on the public network can connect to
+ requirements:
+ - application:
+ capability: tosca.capabilities.Endpoint
+ relationship: tosca.relationships.RoutesTo
+ occurrences: [0, UNBOUNDED]
+ description: Connection to one or more load balanced applications
+
+ tosca.nodes.Container.Application:
+ derived_from: tosca.nodes.Root
+ requirements:
+ - host:
+ capability: tosca.capabilities.Container
+ node: tosca.nodes.Container.Runtime
+ relationship: tosca.relationships.HostedOn
+
+ tosca.nodes.Container.Runtime:
+ derived_from: tosca.nodes.SoftwareComponent
+ capabilities:
+ host:
+ type: tosca.capabilities.Container
+ scalable:
+ type: tosca.capabilities.Scalable
+
+ tosca.nodes.Container.Application.Docker:
+ derived_from: tosca.nodes.Container.Application
+ requirements:
+ - host:
+ capability: tosca.capabilities.Container.Docker
+
+##########################################################################
+# Relationship Type.
+# A Relationship Type is a reusable entity that defines the type of one
+# or more relationships between Node Types or Node Templates.
+##########################################################################
+relationship_types:
+ tosca.relationships.Root:
+ description: >
+ The TOSCA root Relationship Type all other TOSCA base Relationship Types
+ derive from.
+ attributes:
+ tosca_id:
+ type: string
+ tosca_name:
+ type: string
+ interfaces:
+ Configure:
+ type: tosca.interfaces.relationship.Configure
+
+ tosca.relationships.DependsOn:
+ derived_from: tosca.relationships.Root
+
+ tosca.relationships.HostedOn:
+ derived_from: tosca.relationships.Root
+ valid_target_types: [ tosca.capabilities.Container ]
+
+ tosca.relationships.ConnectsTo:
+ derived_from: tosca.relationships.Root
+ valid_target_types: [ tosca.capabilities.Endpoint ]
+ credential:
+ type: tosca.datatypes.Credential
+ required: false
+
+ tosca.relationships.AttachesTo:
+ derived_from: tosca.relationships.Root
+ valid_target_types: [ tosca.capabilities.Attachment ]
+ properties:
+ location:
+ required: true
+ type: string
+ constraints:
+ - min_length: 1
+ device:
+ required: false
+ type: string
+
+ tosca.relationships.RoutesTo:
+ derived_from: tosca.relationships.ConnectsTo
+ valid_target_types: [ tosca.capabilities.Endpoint ]
+
+ tosca.relationships.network.LinksTo:
+ derived_from: tosca.relationships.DependsOn
+ valid_target_types: [ tosca.capabilities.network.Linkable ]
+
+ tosca.relationships.network.BindsTo:
+ derived_from: tosca.relationships.DependsOn
+ valid_target_types: [ tosca.capabilities.network.Bindable ]
+
+##########################################################################
+# CapabilityAssignment Type.
+# A CapabilityAssignment Type is a reusable entity that describes a kind of
+# capability that a Node Type can declare to expose.
+##########################################################################
+capability_types:
+ tosca.capabilities.Root:
+ description: >
+ The TOSCA root Capability Type all other TOSCA base Capability Types
+ derive from.
+
+ tosca.capabilities.Node:
+ derived_from: tosca.capabilities.Root
+
+ tosca.capabilities.Container:
+ derived_from: tosca.capabilities.Root
+ properties:
+ num_cpus:
+ required: false
+ type: integer
+ constraints:
+ - greater_or_equal: 1
+ cpu_frequency:
+ required: false
+ type: scalar-unit.frequency
+ constraints:
+ - greater_or_equal: 0.1 GHz
+ disk_size:
+ required: false
+ type: scalar-unit.size
+ constraints:
+ - greater_or_equal: 0 MB
+ mem_size:
+ required: false
+ type: scalar-unit.size
+ constraints:
+ - greater_or_equal: 0 MB
+
+ tosca.capabilities.Endpoint:
+ derived_from: tosca.capabilities.Root
+ properties:
+ protocol:
+ type: string
+ required: true
+ default: tcp
+ port:
+ type: tosca.datatypes.network.PortDef
+ required: false
+ secure:
+ type: boolean
+ required: false
+ default: false
+ url_path:
+ type: string
+ required: false
+ port_name:
+ type: string
+ required: false
+ network_name:
+ type: string
+ required: false
+ default: PRIVATE
+ initiator:
+ type: string
+ required: false
+ default: source
+ constraints:
+ - valid_values: [source, target, peer]
+ ports:
+ type: map
+ required: false
+ constraints:
+ - min_length: 1
+ entry_schema:
+ type: tosca.datatypes.network.PortSpec
+ attributes:
+ ip_address:
+ type: string
+
+ tosca.capabilities.Endpoint.Admin:
+ derived_from: tosca.capabilities.Endpoint
+ properties:
+ secure:
+ type: boolean
+ default: true
+ constraints:
+ - equal: true
+
+ tosca.capabilities.Endpoint.Public:
+ derived_from: tosca.capabilities.Endpoint
+ properties:
+ # Change the default network_name to use the first public network found
+ network_name:
+ type: string
+ default: PUBLIC
+ constraints:
+ - equal: PUBLIC
+ floating:
+ description: >
+ Indicates that the public address should be allocated from a pool of
+ floating IPs that are associated with the network.
+ type: boolean
+ default: false
+ status: experimental
+ dns_name:
+ description: The optional name to register with DNS
+ type: string
+ required: false
+ status: experimental
+
+ tosca.capabilities.Scalable:
+ derived_from: tosca.capabilities.Root
+ properties:
+ min_instances:
+ type: integer
+ required: true
+ default: 1
+ description: >
+ This property is used to indicate the minimum number of instances
+ that should be created for the associated TOSCA Node Template by
+ a TOSCA orchestrator.
+ max_instances:
+ type: integer
+ required: true
+ default: 1
+ description: >
+ This property is used to indicate the maximum number of instances
+ that should be created for the associated TOSCA Node Template by
+ a TOSCA orchestrator.
+ default_instances:
+ type: integer
+ required: false
+ description: >
+ An optional property that indicates the requested default number
+ of instances that should be the starting number of instances a
+ TOSCA orchestrator should attempt to allocate.
+ The value for this property MUST be in the range between the values
+ set for min_instances and max_instances properties.
+
+ tosca.capabilities.Endpoint.Database:
+ derived_from: tosca.capabilities.Endpoint
+
+ tosca.capabilities.Attachment:
+ derived_from: tosca.capabilities.Root
+
+ tosca.capabilities.network.Linkable:
+ derived_from: tosca.capabilities.Root
+ description: >
+ A node type that includes the Linkable capability indicates that it can
+ be pointed by tosca.relationships.network.LinksTo relationship type, which
+ represents an association relationship between Port and Network node types.
+
+ tosca.capabilities.network.Bindable:
+ derived_from: tosca.capabilities.Root
+ description: >
+ A node type that includes the Bindable capability indicates that it can
+ be pointed by tosca.relationships.network.BindsTo relationship type, which
+ represents a network association relationship between Port and Compute node
+ types.
+
+ tosca.capabilities.OperatingSystem:
+ derived_from: tosca.capabilities.Root
+ properties:
+ architecture:
+ required: false
+ type: string
+ description: >
+ The host Operating System (OS) architecture.
+ type:
+ required: false
+ type: string
+ description: >
+ The host Operating System (OS) type.
+ distribution:
+ required: false
+ type: string
+ description: >
+ The host Operating System (OS) distribution. Examples of valid values
+        for a “type” of “Linux” would include:
+ debian, fedora, rhel and ubuntu.
+ version:
+ required: false
+ type: version
+ description: >
+ The host Operating System version.
+
+ tosca.capabilities.Container.Docker:
+ derived_from: tosca.capabilities.Container
+ properties:
+ version:
+ type: list
+ required: false
+ entry_schema:
+ type: version
+ description: >
+ The Docker version capability.
+ publish_all:
+ type: boolean
+ default: false
+ required: false
+ description: >
+ Indicates that all ports (ranges) listed in the dockerfile
+ using the EXPOSE keyword be published.
+ publish_ports:
+ type: list
+ entry_schema:
+ type: tosca.datatypes.network.PortSpec
+ required: false
+ description: >
+ List of ports mappings from source (Docker container)
+ to target (host) ports to publish.
+ expose_ports:
+ type: list
+ entry_schema:
+ type: tosca.datatypes.network.PortSpec
+ required: false
+ description: >
+ List of ports mappings from source (Docker container) to expose
+ to other Docker containers (not accessible outside host).
+ volumes:
+ type: list
+ entry_schema:
+ type: string
+ required: false
+ description: >
+ The dockerfile VOLUME command which is used to enable access
+ from the Docker container to a directory on the host machine.
+ host_id:
+ type: string
+ required: false
+ description: >
+ The optional identifier of an existing host resource
+ that should be used to run this container on.
+ volume_id:
+ type: string
+ required: false
+ description: >
+ The optional identifier of an existing storage volume (resource)
+ that should be used to create the container's mount point(s) on.
+
+##########################################################################
+ # Interfaces Type.
+ # The Interfaces element describes a list of one or more interface
+ # definitions for a modelable entity (e.g., a Node or Relationship Type)
+ # as defined within the TOSCA Simple Profile specification.
+##########################################################################
+interface_types:
+ tosca.interfaces.node.lifecycle.Standard:
+ create:
+ description: Standard lifecycle create operation.
+ configure:
+ description: Standard lifecycle configure operation.
+ start:
+ description: Standard lifecycle start operation.
+ stop:
+ description: Standard lifecycle stop operation.
+ delete:
+ description: Standard lifecycle delete operation.
+
+ tosca.interfaces.relationship.Configure:
+ pre_configure_source:
+ description: Operation to pre-configure the source endpoint.
+ pre_configure_target:
+ description: Operation to pre-configure the target endpoint.
+ post_configure_source:
+ description: Operation to post-configure the source endpoint.
+ post_configure_target:
+ description: Operation to post-configure the target endpoint.
+ add_target:
+ description: Operation to add a target node.
+ remove_target:
+ description: Operation to remove a target node.
+    add_source:
+      description: Operation to notify the target node of a source node which
+        is now available via a relationship.
+    target_changed:
+      description: Operation to notify source some property or attribute of the
+        target changed
+
+##########################################################################
+ # Data Type.
+ # A Datatype is a complex data type declaration which contains other
+ # complex or simple data types.
+##########################################################################
+data_types:
+ tosca.datatypes.Root:
+ description: >
+ The TOSCA root Data Type all other TOSCA base Data Types derive from
+
+ tosca.datatypes.network.NetworkInfo:
+ derived_from: tosca.datatypes.Root
+ properties:
+ network_name:
+ type: string
+ network_id:
+ type: string
+ addresses:
+ type: list
+ entry_schema:
+ type: string
+
+ tosca.datatypes.network.PortInfo:
+ derived_from: tosca.datatypes.Root
+ properties:
+ port_name:
+ type: string
+ port_id:
+ type: string
+ network_id:
+ type: string
+ mac_address:
+ type: string
+ addresses:
+ type: list
+ entry_schema:
+ type: string
+
+ tosca.datatypes.network.PortDef:
+ derived_from: tosca.datatypes.Root
+ type: integer
+ constraints:
+ - in_range: [ 1, 65535 ]
+
+ tosca.datatypes.network.PortSpec:
+ derived_from: tosca.datatypes.Root
+ properties:
+ protocol:
+ type: string
+ required: true
+ default: tcp
+ constraints:
+ - valid_values: [ udp, tcp, igmp ]
+ target:
+ type: tosca.datatypes.network.PortDef
+ required: false
+ target_range:
+ type: range
+ required: false
+ constraints:
+ - in_range: [ 1, 65535 ]
+ source:
+ type: tosca.datatypes.network.PortDef
+ required: false
+ source_range:
+ type: range
+ required: false
+ constraints:
+ - in_range: [ 1, 65535 ]
+
+ tosca.datatypes.Credential:
+ derived_from: tosca.datatypes.Root
+ properties:
+ protocol:
+ type: string
+ required: false
+ token_type:
+ type: string
+ default: password
+ required: true
+ token:
+ type: string
+ required: true
+ keys:
+ type: map
+ entry_schema:
+ type: string
+ required: false
+ user:
+ type: string
+ required: false
+
+##########################################################################
+ # Artifact Type.
+ # An Artifact Type is a reusable entity that defines the type of one or more
+ # files which Node Types or Node Templates can have dependent relationships
+ # and used during operations such as during installation or deployment.
+##########################################################################
+artifact_types:
+ tosca.artifacts.Root:
+ description: >
+ The TOSCA Artifact Type all other TOSCA Artifact Types derive from
+ properties:
+ version: version
+
+ tosca.artifacts.File:
+ derived_from: tosca.artifacts.Root
+
+ tosca.artifacts.Deployment:
+ derived_from: tosca.artifacts.Root
+ description: TOSCA base type for deployment artifacts
+
+ tosca.artifacts.Deployment.Image:
+ derived_from: tosca.artifacts.Deployment
+
+ tosca.artifacts.Deployment.Image.VM:
+ derived_from: tosca.artifacts.Deployment.Image
+
+ tosca.artifacts.Implementation:
+ derived_from: tosca.artifacts.Root
+ description: TOSCA base type for implementation artifacts
+
+ tosca.artifacts.Implementation.Bash:
+ derived_from: tosca.artifacts.Implementation
+ description: Script artifact for the Unix Bash shell
+ mime_type: application/x-sh
+ file_ext: [ sh ]
+
+ tosca.artifacts.Implementation.Python:
+ derived_from: tosca.artifacts.Implementation
+ description: Artifact for the interpreted Python language
+ mime_type: application/x-python
+ file_ext: [ py ]
+
+ tosca.artifacts.Deployment.Image.Container.Docker:
+ derived_from: tosca.artifacts.Deployment.Image
+ description: Docker container image
+
+ tosca.artifacts.Deployment.Image.VM.ISO:
+ derived_from: tosca.artifacts.Deployment.Image
+ description: Virtual Machine (VM) image in ISO disk format
+ mime_type: application/octet-stream
+ file_ext: [ iso ]
+
+ tosca.artifacts.Deployment.Image.VM.QCOW2:
+ derived_from: tosca.artifacts.Deployment.Image
+ description: Virtual Machine (VM) image in QCOW v2 standard disk format
+ mime_type: application/octet-stream
+ file_ext: [ qcow2 ]
+
+##########################################################################
+ # Policy Type.
+ # TOSCA Policy Types represent logical grouping of TOSCA nodes that have
+ # an implied relationship and need to be orchestrated or managed together
+ # to achieve some result.
+##########################################################################
+policy_types:
+ tosca.policies.Root:
+ description: The TOSCA Policy Type all other TOSCA Policy Types derive from.
+
+ tosca.policies.Placement:
+ derived_from: tosca.policies.Root
+ description: The TOSCA Policy Type definition that is used to govern
+ placement of TOSCA nodes or groups of nodes.
+
+ tosca.policies.Scaling:
+ derived_from: tosca.policies.Root
+ description: The TOSCA Policy Type definition that is used to govern
+ scaling of TOSCA nodes or groups of nodes.
+
+ tosca.policies.Monitoring:
+ derived_from: tosca.policies.Root
+ description: The TOSCA Policy Type definition that is used to govern
+ monitoring of TOSCA nodes or groups of nodes.
+
+ tosca.policies.Update:
+ derived_from: tosca.policies.Root
+ description: The TOSCA Policy Type definition that is used to govern
+ update of TOSCA nodes or groups of nodes.
+
+ tosca.policies.Performance:
+ derived_from: tosca.policies.Root
+ description: The TOSCA Policy Type definition that is used to declare
+ performance requirements for TOSCA nodes or groups of nodes.
+
+ onap.policies.Monitoring:
+ derived_from: tosca.policies.Root
+ description: The ONAP Policy Type definition for DCAE uS component monitoring policies.
+
+##########################################################################
+ # Group Type.
+ # Group Type represents logical grouping of TOSCA nodes that have an
+ # implied membership relationship and may need to be orchestrated or
+ # managed together to achieve some result.
+##########################################################################
+group_types:
+ tosca.groups.Root:
+ description: The TOSCA Group Type all other TOSCA Group Types derive from
+ interfaces:
+ Standard:
+ type: tosca.interfaces.node.lifecycle.Standard
diff --git a/jtosca/src/main/resources/extensions/TOSCA_simple_yaml_definition_1_0_0/TOSCA_simple_yaml_definition_1_0_0.py b/jtosca/src/main/resources/extensions/TOSCA_simple_yaml_definition_1_0_0/TOSCA_simple_yaml_definition_1_0_0.py
new file mode 100644
index 0000000..a5bda4a
--- /dev/null
+++ b/jtosca/src/main/resources/extensions/TOSCA_simple_yaml_definition_1_0_0/TOSCA_simple_yaml_definition_1_0_0.py
@@ -0,0 +1,19 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# VERSION and DEFS_FILE are required for all extensions
+
+VERSION = 'tosca_simple_yaml_1_0_0'
+
+DEFS_FILE = "TOSCA_simple_yaml_definition_1_0_0.yaml"
+
+SECTIONS = ('metadata',)
diff --git a/jtosca/src/main/resources/extensions/TOSCA_simple_yaml_definition_1_0_0/TOSCA_simple_yaml_definition_1_0_0.yaml b/jtosca/src/main/resources/extensions/TOSCA_simple_yaml_definition_1_0_0/TOSCA_simple_yaml_definition_1_0_0.yaml
new file mode 100644
index 0000000..c645e27
--- /dev/null
+++ b/jtosca/src/main/resources/extensions/TOSCA_simple_yaml_definition_1_0_0/TOSCA_simple_yaml_definition_1_0_0.yaml
@@ -0,0 +1,240 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+##########################################################################
+# The content of this file reflects TOSCA NFV Profile in YAML version
+# 1.0.0. It describes the definition for TOSCA NFV types including Node Type,
+# Relationship Type, CapabilityAssignment Type and Interfaces.
+##########################################################################
+tosca_definitions_version: tosca_simple_yaml_1_0_0
+
+##########################################################################
+# Node Type.
+# A Node Type is a reusable entity that defines the type of one or more
+# Node Templates.
+##########################################################################
+node_types:
+ tosca.nodes.nfv.VNF:
+ derived_from: tosca.nodes.Root # Or should this be its own top - level type?
+ properties:
+ id:
+ type: string
+ description: ID of this VNF
+ vendor:
+ type: string
+ description: name of the vendor who generate this VNF
+ version:
+ type: version
+ description: version of the software for this VNF
+ requirements:
+ - virtualLink:
+ capability: tosca.capabilities.nfv.VirtualLinkable
+ relationship: tosca.relationships.nfv.VirtualLinksTo
+ node: tosca.nodes.nfv.VL
+
+ tosca.nodes.nfv.VDU:
+ derived_from: tosca.nodes.Compute
+ capabilities:
+ high_availability:
+ type: tosca.capabilities.nfv.HA
+ virtualbinding:
+ type: tosca.capabilities.nfv.VirtualBindable
+ monitoring_parameter:
+ type: tosca.capabilities.nfv.Metric
+ requirements:
+ - high_availability:
+ capability: tosca.capabilities.nfv.HA
+ relationship: tosca.relationships.nfv.HA
+ node: tosca.nodes.nfv.VDU
+ occurrences: [ 0, 1 ]
+
+ tosca.nodes.nfv.CP:
+ derived_from: tosca.nodes.network.Port
+ properties:
+ type:
+ type: string
+ required: false
+ requirements:
+ - virtualLink:
+ capability: tosca.capabilities.nfv.VirtualLinkable
+ relationship: tosca.relationships.nfv.VirtualLinksTo
+ node: tosca.nodes.nfv.VL
+ - virtualBinding:
+ capability: tosca.capabilities.nfv.VirtualBindable
+ relationship: tosca.relationships.nfv.VirtualBindsTo
+ node: tosca.nodes.nfv.VDU
+ attributes:
+ address:
+ type: string
+
+ tosca.nodes.nfv.VL:
+ derived_from: tosca.nodes.network.Network
+ properties:
+ vendor:
+ type: string
+ required: true
+ description: name of the vendor who generate this VL
+ capabilities:
+ virtual_linkable:
+ type: tosca.capabilities.nfv.VirtualLinkable
+
+ tosca.nodes.nfv.VL.ELine:
+ derived_from: tosca.nodes.nfv.VL
+ capabilities:
+ virtual_linkable:
+ occurrences: 2
+
+ tosca.nodes.nfv.VL.ELAN:
+ derived_from: tosca.nodes.nfv.VL
+
+ tosca.nodes.nfv.VL.ETree:
+ derived_from: tosca.nodes.nfv.VL
+
+ tosca.nodes.nfv.FP:
+ derived_from: tosca.nodes.Root
+ properties:
+ policy:
+ type: string
+ required: false
+ description: name of the vendor who generate this VL
+ requirements:
+ - forwarder:
+ capability: tosca.capabilities.nfv.Forwarder
+ relationship: tosca.relationships.nfv.ForwardsTo
+
+##########################################################################
+# Relationship Type.
+# A Relationship Type is a reusable entity that defines the type of one
+# or more relationships between Node Types or Node Templates.
+##########################################################################
+
+relationship_types:
+ tosca.relationships.nfv.VirtualLinksTo:
+ derived_from: tosca.relationships.network.LinksTo
+ valid_target_types: [ tosca.capabilities.nfv.VirtualLinkable ]
+
+ tosca.relationships.nfv.VirtualBindsTo:
+ derived_from: tosca.relationships.network.BindsTo
+ valid_target_types: [ tosca.capabilities.nfv.VirtualBindable ]
+
+ tosca.relationships.nfv.HA:
+ derived_from: tosca.relationships.Root
+ valid_target_types: [ tosca.capabilities.nfv.HA ]
+
+ tosca.relationships.nfv.Monitor:
+ derived_from: tosca.relationships.ConnectsTo
+ valid_target_types: [ tosca.capabilities.nfv.Metric ]
+
+ tosca.relationships.nfv.ForwardsTo:
+    derived_from: tosca.relationships.Root
+    valid_target_types: [ tosca.capabilities.nfv.Forwarder ]
+
+##########################################################################
+# CapabilityAssignment Type.
+# A CapabilityAssignment Type is a reusable entity that describes a kind of
+# capability that a Node Type can declare to expose.
+##########################################################################
+
+capability_types:
+ tosca.capabilities.nfv.VirtualLinkable:
+ derived_from: tosca.capabilities.network.Linkable
+
+ tosca.capabilities.nfv.VirtualBindable:
+ derived_from: tosca.capabilities.network.Bindable
+
+ tosca.capabilities.nfv.HA:
+ derived_from: tosca.capabilities.Root
+ valid_source_types: [ tosca.nodes.nfv.VDU ]
+
+ tosca.capabilities.nfv.HA.ActiveActive:
+ derived_from: tosca.capabilities.nfv.HA
+
+ tosca.capabilities.nfv.HA.ActivePassive:
+ derived_from: tosca.capabilities.nfv.HA
+
+ tosca.capabilities.nfv.Metric:
+ derived_from: tosca.capabilities.Root
+
+ tosca.capabilities.nfv.Forwarder:
+ derived_from: tosca.capabilities.Root
+
+##########################################################################
+ # Interfaces Type.
+ # The Interfaces element describes a list of one or more interface
+ # definitions for a modelable entity (e.g., a Node or Relationship Type)
+ # as defined within the TOSCA Simple Profile specification.
+##########################################################################
+
+##########################################################################
+ # Data Type.
+ # A Datatype is a complex data type declaration which contains other
+ # complex or simple data types.
+##########################################################################
+
+##########################################################################
+ # Artifact Type.
+ # An Artifact Type is a reusable entity that defines the type of one or more
+ # files which Node Types or Node Templates can have dependent relationships
+ # and used during operations such as during installation or deployment.
+##########################################################################
+
+##########################################################################
+ # Policy Type.
+ # TOSCA Policy Types represent logical grouping of TOSCA nodes that have
+ # an implied relationship and need to be orchestrated or managed together
+ # to achieve some result.
+##########################################################################
+
+##########################################################################
+ # Group Type
+ #
+##########################################################################
+group_types:
+ tosca.groups.nfv.VNFFG:
+ derived_from: tosca.groups.Root
+
+ properties:
+ vendor:
+ type: string
+ required: true
+ description: name of the vendor who generate this VNFFG
+
+ version:
+ type: string
+ required: true
+ description: version of this VNFFG
+
+ number_of_endpoints:
+ type: integer
+ required: true
+ description: count of the external endpoints included in this VNFFG
+
+ dependent_virtual_link:
+ type: list
+ entry_schema:
+ type: string
+ required: true
+ description: Reference to a VLD used in this Forwarding Graph
+
+ connection_point:
+ type: list
+ entry_schema:
+ type: string
+ required: true
+ description: Reference to Connection Points forming the VNFFG
+
+ constituent_vnfs:
+ type: list
+ entry_schema:
+ type: string
+ required: true
+ description: Reference to a list of VNFD used in this VNF Forwarding Graph
diff --git a/jtosca/src/main/resources/extensions/nfv/TOSCA_nfv_definition_1_0.yaml b/jtosca/src/main/resources/extensions/nfv/TOSCA_nfv_definition_1_0.yaml
new file mode 100644
index 0000000..8b08837
--- /dev/null
+++ b/jtosca/src/main/resources/extensions/nfv/TOSCA_nfv_definition_1_0.yaml
@@ -0,0 +1,240 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+##########################################################################
+# The content of this file reflects TOSCA NFV Profile in YAML version
+# 1.0.0. It describes the definition for TOSCA NFV types including Node Type,
+# Relationship Type, CapabilityAssignment Type and Interfaces.
+##########################################################################
+tosca_definitions_version: tosca_simple_profile_for_nfv_1_0_0
+
+##########################################################################
+# Node Type.
+# A Node Type is a reusable entity that defines the type of one or more
+# Node Templates.
+##########################################################################
+node_types:
+ tosca.nodes.nfv.VNF:
+ derived_from: tosca.nodes.Root # Or should this be its own top - level type?
+ properties:
+ id:
+ type: string
+ description: ID of this VNF
+ vendor:
+ type: string
+ description: name of the vendor who generate this VNF
+ version:
+ type: version
+ description: version of the software for this VNF
+ requirements:
+ - virtualLink:
+ capability: tosca.capabilities.nfv.VirtualLinkable
+ relationship: tosca.relationships.nfv.VirtualLinksTo
+ node: tosca.nodes.nfv.VL
+
+ tosca.nodes.nfv.VDU:
+ derived_from: tosca.nodes.Compute
+ capabilities:
+ high_availability:
+ type: tosca.capabilities.nfv.HA
+ virtualbinding:
+ type: tosca.capabilities.nfv.VirtualBindable
+ monitoring_parameter:
+ type: tosca.capabilities.nfv.Metric
+ requirements:
+ - high_availability:
+ capability: tosca.capabilities.nfv.HA
+ relationship: tosca.relationships.nfv.HA
+ node: tosca.nodes.nfv.VDU
+ occurrences: [ 0, 1 ]
+
+ tosca.nodes.nfv.CP:
+ derived_from: tosca.nodes.network.Port
+ properties:
+ type:
+ type: string
+ required: false
+ requirements:
+ - virtualLink:
+ capability: tosca.capabilities.nfv.VirtualLinkable
+ relationship: tosca.relationships.nfv.VirtualLinksTo
+ node: tosca.nodes.nfv.VL
+ - virtualBinding:
+ capability: tosca.capabilities.nfv.VirtualBindable
+ relationship: tosca.relationships.nfv.VirtualBindsTo
+ node: tosca.nodes.nfv.VDU
+ attributes:
+ address:
+ type: string
+
+ tosca.nodes.nfv.VL:
+ derived_from: tosca.nodes.network.Network
+ properties:
+ vendor:
+ type: string
+ required: true
+ description: name of the vendor who generate this VL
+ capabilities:
+ virtual_linkable:
+ type: tosca.capabilities.nfv.VirtualLinkable
+
+ tosca.nodes.nfv.VL.ELine:
+ derived_from: tosca.nodes.nfv.VL
+ capabilities:
+ virtual_linkable:
+ occurrences: 2
+
+ tosca.nodes.nfv.VL.ELAN:
+ derived_from: tosca.nodes.nfv.VL
+
+ tosca.nodes.nfv.VL.ETree:
+ derived_from: tosca.nodes.nfv.VL
+
+ tosca.nodes.nfv.FP:
+ derived_from: tosca.nodes.Root
+ properties:
+ policy:
+ type: string
+ required: false
+ description: name of the vendor who generate this VL
+ requirements:
+ - forwarder:
+ capability: tosca.capabilities.nfv.Forwarder
+ relationship: tosca.relationships.nfv.ForwardsTo
+
+##########################################################################
+# Relationship Type.
+# A Relationship Type is a reusable entity that defines the type of one
+# or more relationships between Node Types or Node Templates.
+##########################################################################
+
+relationship_types:
+ tosca.relationships.nfv.VirtualLinksTo:
+ derived_from: tosca.relationships.network.LinksTo
+ valid_target_types: [ tosca.capabilities.nfv.VirtualLinkable ]
+
+ tosca.relationships.nfv.VirtualBindsTo:
+ derived_from: tosca.relationships.network.BindsTo
+ valid_target_types: [ tosca.capabilities.nfv.VirtualBindable ]
+
+ tosca.relationships.nfv.HA:
+ derived_from: tosca.relationships.Root
+ valid_target_types: [ tosca.capabilities.nfv.HA ]
+
+ tosca.relationships.nfv.Monitor:
+ derived_from: tosca.relationships.ConnectsTo
+ valid_target_types: [ tosca.capabilities.nfv.Metric ]
+
+ tosca.relationships.nfv.ForwardsTo:
+    derived_from: tosca.relationships.Root
+    valid_target_types: [ tosca.capabilities.nfv.Forwarder ]
+
+##########################################################################
+# CapabilityAssignment Type.
+# A CapabilityAssignment Type is a reusable entity that describes a kind of
+# capability that a Node Type can declare to expose.
+##########################################################################
+
+capability_types:
+ tosca.capabilities.nfv.VirtualLinkable:
+ derived_from: tosca.capabilities.network.Linkable
+
+ tosca.capabilities.nfv.VirtualBindable:
+ derived_from: tosca.capabilities.network.Bindable
+
+ tosca.capabilities.nfv.HA:
+ derived_from: tosca.capabilities.Root
+ valid_source_types: [ tosca.nodes.nfv.VDU ]
+
+ tosca.capabilities.nfv.HA.ActiveActive:
+ derived_from: tosca.capabilities.nfv.HA
+
+ tosca.capabilities.nfv.HA.ActivePassive:
+ derived_from: tosca.capabilities.nfv.HA
+
+ tosca.capabilities.nfv.Metric:
+ derived_from: tosca.capabilities.Root
+
+ tosca.capabilities.nfv.Forwarder:
+ derived_from: tosca.capabilities.Root
+
+##########################################################################
+ # Interfaces Type.
+ # The Interfaces element describes a list of one or more interface
+ # definitions for a modelable entity (e.g., a Node or Relationship Type)
+ # as defined within the TOSCA Simple Profile specification.
+##########################################################################
+
+##########################################################################
+ # Data Type.
+ # A Datatype is a complex data type declaration which contains other
+ # complex or simple data types.
+##########################################################################
+
+##########################################################################
+ # Artifact Type.
+ # An Artifact Type is a reusable entity that defines the type of one or more
+ # files which Node Types or Node Templates can have dependent relationships
+ # and used during operations such as during installation or deployment.
+##########################################################################
+
+##########################################################################
+ # Policy Type.
+ # TOSCA Policy Types represent logical grouping of TOSCA nodes that have
+ # an implied relationship and need to be orchestrated or managed together
+ # to achieve some result.
+##########################################################################
+
+##########################################################################
+ # Group Type
+ #
+##########################################################################
+group_types:
+ tosca.groups.nfv.VNFFG:
+ derived_from: tosca.groups.Root
+
+ properties:
+ vendor:
+ type: string
+ required: true
+ description: name of the vendor who generate this VNFFG
+
+ version:
+ type: string
+ required: true
+ description: version of this VNFFG
+
+ number_of_endpoints:
+ type: integer
+ required: true
+ description: count of the external endpoints included in this VNFFG
+
+ dependent_virtual_link:
+ type: list
+ entry_schema:
+ type: string
+ required: true
+ description: Reference to a VLD used in this Forwarding Graph
+
+ connection_point:
+ type: list
+ entry_schema:
+ type: string
+ required: true
+ description: Reference to Connection Points forming the VNFFG
+
+ constituent_vnfs:
+ type: list
+ entry_schema:
+ type: string
+ required: true
+ description: Reference to a list of VNFD used in this VNF Forwarding Graph
diff --git a/jtosca/src/main/resources/extensions/nfv/nfv.py b/jtosca/src/main/resources/extensions/nfv/nfv.py
new file mode 100644
index 0000000..0c7c2b9
--- /dev/null
+++ b/jtosca/src/main/resources/extensions/nfv/nfv.py
@@ -0,0 +1,19 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# VERSION and DEFS_FILE are required for all extensions
+
+VERSION = 'tosca_simple_profile_for_nfv_1_0_0'
+
+DEFS_FILE = "TOSCA_nfv_definition_1_0.yaml"
+
+SECTIONS = ('metadata',)