Diffstat (limited to 'jtosca')
-rw-r--r--  jtosca/pom.xml | 43
-rw-r--r--  jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/Capability.java | 121
-rw-r--r--  jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/DataEntity.java | 450
-rw-r--r--  jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/EntityTemplate.java | 832
-rw-r--r--  jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/Group.java | 137
-rw-r--r--  jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/ImportsLoader.java | 728
-rw-r--r--  jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/NodeTemplate.java | 755
-rw-r--r--  jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/Policy.java | 187
-rw-r--r--  jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/Property.java | 177
-rw-r--r--  jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/RelationshipTemplate.java | 199
-rw-r--r--  jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/Repository.java | 117
-rw-r--r--  jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/SubstitutionMappings.java | 520
-rw-r--r--  jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/TopologyTemplate.java | 857
-rw-r--r--  jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/ToscaGraph.java | 109
-rw-r--r--  jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/ToscaTemplate.java | 1002
-rw-r--r--  jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/Triggers.java | 183
-rw-r--r--  jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/UnsupportedType.java | 78
-rw-r--r--  jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/common/ExceptionCollector.java | 122
-rw-r--r--  jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/common/JToscaException.java | 27
-rw-r--r--  jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/common/TOSCAException.java | 39
-rw-r--r--  jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/ArtifactTypeDef.java | 105
-rw-r--r--  jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/AttributeDef.java | 40
-rw-r--r--  jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/CapabilityTypeDef.java | 222
-rw-r--r--  jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/DataType.java | 116
-rw-r--r--  jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/EntityType.java | 418
-rw-r--r--  jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/GroupType.java | 215
-rw-r--r--  jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/InterfacesDef.java | 228
-rw-r--r--  jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/Metadata.java | 35
-rw-r--r--  jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/NodeType.java | 523
-rw-r--r--  jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/PolicyType.java | 290
-rw-r--r--  jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/PortSpec.java | 160
-rw-r--r--  jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/PropertyDef.java | 231
-rw-r--r--  jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/RelationshipType.java | 103
-rw-r--r--  jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/ScalarUnit.java | 262
-rw-r--r--  jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/ScalarUnitFrequency.java | 14
-rw-r--r--  jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/ScalarUnitSize.java | 19
-rw-r--r--  jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/ScalarUnitTime.java | 17
-rw-r--r--  jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/StatefulEntityType.java | 220
-rw-r--r--  jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/TypeValidation.java | 151
-rw-r--r--  jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Constraint.java | 237
-rw-r--r--  jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Equal.java | 61
-rw-r--r--  jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/GreaterOrEqual.java | 113
-rw-r--r--  jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/GreaterThan.java | 102
-rw-r--r--  jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/InRange.java | 171
-rw-r--r--  jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Length.java | 79
-rw-r--r--  jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/LessOrEqual.java | 106
-rw-r--r--  jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/LessThan.java | 104
-rw-r--r--  jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/MaxLength.java | 90
-rw-r--r--  jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/MinLength.java | 90
-rw-r--r--  jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Pattern.java | 96
-rw-r--r--  jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Schema.java | 278
-rw-r--r--  jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/ValidValues.java | 84
-rw-r--r--  jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/extensions/ExtTools.java | 210
-rw-r--r--  jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/functions/Concat.java | 77
-rw-r--r--  jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/functions/Function.java | 191
-rw-r--r--  jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetAttribute.java | 535
-rw-r--r--  jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetInput.java | 110
-rw-r--r--  jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetOperationOutput.java | 225
-rw-r--r--  jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetProperty.java | 636
-rw-r--r--  jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/functions/Token.java | 112
-rw-r--r--  jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/parameters/Input.java | 226
-rw-r--r--  jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/parameters/Output.java | 109
-rw-r--r--  jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/prereq/CSAR.java | 782
-rw-r--r--  jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/utils/CopyUtils.java | 29
-rw-r--r--  jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/utils/DumpUtils.java | 55
-rw-r--r--  jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/utils/JToscaErrorCodes.java | 32
-rw-r--r--  jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/utils/TOSCAVersionProperty.java | 182
-rw-r--r--  jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/utils/ThreadLocalsHolder.java | 24
-rw-r--r--  jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/utils/UrlUtils.java | 123
-rw-r--r--  jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/utils/ValidateUtils.java | 409
-rw-r--r--  jtosca/src/main/resources/TOSCA_definition_1_0.yaml | 967
-rw-r--r--  jtosca/src/main/resources/extensions/nfv/TOSCA_nfv_definition_1_0.yaml | 240
-rw-r--r--  jtosca/src/main/resources/extensions/nfv/nfv.py | 19
-rw-r--r--  jtosca/src/test/java/org.openecomp.sdc.toscaparser/JToscaMetadataParse.java | 26
-rw-r--r--  jtosca/src/test/resources/csars/csar_hello_world.csar | bin 936 -> 0 bytes
-rw-r--r--  jtosca/src/test/resources/csars/service-ServiceFdnt-csar.csar | bin 40171 -> 0 bytes
76 files changed, 0 insertions, 16982 deletions
diff --git a/jtosca/pom.xml b/jtosca/pom.xml
deleted file mode 100644
index 8886012..0000000
--- a/jtosca/pom.xml
+++ /dev/null
@@ -1,43 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
- <modelVersion>4.0.0</modelVersion>
-
- <parent>
- <groupId>org.openecomp.sdc.sdc-distribution-client</groupId>
- <artifactId>sdc-main-distribution-client</artifactId>
- <version>1.1.14-SNAPSHOT</version>
- </parent>
-
- <artifactId>jtosca</artifactId>
- <version>0.1.7-SNAPSHOT</version>
-
- <dependencies>
-
- <!-- YAML parser -->
- <dependency>
- <groupId>org.yaml</groupId>
- <artifactId>snakeyaml</artifactId>
- <version>${snakeyaml.version}</version>
- <scope>compile</scope>
- </dependency>
-
- <dependency>
- <groupId>org.slf4j</groupId>
- <artifactId>slf4j-api</artifactId>
- <version>1.7.25</version>
- </dependency>
-
- <!-- <dependency>
- <groupId>ch.qos.logback</groupId>
- <artifactId>logback-classic</artifactId>
- <version>1.1.2</version>
- <scope>test</scope>
- </dependency> -->
-
- <dependency>
- <groupId>junit</groupId>
- <artifactId>junit</artifactId>
- <version>4.12</version>
- </dependency>
- </dependencies>
-</project> \ No newline at end of file
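
The pom above pulls in SnakeYAML as the module's YAML parser. As a rough illustration of how that dependency is typically exercised (a minimal sketch, not taken from the jtosca sources; the two-line document string is made up), SnakeYAML turns a TOSCA YAML document into plain Java maps and lists, which is the shape the parser classes below operate on:

import java.util.Map;
import org.yaml.snakeyaml.Yaml;

class YamlLoadSketch {
    @SuppressWarnings("unchecked")
    public static void main(String[] args) {
        // A made-up two-line TOSCA header, just to show the parsing step.
        String doc = "tosca_definitions_version: tosca_simple_yaml_1_0\n"
                   + "description: example service template\n";
        // SnakeYAML returns nested Map/List/scalar values for the document.
        Map<String, Object> tpl = (Map<String, Object>) new Yaml().load(doc);
        System.out.println(tpl.get("description"));
    }
}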
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/Capability.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/Capability.java
deleted file mode 100644
index 09571db..0000000
--- a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/Capability.java
+++ /dev/null
@@ -1,121 +0,0 @@
-package org.openecomp.sdc.toscaparser.api;
-
-import java.util.ArrayList;
-import java.util.LinkedHashMap;
-import java.util.Map;
-
-import org.openecomp.sdc.toscaparser.api.elements.CapabilityTypeDef;
-import org.openecomp.sdc.toscaparser.api.elements.PropertyDef;
-
-public class Capability {
-
- private String name;
- private LinkedHashMap<String,Object> _properties;
- private CapabilityTypeDef _definition;
-
- public Capability(String cname,
- LinkedHashMap<String,Object> cproperties,
- CapabilityTypeDef cdefinition) {
- name = cname;
- _properties = cproperties;
- _definition = cdefinition;
- }
-
- public ArrayList<Property> getPropertiesObjects() {
- // Return a list of property objects
- ArrayList<Property> properties = new ArrayList<Property>();
- LinkedHashMap<String,Object> props = _properties;
- if(props != null) {
- for(Map.Entry<String,Object> me: props.entrySet()) {
- String pname = me.getKey();
- Object pvalue = me.getValue();
-
- LinkedHashMap<String,PropertyDef> propsDef = _definition.getPropertiesDef();
- if(propsDef != null) {
- PropertyDef pd = (PropertyDef)propsDef.get(pname);
- if(pd != null) {
- properties.add(new Property(pname,pvalue,pd.getSchema(),null));
- }
- }
- }
- }
- return properties;
- }
-
- public LinkedHashMap<String,Property> getProperties() {
- // Return a dictionary of property name-object pairs
- LinkedHashMap<String,Property> npps = new LinkedHashMap<>();
- for(Property p: getPropertiesObjects()) {
- npps.put(p.getName(),p);
- }
- return npps;
- }
-
- public Object getPropertyValue(String pname) {
- // Return the value of a given property name
- LinkedHashMap<String,Property> props = getProperties();
- if(props != null && props.get(pname) != null) {
-			return props.get(pname).getValue();
- }
- return null;
- }
-
- public String getName() {
- return name;
- }
-
- public CapabilityTypeDef getDefinition() {
- return _definition;
- }
-
- // setter
- public void setProperty(String pname,Object pvalue) {
- _properties.put(pname,pvalue);
- }
-
- @Override
- public String toString() {
- return "Capability{" +
- "name='" + name + '\'' +
- ", _properties=" + _properties +
- ", _definition=" + _definition +
- '}';
- }
-}
-
-/*python
-
-from toscaparser.properties import Property
-
-
-class Capability(object):
- '''TOSCA built-in capabilities type.'''
-
- def __init__(self, name, properties, definition):
- self.name = name
- self._properties = properties
- self.definition = definition
-
- def get_properties_objects(self):
- '''Return a list of property objects.'''
- properties = []
- props = self._properties
- if props:
- for name, value in props.items():
- props_def = self.definition.get_properties_def()
- if props_def and name in props_def:
- properties.append(Property(name, value,
- props_def[name].schema))
- return properties
-
- def get_properties(self):
- '''Return a dictionary of property name-object pairs.'''
- return {prop.name: prop
- for prop in self.get_properties_objects()}
-
- def get_property_value(self, name):
- '''Return the value of a given property name.'''
- props = self.get_properties()
- if props and name in props:
- return props[name].value
-*/
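
The class above keeps the property list as the primary view and derives the name-keyed map from it; getPropertyValue() is then a lookup in that map by the requested name. A small self-contained sketch of the same pattern (plain Java, independent of the jtosca classes; the Prop class and property names are illustrative only):

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

class PropertyViewSketch {

    // Minimal stand-in for jtosca's Property: just a name/value pair.
    static final class Prop {
        final String name;
        final Object value;
        Prop(String name, Object value) { this.name = name; this.value = value; }
    }

    // Mirrors getPropertiesObjects(): the list of property objects is the primary view.
    static List<Prop> propertiesObjects(Map<String, Object> raw) {
        List<Prop> out = new ArrayList<>();
        raw.forEach((k, v) -> out.add(new Prop(k, v)));
        return out;
    }

    // Mirrors getProperties(): a name-keyed map derived from that list.
    static Map<String, Prop> properties(Map<String, Object> raw) {
        Map<String, Prop> byName = new LinkedHashMap<>();
        for (Prop p : propertiesObjects(raw)) {
            byName.put(p.name, p);
        }
        return byName;
    }

    public static void main(String[] args) {
        Map<String, Object> raw = new LinkedHashMap<>();
        raw.put("min_instances", 1);
        raw.put("max_instances", 3);
        // getPropertyValue(pname) is then just a lookup by the requested name.
        System.out.println(properties(raw).get("min_instances").value);
    }
}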
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/DataEntity.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/DataEntity.java
deleted file mode 100644
index 350068b..0000000
--- a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/DataEntity.java
+++ /dev/null
@@ -1,450 +0,0 @@
-package org.openecomp.sdc.toscaparser.api;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.LinkedHashMap;
-import java.util.List;
-
-import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector;
-import org.openecomp.sdc.toscaparser.api.elements.*;
-import org.openecomp.sdc.toscaparser.api.elements.constraints.Constraint;
-import org.openecomp.sdc.toscaparser.api.elements.constraints.Schema;
-import org.openecomp.sdc.toscaparser.api.functions.Function;
-import org.openecomp.sdc.toscaparser.api.utils.TOSCAVersionProperty;
-import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder;
-import org.openecomp.sdc.toscaparser.api.utils.ValidateUtils;
-
-public class DataEntity {
- // A complex data value entity
-
- private LinkedHashMap<String,Object> customDef;
- private DataType dataType;
- private LinkedHashMap<String,PropertyDef> schema;
- private Object value;
- private String propertyName;
-
- public DataEntity(String _dataTypeName,Object _valueDict,
- LinkedHashMap<String,Object> _customDef,String _propName) {
-
- customDef = _customDef;
- dataType = new DataType(_dataTypeName,_customDef);
- schema = dataType.getAllProperties();
- value = _valueDict;
- propertyName = _propName;
- }
-
- @SuppressWarnings("unchecked")
- public Object validate() {
- // Validate the value by the definition of the datatype
-
- // A datatype can not have both 'type' and 'properties' definitions.
- // If the datatype has 'type' definition
- if(dataType.getValueType() != null) {
- value = DataEntity.validateDatatype(dataType.getValueType(),value,null,customDef,null);
- Schema schemaCls = new Schema(propertyName,dataType.getDefs());
- for(Constraint constraint: schemaCls.getConstraints()) {
- constraint.validate(value);
- }
- }
- // If the datatype has 'properties' definition
- else {
- if(!(value instanceof LinkedHashMap)) {
- //ERROR under investigation
- ThreadLocalsHolder.getCollector().appendWarning(String.format(
- "TypeMismatchError: \"%s\" is not a map. The type is \"%s\"",
- value.toString(),dataType.getType()));
-
- if (value instanceof List && ((List) value).size() > 0) {
- value = ((List) value).get(0);
- }
-
- if (!(value instanceof LinkedHashMap)) {
- return value;
- }
- }
-
-
-
- LinkedHashMap<String,Object> valueDict = (LinkedHashMap<String,Object>)value;
- ArrayList<String> allowedProps = new ArrayList<>();
- ArrayList<String> requiredProps = new ArrayList<>();
- LinkedHashMap<String,Object> defaultProps = new LinkedHashMap<>();
- if(schema != null) {
- allowedProps.addAll(schema.keySet());
- for(String name: schema.keySet()) {
- PropertyDef propDef = schema.get(name);
- if(propDef.isRequired()) {
- requiredProps.add(name);
- }
- if(propDef.getDefault() != null) {
- defaultProps.put(name,propDef.getDefault());
- }
- }
- }
-
- // check allowed field
- for(String valueKey: valueDict.keySet()) {
-				//1710 develop JSON validation
- if(!("json").equals(dataType.getType()) && !allowedProps.contains(valueKey)) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "UnknownFieldError: Data value of type \"%s\" contains unknown field \"%s\"",
- dataType.getType(),valueKey));
- }
- }
-
- // check default field
- for(String defKey: defaultProps.keySet()) {
- Object defValue = defaultProps.get(defKey);
- if(valueDict.get(defKey) == null) {
- valueDict.put(defKey, defValue);
- }
-
- }
-
- // check missing field
- ArrayList<String> missingProp = new ArrayList<>();
- for(String reqKey: requiredProps) {
- if(!valueDict.keySet().contains(reqKey)) {
- missingProp.add(reqKey);
- }
- }
- if(missingProp.size() > 0) {
- ThreadLocalsHolder.getCollector().appendWarning(String.format(
- "MissingRequiredFieldError: Data value of type \"%s\" is missing required field(s) \"%s\"",
- dataType.getType(),missingProp.toString()));
- }
-
- // check every field
- for(String vname: valueDict.keySet()) {
- Object vvalue = valueDict.get(vname);
- LinkedHashMap<String,Object> schemaName = _findSchema(vname);
- if(schemaName == null) {
- continue;
- }
- Schema propSchema = new Schema(vname,schemaName);
- // check if field value meets type defined
- DataEntity.validateDatatype(propSchema.getType(),
- vvalue,
- propSchema.getEntrySchema(),
- customDef,
- null);
-
- // check if field value meets constraints defined
- if(propSchema.getConstraints() != null) {
- for(Constraint constraint: propSchema.getConstraints()) {
- if(vvalue instanceof ArrayList) {
- for(Object val: (ArrayList<Object>)vvalue) {
- constraint.validate(val);
- }
- }
- else {
- constraint.validate(vvalue);
- }
- }
- }
- }
- }
- return value;
- }
-
- private LinkedHashMap<String,Object> _findSchema(String name) {
- if(schema != null && schema.get(name) != null) {
- return schema.get(name).getSchema();
- }
- return null;
- }
-
- public static Object validateDatatype(String type,
- Object value,
- LinkedHashMap<String,Object> entrySchema,
- LinkedHashMap<String,Object> customDef,
- String propName) {
- // Validate value with given type
-
- // If type is list or map, validate its entry by entry_schema(if defined)
- // If type is a user-defined complex datatype, custom_def is required.
-
- if(Function.isFunction(value)) {
- return value;
- }
- else if (type == null) {
- //NOT ANALYZED
- ThreadLocalsHolder.getCollector().appendWarning(String.format(
- "MissingType: Type is missing for value \"%s\"",
- value.toString()));
- return value;
- }
- else if(type.equals(Schema.STRING)) {
- return ValidateUtils.validateString(value);
- }
- else if(type.equals(Schema.INTEGER)) {
- return ValidateUtils.validateInteger(value);
- }
- else if(type.equals(Schema.FLOAT)) {
- return ValidateUtils.validateFloat(value);
- }
- else if(type.equals(Schema.NUMBER)) {
- return ValidateUtils.validateNumeric(value);
- }
- else if(type.equals(Schema.BOOLEAN)) {
- return ValidateUtils.validateBoolean(value);
- }
- else if(type.equals(Schema.RANGE)) {
- return ValidateUtils.validateRange(value);
- }
- else if(type.equals(Schema.TIMESTAMP)) {
- ValidateUtils.validateTimestamp(value);
- return value;
- }
- else if(type.equals(Schema.LIST)) {
- ValidateUtils.validateList(value);
- if(entrySchema != null) {
- DataEntity.validateEntry(value,entrySchema,customDef);
- }
- return value;
- }
- else if(type.equals(Schema.SCALAR_UNIT_SIZE)) {
- return (new ScalarUnitSize(value)).validateScalarUnit();
- }
- else if(type.equals(Schema.SCALAR_UNIT_FREQUENCY)) {
- return (new ScalarUnitFrequency(value)).validateScalarUnit();
- }
- else if(type.equals(Schema.SCALAR_UNIT_TIME)) {
- return (new ScalarUnitTime(value)).validateScalarUnit();
- }
- else if(type.equals(Schema.VERSION)) {
- return (new TOSCAVersionProperty(value)).getVersion();
- }
- else if(type.equals(Schema.MAP)) {
- ValidateUtils.validateMap(value);
- if(entrySchema != null) {
- DataEntity.validateEntry(value,entrySchema,customDef);
- }
- return value;
- }
- else if(type.equals(Schema.PORTSPEC)) {
-			// TODO(TBD) bug 1567063, validate source & target as PortDef type
- // as complex types not just as integers
- PortSpec.validateAdditionalReq(value,propName,customDef);
- }
- else {
- DataEntity data = new DataEntity(type,value,customDef,null);
- return data.validate();
- }
-
- return value;
- }
-
- @SuppressWarnings("unchecked")
- public static Object validateEntry(Object value,
- LinkedHashMap<String,Object> entrySchema,
- LinkedHashMap<String,Object> customDef) {
-
- // Validate entries for map and list
- Schema schema = new Schema(null,entrySchema);
- Object valueob = value;
- ArrayList<Object> valueList = null;
- if(valueob instanceof LinkedHashMap) {
- valueList = new ArrayList<Object>(((LinkedHashMap<String,Object>)valueob).values());
- }
- else if(valueob instanceof ArrayList) {
- valueList = (ArrayList<Object>)valueob;
- }
- if(valueList != null) {
- for(Object v: valueList) {
- DataEntity.validateDatatype(schema.getType(),v,schema.getEntrySchema(),customDef,null);
- if(schema.getConstraints() != null) {
- for(Constraint constraint: schema.getConstraints()) {
- constraint.validate(v);
- }
- }
- }
- }
- return value;
- }
-
- @Override
- public String toString() {
- return "DataEntity{" +
- "customDef=" + customDef +
- ", dataType=" + dataType +
- ", schema=" + schema +
- ", value=" + value +
- ", propertyName='" + propertyName + '\'' +
- '}';
- }
-}
-
-/*python
-
-from toscaparser.common.exception import ExceptionCollector
-from toscaparser.common.exception import MissingRequiredFieldError
-from toscaparser.common.exception import TypeMismatchError
-from toscaparser.common.exception import UnknownFieldError
-from toscaparser.elements.constraints import Schema
-from toscaparser.elements.datatype import DataType
-from toscaparser.elements.portspectype import PortSpec
-from toscaparser.elements.scalarunit import ScalarUnit_Frequency
-from toscaparser.elements.scalarunit import ScalarUnit_Size
-from toscaparser.elements.scalarunit import ScalarUnit_Time
-from toscaparser.utils.gettextutils import _
-from toscaparser.utils import validateutils
-
-
-class DataEntity(object):
- '''A complex data value entity.'''
-
- def __init__(self, datatypename, value_dict, custom_def=None,
- prop_name=None):
- self.custom_def = custom_def
- self.datatype = DataType(datatypename, custom_def)
- self.schema = self.datatype.get_all_properties()
- self.value = value_dict
- self.property_name = prop_name
-
- def validate(self):
- '''Validate the value by the definition of the datatype.'''
-
- # A datatype can not have both 'type' and 'properties' definitions.
- # If the datatype has 'type' definition
- if self.datatype.value_type:
- self.value = DataEntity.validate_datatype(self.datatype.value_type,
- self.value,
- None,
- self.custom_def)
- schema = Schema(self.property_name, self.datatype.defs)
- for constraint in schema.constraints:
- constraint.validate(self.value)
- # If the datatype has 'properties' definition
- else:
- if not isinstance(self.value, dict):
- ExceptionCollector.appendException(
- TypeMismatchError(what=self.value,
- type=self.datatype.type))
- allowed_props = []
- required_props = []
- default_props = {}
- if self.schema:
- allowed_props = self.schema.keys()
- for name, prop_def in self.schema.items():
- if prop_def.required:
- required_props.append(name)
- if prop_def.default:
- default_props[name] = prop_def.default
-
- # check allowed field
- for value_key in list(self.value.keys()):
- if value_key not in allowed_props:
- ExceptionCollector.appendException(
- UnknownFieldError(what=(_('Data value of type "%s"')
- % self.datatype.type),
- field=value_key))
-
- # check default field
- for def_key, def_value in list(default_props.items()):
- if def_key not in list(self.value.keys()):
- self.value[def_key] = def_value
-
- # check missing field
- missingprop = []
- for req_key in required_props:
- if req_key not in list(self.value.keys()):
- missingprop.append(req_key)
- if missingprop:
- ExceptionCollector.appendException(
- MissingRequiredFieldError(
- what=(_('Data value of type "%s"')
- % self.datatype.type), required=missingprop))
-
- # check every field
- for name, value in list(self.value.items()):
- schema_name = self._find_schema(name)
- if not schema_name:
- continue
- prop_schema = Schema(name, schema_name)
- # check if field value meets type defined
- DataEntity.validate_datatype(prop_schema.type, value,
- prop_schema.entry_schema,
- self.custom_def)
- # check if field value meets constraints defined
- if prop_schema.constraints:
- for constraint in prop_schema.constraints:
- if isinstance(value, list):
- for val in value:
- constraint.validate(val)
- else:
- constraint.validate(value)
-
- return self.value
-
- def _find_schema(self, name):
- if self.schema and name in self.schema.keys():
- return self.schema[name].schema
-
- @staticmethod
- def validate_datatype(type, value, entry_schema=None, custom_def=None,
- prop_name=None):
- '''Validate value with given type.
-
- If type is list or map, validate its entry by entry_schema(if defined)
- If type is a user-defined complex datatype, custom_def is required.
- '''
- from toscaparser.functions import is_function
- if is_function(value):
- return value
- if type == Schema.STRING:
- return validateutils.validate_string(value)
- elif type == Schema.INTEGER:
- return validateutils.validate_integer(value)
- elif type == Schema.FLOAT:
- return validateutils.validate_float(value)
- elif type == Schema.NUMBER:
- return validateutils.validate_numeric(value)
- elif type == Schema.BOOLEAN:
- return validateutils.validate_boolean(value)
- elif type == Schema.RANGE:
- return validateutils.validate_range(value)
- elif type == Schema.TIMESTAMP:
- validateutils.validate_timestamp(value)
- return value
- elif type == Schema.LIST:
- validateutils.validate_list(value)
- if entry_schema:
- DataEntity.validate_entry(value, entry_schema, custom_def)
- return value
- elif type == Schema.SCALAR_UNIT_SIZE:
- return ScalarUnit_Size(value).validate_scalar_unit()
- elif type == Schema.SCALAR_UNIT_FREQUENCY:
- return ScalarUnit_Frequency(value).validate_scalar_unit()
- elif type == Schema.SCALAR_UNIT_TIME:
- return ScalarUnit_Time(value).validate_scalar_unit()
- elif type == Schema.VERSION:
- return validateutils.TOSCAVersionProperty(value).get_version()
- elif type == Schema.MAP:
- validateutils.validate_map(value)
- if entry_schema:
- DataEntity.validate_entry(value, entry_schema, custom_def)
- return value
- elif type == Schema.PORTSPEC:
- # tODO(TBD) bug 1567063, validate source & target as PortDef type
- # as complex types not just as integers
- PortSpec.validate_additional_req(value, prop_name, custom_def)
- else:
- data = DataEntity(type, value, custom_def)
- return data.validate()
-
- @staticmethod
- def validate_entry(value, entry_schema, custom_def=None):
- '''Validate entries for map and list.'''
- schema = Schema(None, entry_schema)
- valuelist = value
- if isinstance(value, dict):
- valuelist = list(value.values())
- for v in valuelist:
- DataEntity.validate_datatype(schema.type, v, schema.entry_schema,
- custom_def)
- if schema.constraints:
- for constraint in schema.constraints:
- constraint.validate(v)
- return value
-*/ \ No newline at end of file
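
validateDatatype() above dispatches on the declared type name: primitives go straight to ValidateUtils, list and map values are validated entry by entry against the entry schema, and any other name is treated as a complex datatype and validated recursively. A simplified, self-contained sketch of that dispatch (not the jtosca implementation; the type names, exceptions, and sample values are illustrative):

import java.util.List;
import java.util.Map;

class TypeDispatchSketch {

    // Validate a value against a declared type name; entryType applies to list/map entries.
    static Object validate(String type, Object value, String entryType) {
        switch (type) {
            case "string":
                if (!(value instanceof String)) {
                    throw new IllegalArgumentException(value + " is not a string");
                }
                return value;
            case "integer":
                if (!(value instanceof Integer)) {
                    throw new IllegalArgumentException(value + " is not an integer");
                }
                return value;
            case "list":
                if (!(value instanceof List)) {
                    throw new IllegalArgumentException(value + " is not a list");
                }
                if (entryType != null) {
                    for (Object v : (List<?>) value) {
                        validate(entryType, v, null);   // recurse into each entry
                    }
                }
                return value;
            case "map":
                if (!(value instanceof Map)) {
                    throw new IllegalArgumentException(value + " is not a map");
                }
                if (entryType != null) {
                    for (Object v : ((Map<?, ?>) value).values()) {
                        validate(entryType, v, null);   // recurse into each entry value
                    }
                }
                return value;
            default:
                // A real parser treats unknown names as complex datatypes and validates
                // every field against that datatype's property schema, as DataEntity does.
                return value;
        }
    }

    public static void main(String[] args) {
        validate("list", List.of(1, 2, 3), "integer");
        validate("map", Map.of("cpus", 2, "mem", 4), "integer");
        System.out.println("both values validate");
    }
}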
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/EntityTemplate.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/EntityTemplate.java
deleted file mode 100644
index e896905..0000000
--- a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/EntityTemplate.java
+++ /dev/null
@@ -1,832 +0,0 @@
-package org.openecomp.sdc.toscaparser.api;
-
-import java.util.ArrayList;
-import java.util.LinkedHashMap;
-import java.util.Map;
-
-import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector;
-import org.openecomp.sdc.toscaparser.api.elements.*;
-import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder;
-
-public abstract class EntityTemplate {
- // Base class for TOSCA templates
-
- protected static final String DERIVED_FROM = "derived_from";
- protected static final String PROPERTIES = "properties";
- protected static final String REQUIREMENTS = "requirements";
- protected static final String INTERFACES = "interfaces";
- protected static final String CAPABILITIES = "capabilities";
- protected static final String TYPE = "type";
- protected static final String DESCRIPTION = "description";
- protected static final String DIRECTIVES = "directives";
- protected static final String ATTRIBUTES = "attributes";
- protected static final String ARTIFACTS = "artifacts";
- protected static final String NODE_FILTER = "node_filter";
- protected static final String COPY = "copy";
-
- protected static final String SECTIONS[] = {
- DERIVED_FROM, PROPERTIES, REQUIREMENTS,INTERFACES,
- CAPABILITIES, TYPE, DESCRIPTION, DIRECTIVES,
- ATTRIBUTES, ARTIFACTS, NODE_FILTER, COPY};
-
- private static final String NODE = "node";
- private static final String CAPABILITY = "capability";
- private static final String RELATIONSHIP = "relationship";
- private static final String OCCURRENCES = "occurrences";
-
- protected static final String REQUIREMENTS_SECTION[] = {
- NODE, CAPABILITY, RELATIONSHIP, OCCURRENCES, NODE_FILTER};
-
- //# Special key names
- private static final String METADATA = "metadata";
- protected static final String SPECIAL_SECTIONS[] = {METADATA};
-
- protected String name;
- protected LinkedHashMap<String,Object> entityTpl;
- protected LinkedHashMap<String,Object> customDef;
- protected StatefulEntityType typeDefinition;
- private ArrayList<Property> _properties;
- private ArrayList<InterfacesDef> _interfaces;
- private ArrayList<Object> _requirements;
- private ArrayList<Capability> _capabilities;
-
- // dummy constructor for subclasses that don't want super
- public EntityTemplate() {
- return;
- }
-
- @SuppressWarnings("unchecked")
- public EntityTemplate(String _name,
- LinkedHashMap<String,Object> _template,
- String _entityName,
- LinkedHashMap<String,Object> _customDef) {
- name = _name;
- entityTpl = _template;
- customDef = _customDef;
- _validateField(entityTpl);
- String type = (String)entityTpl.get("type");
- UnsupportedType.validateType(type);
- if(_entityName.equals("node_type")) {
- if(type != null) {
- typeDefinition = new NodeType(type, customDef);
- }
- else {
- typeDefinition = null;
- }
- }
- if(_entityName.equals("relationship_type")) {
- Object relationship = _template.get("relationship");
- type = null;
- if(relationship != null && relationship instanceof LinkedHashMap) {
- type = (String)((LinkedHashMap<String,Object>)relationship).get("type");
- }
- else if(relationship instanceof String) {
- type = (String)entityTpl.get("relationship");
- }
- else {
- type = (String)entityTpl.get("type");
- }
- UnsupportedType.validateType(type);
- typeDefinition = new RelationshipType(type,null, customDef);
- }
- if(_entityName.equals("policy_type")) {
- if(type == null) {
- //msg = (_('Policy definition of "%(pname)s" must have'
- // ' a "type" ''attribute.') % dict(pname=name))
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "ValidationError: Policy definition of \"%s\" must have a \"type\" attribute",name));
- }
- typeDefinition = new PolicyType(type, customDef);
- }
- if(_entityName.equals("group_type")) {
- if(type != null) {
- typeDefinition = new GroupType(type, customDef);
- }
- else {
- typeDefinition = null;
- }
- }
- _properties = null;
- _interfaces = null;
- _requirements = null;
- _capabilities = null;
- }
-
- public String getType() {
- if(typeDefinition != null) {
- String clType = typeDefinition.getClass().getSimpleName();
- if(clType.equals("NodeType")) {
- return (String)((NodeType)typeDefinition).getType();
- }
- else if(clType.equals("PolicyType")) {
- return (String)((PolicyType)typeDefinition).getType();
- }
- else if(clType.equals("GroupType")) {
- return (String)((GroupType)typeDefinition).getType();
- }
- else if(clType.equals("RelationshipType")) {
- return (String)((RelationshipType)typeDefinition).getType();
- }
- }
- return null;
- }
-
- public Object getParentType() {
- if(typeDefinition != null) {
- String clType = typeDefinition.getClass().getSimpleName();
- if(clType.equals("NodeType")) {
- return ((NodeType)typeDefinition).getParentType();
- }
- else if(clType.equals("PolicyType")) {
- return ((PolicyType)typeDefinition).getParentType();
- }
- else if(clType.equals("GroupType")) {
- return ((GroupType)typeDefinition).getParentType();
- }
- else if(clType.equals("RelationshipType")) {
- return ((RelationshipType)typeDefinition).getParentType();
- }
- }
- return null;
- }
-
- @SuppressWarnings("unchecked")
- public ArrayList<Object> getRequirements() {
- if(_requirements == null) {
- _requirements = new ArrayList<Object>();
- Object ob = ((EntityType)typeDefinition).getValue(REQUIREMENTS,entityTpl,false);
- if(ob != null) {
- _requirements.addAll((ArrayList<Object>)ob);
- }
-
- }
- return _requirements;
- }
-
- public ArrayList<Property> getPropertiesObjects() {
- // Return properties objects for this template
- if(_properties ==null) {
- _properties = _createProperties();
- }
- return _properties;
- }
-
- public LinkedHashMap<String,Property> getProperties() {
- LinkedHashMap<String,Property> props = new LinkedHashMap<>();
- for(Property po: getPropertiesObjects()) {
- props.put(((Property)po).getName(),po);
- }
- return props;
- }
-
- public Object getPropertyValue(String name) {
- LinkedHashMap<String,Property> props = getProperties();
- Property p = (Property)props.get(name);
- return p != null ? p.getValue() : null;
- }
-
- public ArrayList<InterfacesDef> getInterfaces() {
- if(_interfaces == null) {
- _interfaces = _createInterfaces();
- }
- return _interfaces;
- }
-
- public ArrayList<Capability> getCapabilitiesObjects() {
- // Return capabilities objects for this template
- if(_capabilities == null) {
- _capabilities = _createCapabilities();
- }
- return _capabilities;
-
- }
-
- public LinkedHashMap<String,Capability> getCapabilities() {
- LinkedHashMap<String,Capability> caps = new LinkedHashMap<String,Capability>();
- for(Capability cap: getCapabilitiesObjects()) {
- caps.put(cap.getName(),cap);
- }
- return caps;
- }
-
- public boolean isDerivedFrom(String typeStr) {
- // Returns true if this object is derived from 'type_str'.
- // False otherwise
-
- if(getType() == null) {
- return false;
- }
- else if(getType().equals(typeStr)) {
- return true;
- }
- else if(getParentType() != null) {
- return ((EntityType)getParentType()).isDerivedFrom(typeStr);
- }
- return false;
- }
-
- @SuppressWarnings("unchecked")
- private ArrayList<Capability> _createCapabilities() {
- ArrayList<Capability> capability = new ArrayList<Capability>();
- LinkedHashMap<String,Object> caps = (LinkedHashMap<String,Object>)
- ((EntityType)typeDefinition).getValue(CAPABILITIES,entityTpl,true);
- if(caps != null) {
- //?!? getCapabilities defined only for NodeType...
- LinkedHashMap<String,CapabilityTypeDef> capabilities = ((NodeType)typeDefinition).getCapabilities();
- for(Map.Entry<String,Object> me: caps.entrySet()) {
-				String name = me.getKey();
- LinkedHashMap<String,Object> props = (LinkedHashMap<String,Object>)me.getValue();
- if(capabilities.get(name) != null) {
- CapabilityTypeDef c = capabilities.get(name); // a CapabilityTypeDef
- LinkedHashMap<String,Object> properties = new LinkedHashMap<String,Object>();
- // first use the definition default value
- LinkedHashMap<String,Object> cprops = c.getProperties();
- if(cprops != null) {
- for(Map.Entry<String,Object> cpe: cprops.entrySet()) {
- String propertyName = cpe.getKey();
- LinkedHashMap<String,Object> propertyDef = (LinkedHashMap<String,Object>)cpe.getValue();
- Object dob = propertyDef.get("default");
- if(dob != null) {
- properties.put(propertyName, dob);
-
- }
- }
- }
- // then update (if available) with the node properties
- LinkedHashMap<String,Object> pp = (LinkedHashMap<String,Object>)props.get("properties");
- if(pp != null) {
- properties.putAll(pp);
- }
- Capability cap = new Capability(name, properties, c);
- capability.add(cap);
- }
- }
- }
- return capability;
- }
-
- protected void _validateProperties(LinkedHashMap<String,Object> template,StatefulEntityType entityType) {
- @SuppressWarnings("unchecked")
- LinkedHashMap<String,Object> properties = (LinkedHashMap<String,Object>)entityType.getValue(PROPERTIES,template,false);
- _commonValidateProperties(entityType,properties);
- }
-
- protected void _validateCapabilities() {
- //BUG??? getCapabilities only defined in NodeType...
- LinkedHashMap<String,CapabilityTypeDef> typeCapabilities = ((NodeType)typeDefinition).getCapabilities();
- ArrayList<String> allowedCaps = new ArrayList<String>();
- if(typeCapabilities != null) {
- allowedCaps.addAll(typeCapabilities.keySet());
- }
- @SuppressWarnings("unchecked")
- LinkedHashMap<String,Object> capabilities = (LinkedHashMap<String,Object>)
- ((EntityType)typeDefinition).getValue(CAPABILITIES, entityTpl, false);
- if(capabilities != null) {
- _commonValidateField(capabilities, allowedCaps, "capabilities");
- _validateCapabilitiesProperties(capabilities);
- }
- }
-
- @SuppressWarnings("unchecked")
- private void _validateCapabilitiesProperties(LinkedHashMap<String,Object> capabilities) {
- for(Map.Entry<String,Object> me: capabilities.entrySet()) {
- String cap = me.getKey();
- LinkedHashMap<String,Object> props = (LinkedHashMap<String,Object>)me.getValue();
- Capability capability = getCapability(cap);
- if(capability == null) {
- continue;
- }
- CapabilityTypeDef capabilitydef = capability.getDefinition();
- _commonValidateProperties(capabilitydef,(LinkedHashMap<String,Object>)props.get(PROPERTIES));
-
- // validating capability properties values
- for(Property prop: getCapability(cap).getPropertiesObjects()) {
- prop.validate();
-
- if(cap.equals("scalable") && prop.getName().equals("default_instances")) {
- LinkedHashMap<String,Object> propDict = (LinkedHashMap<String,Object>)props.get(PROPERTIES);
- int minInstances = (int)propDict.get("min_instances");
- int maxInstances = (int)propDict.get("max_instances");
- int defaultInstances = (int)propDict.get("default_instances");
- if(defaultInstances < minInstances || defaultInstances > maxInstances) {
- //err_msg = ('"properties" of template "%s": '
- // '"default_instances" value is not between '
- // '"min_instances" and "max_instances".' %
- // self.name)
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "ValidationError: \"properties\" of template \"%s\": \"default_instances\" value is not between \"min_instances\" and \"max_instances\"",
- name));
- }
- }
- }
- }
- }
-
- private void _commonValidateProperties(StatefulEntityType entityType,LinkedHashMap<String,Object> properties) {
- ArrayList<String> allowedProps = new ArrayList<String>();
- ArrayList<String> requiredProps = new ArrayList<String>();
- for(PropertyDef p: entityType.getPropertiesDefObjects()) {
- allowedProps.add(p.getName());
- // If property is 'required' and has no 'default' value then record
- if(p.isRequired() && p.getDefault() == null) {
- requiredProps.add(p.getName());
- }
- }
- // validate all required properties have values
- if(properties != null) {
- ArrayList<String> reqPropsNoValueOrDefault = new ArrayList<String>();
- _commonValidateField(properties, allowedProps, "properties");
- // make sure it's not missing any property required by a tosca type
- for(String r: requiredProps) {
- if(properties.get(r) == null) {
- reqPropsNoValueOrDefault.add(r);
- }
- }
- // Required properties found without value or a default value
- if(!reqPropsNoValueOrDefault.isEmpty()) {
- ThreadLocalsHolder.getCollector().appendWarning(String.format(
- "MissingRequiredFieldError: properties of template \"%s\" are missing field(s): %s",
- name,reqPropsNoValueOrDefault.toString()));
- }
- }
- else {
- // Required properties in schema, but not in template
- if(!requiredProps.isEmpty()) {
- ThreadLocalsHolder.getCollector().appendWarning(String.format(
- "MissingRequiredFieldError2: properties of template \"%s\" are missing field(s): %s",
- name,requiredProps.toString()));
- }
- }
- }
-
- @SuppressWarnings("unchecked")
- private void _validateField(LinkedHashMap<String,Object> template) {
- if(!(template instanceof LinkedHashMap)) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "MissingRequiredFieldError: Template \"%s\" is missing required field \"%s\"",name,TYPE));
- return;//???
- }
- boolean bBad = false;
- Object relationship = ((LinkedHashMap<String,Object>)template).get("relationship");
- if(relationship != null) {
- if(!(relationship instanceof String)) {
- bBad = (((LinkedHashMap<String,Object>)relationship).get(TYPE) == null);
- }
- else if(relationship instanceof String) {
- bBad = (template.get("relationship") == null);
- }
- }
- else {
- bBad = (template.get(TYPE) == null);
- }
- if(bBad) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "MissingRequiredFieldError: Template \"%s\" is missing required field \"%s\"",name,TYPE));
- }
- }
-
- protected void _commonValidateField(LinkedHashMap<String,Object> schema, ArrayList<String> allowedList,String section) {
- for(String sname: schema.keySet()) {
- boolean bFound = false;
- for(String allowed: allowedList) {
- if(sname.equals(allowed)) {
- bFound = true;
- break;
- }
- }
- if(!bFound) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "UnknownFieldError: Section \"%s\" of template \"%s\" contains unknown field \"%s\"",section,name,sname));
- }
- }
-
- }
-
- @SuppressWarnings("unchecked")
- private ArrayList<Property> _createProperties() {
- ArrayList<Property> props = new ArrayList<Property>();
- LinkedHashMap<String,Object> properties = (LinkedHashMap<String,Object>)
- ((EntityType)typeDefinition).getValue(PROPERTIES,entityTpl,false);
- if(properties == null) {
- properties = new LinkedHashMap<String,Object>();
- }
- for(Map.Entry<String,Object> me: properties.entrySet()) {
- String pname = me.getKey();
- Object pvalue = me.getValue();
- LinkedHashMap<String,PropertyDef> propsDef = ((StatefulEntityType)typeDefinition).getPropertiesDef();
- if(propsDef != null && propsDef.get(pname) != null) {
- PropertyDef pd = (PropertyDef)propsDef.get(pname);
- Property prop = new Property(pname,pvalue,pd.getSchema(),customDef);
- props.add(prop);
- }
- }
- ArrayList<PropertyDef> pds = ((StatefulEntityType)typeDefinition).getPropertiesDefObjects();
- for(Object pdo: pds) {
- PropertyDef pd = (PropertyDef)pdo;
- if(pd.getDefault() != null && properties.get(pd.getName()) == null) {
- Property prop = new Property(pd.getName(),pd.getDefault(),pd.getSchema(),customDef);
- props.add(prop);
- }
- }
- return props;
- }
-
- @SuppressWarnings("unchecked")
- private ArrayList<InterfacesDef> _createInterfaces() {
- ArrayList<InterfacesDef> interfaces = new ArrayList<>();
- LinkedHashMap<String,Object> typeInterfaces = new LinkedHashMap<String,Object>();
- if(typeDefinition instanceof RelationshipType) {
- if(entityTpl instanceof LinkedHashMap) {
- typeInterfaces = (LinkedHashMap<String,Object>)entityTpl.get(INTERFACES);
- if(typeInterfaces == null) {
- for(String relName: entityTpl.keySet()) {
- Object relValue = entityTpl.get(relName);
- if(!relName.equals("type")) {
- Object relDef = relValue;
- LinkedHashMap<String,Object> rel = null;
- if(relDef instanceof LinkedHashMap) {
- Object relob = ((LinkedHashMap<String,Object>)relDef).get("relationship");
- if(relob instanceof LinkedHashMap) {
- rel = (LinkedHashMap<String,Object>)relob;
- }
- }
- if(rel != null) {
- if(rel.get(INTERFACES) != null) {
- typeInterfaces = (LinkedHashMap<String,Object>)rel.get(INTERFACES);
- break;
- }
- }
- }
- }
- }
- }
- }
- else {
- typeInterfaces = (LinkedHashMap<String,Object>)
- ((EntityType)typeDefinition).getValue(INTERFACES,entityTpl,false);
- }
- if(typeInterfaces != null) {
- for(Map.Entry<String,Object> me: typeInterfaces.entrySet()) {
- String interfaceType = me.getKey();
- LinkedHashMap<String,Object> value = (LinkedHashMap<String,Object>)me.getValue();
- for(Map.Entry<String,Object> ve: value.entrySet()) {
- String op = ve.getKey();
- Object opDef = ve.getValue();
- InterfacesDef iface = new InterfacesDef((EntityType)typeDefinition,
- interfaceType,
- this,
- op,
- opDef);
- interfaces.add(iface);
- }
-
- }
- }
- return interfaces;
- }
-
- public Capability getCapability(String name) {
- // Provide named capability
- // :param name: name of capability
- // :return: capability object if found, None otherwise
- LinkedHashMap<String,Capability> caps = getCapabilities();
- if(caps != null) {
- return caps.get(name);
- }
- return null;
- }
-
- // getter
- public String getName() {
- return name;
- }
-
- public StatefulEntityType getTypeDefinition() {
- return typeDefinition;
- }
-
- public LinkedHashMap<String,Object> getCustomDef() {
- return customDef;
- }
-
- @Override
- public String toString() {
- return "EntityTemplate{" +
- "name='" + name + '\'' +
- ", entityTpl=" + entityTpl +
- ", customDef=" + customDef +
- ", typeDefinition=" + typeDefinition +
- ", _properties=" + _properties +
- ", _interfaces=" + _interfaces +
- ", _requirements=" + _requirements +
- ", _capabilities=" + _capabilities +
- '}';
- }
-}
-
-/*python
-
-class EntityTemplate(object):
- '''Base class for TOSCA templates.'''
-
- SECTIONS = (DERIVED_FROM, PROPERTIES, REQUIREMENTS,
- INTERFACES, CAPABILITIES, TYPE, DESCRIPTION, DIRECTIVES,
- ATTRIBUTES, ARTIFACTS, NODE_FILTER, COPY) = \
- ('derived_from', 'properties', 'requirements', 'interfaces',
- 'capabilities', 'type', 'description', 'directives',
- 'attributes', 'artifacts', 'node_filter', 'copy')
- REQUIREMENTS_SECTION = (NODE, CAPABILITY, RELATIONSHIP, OCCURRENCES, NODE_FILTER) = \
- ('node', 'capability', 'relationship',
- 'occurrences', 'node_filter')
- # Special key names
- SPECIAL_SECTIONS = (METADATA) = ('metadata')
-
- def __init__(self, name, template, entity_name, custom_def=None):
- self.name = name
- self.entity_tpl = template
- self.custom_def = custom_def
- self._validate_field(self.entity_tpl)
- type = self.entity_tpl.get('type')
- UnsupportedType.validate_type(type)
- if entity_name == 'node_type':
- self.type_definition = NodeType(type, custom_def) \
- if type is not None else None
- if entity_name == 'relationship_type':
- relationship = template.get('relationship')
- type = None
- if relationship and isinstance(relationship, dict):
- type = relationship.get('type')
- elif isinstance(relationship, str):
- type = self.entity_tpl['relationship']
- else:
- type = self.entity_tpl['type']
- UnsupportedType.validate_type(type)
- self.type_definition = RelationshipType(type,
- None, custom_def)
- if entity_name == 'policy_type':
- if not type:
- msg = (_('Policy definition of "%(pname)s" must have'
- ' a "type" ''attribute.') % dict(pname=name))
- ExceptionCollector.appendException(
- ValidationError(msg))
-
- self.type_definition = PolicyType(type, custom_def)
- if entity_name == 'group_type':
- self.type_definition = GroupType(type, custom_def) \
- if type is not None else None
- self._properties = None
- self._interfaces = None
- self._requirements = None
- self._capabilities = None
-
- @property
- def type(self):
- if self.type_definition:
- return self.type_definition.type
-
- @property
- def parent_type(self):
- if self.type_definition:
- return self.type_definition.parent_type
-
- @property
- def requirements(self):
- if self._requirements is None:
- self._requirements = self.type_definition.get_value(
- self.REQUIREMENTS,
- self.entity_tpl) or []
- return self._requirements
-
- def get_properties_objects(self):
- '''Return properties objects for this template.'''
- if self._properties is None:
- self._properties = self._create_properties()
- return self._properties
-
- def get_properties(self):
- '''Return a dictionary of property name-object pairs.'''
- return {prop.name: prop
- for prop in self.get_properties_objects()}
-
- def get_property_value(self, name):
- '''Return the value of a given property name.'''
- props = self.get_properties()
- if props and name in props.keys():
- return props[name].value
-
- @property
- def interfaces(self):
- if self._interfaces is None:
- self._interfaces = self._create_interfaces()
- return self._interfaces
-
- def get_capabilities_objects(self):
- '''Return capabilities objects for this template.'''
- if not self._capabilities:
- self._capabilities = self._create_capabilities()
- return self._capabilities
-
- def get_capabilities(self):
- '''Return a dictionary of capability name-object pairs.'''
- return {cap.name: cap
- for cap in self.get_capabilities_objects()}
-
- def is_derived_from(self, type_str):
- '''Check if object inherits from the given type.
-
- Returns true if this object is derived from 'type_str'.
- False otherwise.
- '''
- if not self.type:
- return False
- elif self.type == type_str:
- return True
- elif self.parent_type:
- return self.parent_type.is_derived_from(type_str)
- else:
- return False
-
- def _create_capabilities(self):
- capability = []
- caps = self.type_definition.get_value(self.CAPABILITIES,
- self.entity_tpl, True)
- if caps:
- for name, props in caps.items():
- capabilities = self.type_definition.get_capabilities()
- if name in capabilities.keys():
- c = capabilities[name]
- properties = {}
- # first use the definition default value
- if c.properties:
- for property_name in c.properties.keys():
- prop_def = c.properties[property_name]
- if 'default' in prop_def:
- properties[property_name] = prop_def['default']
- # then update (if available) with the node properties
- if 'properties' in props and props['properties']:
- properties.update(props['properties'])
-
- cap = Capability(name, properties, c)
- capability.append(cap)
- return capability
-
- def _validate_properties(self, template, entitytype):
- properties = entitytype.get_value(self.PROPERTIES, template)
- self._common_validate_properties(entitytype, properties)
-
- def _validate_capabilities(self):
- type_capabilities = self.type_definition.get_capabilities()
- allowed_caps = \
- type_capabilities.keys() if type_capabilities else []
- capabilities = self.type_definition.get_value(self.CAPABILITIES,
- self.entity_tpl)
- if capabilities:
- self._common_validate_field(capabilities, allowed_caps,
- 'capabilities')
- self._validate_capabilities_properties(capabilities)
-
- def _validate_capabilities_properties(self, capabilities):
- for cap, props in capabilities.items():
- capability = self.get_capability(cap)
- if not capability:
- continue
- capabilitydef = capability.definition
- self._common_validate_properties(capabilitydef,
- props[self.PROPERTIES])
-
- # validating capability properties values
- for prop in self.get_capability(cap).get_properties_objects():
- prop.validate()
-
- # tODO(srinivas_tadepalli): temporary work around to validate
- # default_instances until standardized in specification
- if cap == "scalable" and prop.name == "default_instances":
- prop_dict = props[self.PROPERTIES]
- min_instances = prop_dict.get("min_instances")
- max_instances = prop_dict.get("max_instances")
- default_instances = prop_dict.get("default_instances")
- if not (min_instances <= default_instances
- <= max_instances):
- err_msg = ('"properties" of template "%s": '
- '"default_instances" value is not between '
- '"min_instances" and "max_instances".' %
- self.name)
- ExceptionCollector.appendException(
- ValidationError(message=err_msg))
-
- def _common_validate_properties(self, entitytype, properties):
- allowed_props = []
- required_props = []
- for p in entitytype.get_properties_def_objects():
- allowed_props.append(p.name)
- # If property is 'required' and has no 'default' value then record
- if p.required and p.default is None:
- required_props.append(p.name)
- # validate all required properties have values
- if properties:
- req_props_no_value_or_default = []
- self._common_validate_field(properties, allowed_props,
- 'properties')
- # make sure it's not missing any property required by a tosca type
- for r in required_props:
- if r not in properties.keys():
- req_props_no_value_or_default.append(r)
- # Required properties found without value or a default value
- if req_props_no_value_or_default:
- ExceptionCollector.appendException(
- MissingRequiredFieldError(
- what='"properties" of template "%s"' % self.name,
- required=req_props_no_value_or_default))
- else:
- # Required properties in schema, but not in template
- if required_props:
- ExceptionCollector.appendException(
- MissingRequiredFieldError(
- what='"properties" of template "%s"' % self.name,
- required=required_props))
-
- def _validate_field(self, template):
- if not isinstance(template, dict):
- ExceptionCollector.appendException(
- MissingRequiredFieldError(
- what='Template "%s"' % self.name, required=self.TYPE))
- try:
- relationship = template.get('relationship')
- if relationship and not isinstance(relationship, str):
- relationship[self.TYPE]
- elif isinstance(relationship, str):
- template['relationship']
- else:
- template[self.TYPE]
- except KeyError:
- ExceptionCollector.appendException(
- MissingRequiredFieldError(
- what='Template "%s"' % self.name, required=self.TYPE))
-
- def _common_validate_field(self, schema, allowedlist, section):
- for name in schema:
- if name not in allowedlist:
- ExceptionCollector.appendException(
- UnknownFieldError(
- what=('"%(section)s" of template "%(nodename)s"'
- % {'section': section, 'nodename': self.name}),
- field=name))
-
- def _create_properties(self):
- props = []
- properties = self.type_definition.get_value(self.PROPERTIES,
- self.entity_tpl) or {}
- for name, value in properties.items():
- props_def = self.type_definition.get_properties_def()
- if props_def and name in props_def:
- prop = Property(name, value,
- props_def[name].schema, self.custom_def)
- props.append(prop)
- for p in self.type_definition.get_properties_def_objects():
- if p.default is not None and p.name not in properties.keys():
- prop = Property(p.name, p.default, p.schema, self.custom_def)
- props.append(prop)
- return props
-
- def _create_interfaces(self):
- interfaces = []
- type_interfaces = None
- if isinstance(self.type_definition, RelationshipType):
- if isinstance(self.entity_tpl, dict):
- if self.INTERFACES in self.entity_tpl:
- type_interfaces = self.entity_tpl[self.INTERFACES]
- else:
- for rel_def, value in self.entity_tpl.items():
- if rel_def != 'type':
- rel_def = self.entity_tpl.get(rel_def)
- rel = None
- if isinstance(rel_def, dict):
- rel = rel_def.get('relationship')
- if rel:
- if self.INTERFACES in rel:
- type_interfaces = rel[self.INTERFACES]
- break
- else:
- type_interfaces = self.type_definition.get_value(self.INTERFACES,
- self.entity_tpl)
- if type_interfaces:
- for interface_type, value in type_interfaces.items():
- for op, op_def in value.items():
- iface = InterfacesDef(self.type_definition,
- interfacetype=interface_type,
- node_template=self,
- name=op,
- value=op_def)
- interfaces.append(iface)
- return interfaces
-
- def get_capability(self, name):
- """Provide named capability
-
- :param name: name of capability
- :return: capability object if found, None otherwise
- """
- caps = self.get_capabilities()
- if caps and name in caps.keys():
- return caps[name]
-*/ \ No newline at end of file
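
isDerivedFrom() above climbs the parent-type chain until it either hits the requested ancestor or runs out of parents. A self-contained sketch of that walk, using made-up type names and a plain map rather than jtosca's type model:

import java.util.Map;

class DerivedFromSketch {

    // Each type mapped to its 'derived_from' parent; types absent from the map are roots.
    static final Map<String, String> PARENT = Map.of(
            "tosca.nodes.Compute", "tosca.nodes.Root",
            "tosca.nodes.SoftwareComponent", "tosca.nodes.Root");

    static boolean isDerivedFrom(String type, String ancestor) {
        for (String t = type; t != null; t = PARENT.get(t)) {
            if (t.equals(ancestor)) {
                return true;   // found the ancestor somewhere in the chain
            }
        }
        return false;          // ran out of parents without a match
    }

    public static void main(String[] args) {
        System.out.println(isDerivedFrom("tosca.nodes.Compute", "tosca.nodes.Root"));          // true
        System.out.println(isDerivedFrom("tosca.nodes.Compute", "tosca.nodes.BlockStorage"));  // false
    }
}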
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/Group.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/Group.java
deleted file mode 100644
index 8ed623f..0000000
--- a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/Group.java
+++ /dev/null
@@ -1,137 +0,0 @@
-package org.openecomp.sdc.toscaparser.api;
-
-import java.util.ArrayList;
-import java.util.LinkedHashMap;
-import java.util.Map;
-
-import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector;
-import org.openecomp.sdc.toscaparser.api.elements.Metadata;
-import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder;
-import org.openecomp.sdc.toscaparser.api.utils.ValidateUtils;
-
-public class Group extends EntityTemplate {
-
- private static final String TYPE = "type";
- private static final String METADATA = "metadata";
- private static final String DESCRIPTION = "description";
- private static final String PROPERTIES = "properties";
- private static final String MEMBERS = "members";
- private static final String INTERFACES = "interfaces";
- private static final String SECTIONS[] = {
- TYPE, METADATA, DESCRIPTION, PROPERTIES, MEMBERS, INTERFACES};
-
- private String name;
- LinkedHashMap<String,Object> tpl;
- ArrayList<NodeTemplate> memberNodes;
- LinkedHashMap<String,Object> customDef;
- Metadata metaData;
-
-
- public Group(String _name, LinkedHashMap<String, Object> _templates,
- ArrayList<NodeTemplate> _memberNodes,
- LinkedHashMap<String, Object> _customDef) {
- super(_name, _templates, "group_type", _customDef);
-
- name = _name;
- tpl = _templates;
- if(tpl.get(METADATA) != null) {
- Object metadataObject = tpl.get(METADATA);
- ValidateUtils.validateMap(metadataObject);
- metaData = new Metadata((Map<String,Object>)metadataObject);
- }
- memberNodes = _memberNodes;
- _validateKeys();
- }
-
- public Metadata getMetadata() {
- return metaData;
- }
-
- public ArrayList<String> getMembers() {
- return (ArrayList<String>)entityTpl.get("members");
- }
-
- public String getDescription() {
- return (String)entityTpl.get("description");
-
- }
-
- public ArrayList<NodeTemplate> getMemberNodes() {
- return memberNodes;
- }
-
- private void _validateKeys() {
- for(String key: entityTpl.keySet()) {
- boolean bFound = false;
- for(String sect: SECTIONS) {
- if(key.equals(sect)) {
- bFound = true;
- break;
- }
- }
- if(!bFound) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "UnknownFieldError: Groups \"%s\" contains unknown field \"%s\"",
- name,key));
- }
- }
- }
-
- @Override
- public String toString() {
- return "Group{" +
- "name='" + name + '\'' +
- ", tpl=" + tpl +
- ", memberNodes=" + memberNodes +
- ", customDef=" + customDef +
- ", metaData=" + metaData +
- '}';
- }
-}
-
-/*python
-
-from toscaparser.common.exception import ExceptionCollector
-from toscaparser.common.exception import UnknownFieldError
-from toscaparser.entity_template import EntityTemplate
-from toscaparser.utils import validateutils
-
-SECTIONS = (TYPE, METADATA, DESCRIPTION, PROPERTIES, MEMBERS, INTERFACES) = \
- ('type', 'metadata', 'description',
- 'properties', 'members', 'interfaces')
-
-
-class Group(EntityTemplate):
-
- def __init__(self, name, group_templates, member_nodes, custom_defs=None):
- super(Group, self).__init__(name,
- group_templates,
- 'group_type',
- custom_defs)
- self.name = name
- self.tpl = group_templates
- self.meta_data = None
- if self.METADATA in self.tpl:
- self.meta_data = self.tpl.get(self.METADATA)
- validateutils.validate_map(self.meta_data)
- self.member_nodes = member_nodes
- self._validate_keys()
-
- @property
- def members(self):
- return self.entity_tpl.get('members')
-
- @property
- def description(self):
- return self.entity_tpl.get('description')
-
- def get_member_nodes(self):
- return self.member_nodes
-
- def _validate_keys(self):
- for key in self.entity_tpl.keys():
- if key not in SECTIONS:
- ExceptionCollector.appendException(
- UnknownFieldError(what='Groups "%s"' % self.name,
- field=key))
-*/ \ No newline at end of file
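Note: the deleted Group class above restricts a group template to the keys listed in SECTIONS and reports anything else through the exception collector. What follows is a minimal, standalone Java sketch of that key check only, using a hypothetical group template map and plain System.out instead of jtosca's ThreadLocalsHolder/ExceptionCollector; it is illustrative and not part of the removed code.

import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;

public class GroupKeyCheckSketch {

    // Mirrors the SECTIONS array used by Group._validateKeys()
    private static final List<String> SECTIONS = Arrays.asList(
            "type", "metadata", "description", "properties", "members", "interfaces");

    public static void main(String[] args) {
        // Hypothetical group template, as it would look after YAML parsing
        LinkedHashMap<String, Object> groupTpl = new LinkedHashMap<>();
        groupTpl.put("type", "tosca.groups.Root");
        groupTpl.put("members", Arrays.asList("server_1", "server_2"));
        groupTpl.put("colour", "blue"); // unknown key, should be reported

        for (String key : groupTpl.keySet()) {
            if (!SECTIONS.contains(key)) {
                // The real class appends this message to the exception collector;
                // the sketch just prints the same format.
                System.out.println(String.format(
                        "UnknownFieldError: Groups \"%s\" contains unknown field \"%s\"",
                        "my_group", key));
            }
        }
    }
}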
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/ImportsLoader.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/ImportsLoader.java
deleted file mode 100644
index a97a360..0000000
--- a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/ImportsLoader.java
+++ /dev/null
@@ -1,728 +0,0 @@
-package org.openecomp.sdc.toscaparser.api;
-
-import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector;
-import org.openecomp.sdc.toscaparser.api.elements.TypeValidation;
-import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder;
-import org.openecomp.sdc.toscaparser.api.utils.UrlUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.yaml.snakeyaml.Yaml;
-
-import java.io.*;
-import java.net.URL;
-import java.nio.file.Paths;
-import java.util.*;
-
-public class ImportsLoader {
-
- private static Logger log = LoggerFactory.getLogger(ImportsLoader.class.getName());
- private static final String FILE = "file";
- private static final String REPOSITORY = "repository";
- private static final String NAMESPACE_URI = "namespace_uri";
- private static final String NAMESPACE_PREFIX = "namespace_prefix";
- private String IMPORTS_SECTION[] = {FILE, REPOSITORY, NAMESPACE_URI, NAMESPACE_PREFIX};
-
- private ArrayList<Object> importslist;
- private String path;
- private ArrayList<String> typeDefinitionList;
-
- private LinkedHashMap<String,Object> customDefs;
- private ArrayList<LinkedHashMap<String,Object>> nestedToscaTpls;
- private LinkedHashMap<String,Object> repositories;
-
- @SuppressWarnings("unchecked")
- public ImportsLoader(ArrayList<Object>_importslist,
- String _path,
- Object _typeDefinitionList,
- LinkedHashMap<String,Object> tpl) {
-
- this.importslist = _importslist;
- customDefs = new LinkedHashMap<String,Object>();
- nestedToscaTpls = new ArrayList<LinkedHashMap<String,Object>>();
- if((_path == null || _path.isEmpty()) && tpl == null) {
- //msg = _('Input tosca template is not provided.')
- //log.warning(msg)
- ThreadLocalsHolder.getCollector().appendException("ValidationError: Input tosca template is not provided");
- }
-
- this.path = _path;
- this.repositories = new LinkedHashMap<String,Object>();
-
- if(tpl != null && tpl.get("repositories") != null) {
- this.repositories = (LinkedHashMap<String,Object>)tpl.get("repositories");
- }
- this.typeDefinitionList = new ArrayList<String>();
- if(_typeDefinitionList != null) {
- if(_typeDefinitionList instanceof ArrayList) {
- this.typeDefinitionList = (ArrayList<String>)_typeDefinitionList;
- }
- else {
- this.typeDefinitionList.add((String)_typeDefinitionList);
- }
- }
- _validateAndLoadImports();
- }
-
- public LinkedHashMap<String,Object> getCustomDefs() {
- return customDefs;
- }
-
- public ArrayList<LinkedHashMap<String,Object>> getNestedToscaTpls() {
- return nestedToscaTpls;
- }
-
- @SuppressWarnings({ "unchecked", "unused" })
- public void _validateAndLoadImports() {
- Set<String> importNames = new HashSet<String>();
-
- if(importslist == null) {
- //msg = _('"imports" keyname is defined without including templates.')
- //log.error(msg)
- ThreadLocalsHolder.getCollector().appendException(
- "ValidationError: \"imports\" keyname is defined without including templates");
- return;
- }
-
- for(Object importDef: importslist) {
- String fullFileName = null;
- LinkedHashMap<String,Object> customType = null;
- if(importDef instanceof LinkedHashMap) {
- for(Map.Entry<String,Object> me: ((LinkedHashMap<String,Object>)importDef).entrySet()) {
- String importName = me.getKey();
- Object importUri = me.getValue();
- if(importNames.contains(importName)) {
- //msg = (_('Duplicate import name "%s" was found.') % import_name)
- //log.error(msg)
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "ValidationError: Duplicate import name \"%s\" was found",importName));
- }
- importNames.add(importName); //???
-
- // _loadImportTemplate returns 2 objects
- Object ffnct[] = _loadImportTemplate(importName, importUri);
- fullFileName = (String)ffnct[0];
- customType = (LinkedHashMap<String,Object>)ffnct[1];
- String namespacePrefix = "";
- if(importUri instanceof LinkedHashMap) {
- namespacePrefix = (String)
- ((LinkedHashMap<String,Object>)importUri).get(NAMESPACE_PREFIX);
- }
-
- if(customType != null) {
- TypeValidation tv = new TypeValidation(customType, importDef);
- _updateCustomDefs(customType, namespacePrefix);
- }
- }
- }
- else { // old style of imports
- // _loadImportTemplate returns 2 objects
- Object ffnct[] = _loadImportTemplate(null,importDef);
- fullFileName = (String)ffnct[0];
- customType = (LinkedHashMap<String,Object>)ffnct[1];
- if(customType != null) {
- TypeValidation tv = new TypeValidation(customType,importDef);
- _updateCustomDefs(customType,null);
- }
- }
- _updateNestedToscaTpls(fullFileName, customType);
-
-
- }
- }
-
- @SuppressWarnings("unchecked")
- private void _updateCustomDefs(LinkedHashMap<String,Object> customType, String namespacePrefix) {
- LinkedHashMap<String,Object> outerCustomTypes;// = new LinkedHashMap<String,Object>();
- for(String typeDef: typeDefinitionList) {
- if(typeDef.equals("imports")) {
- // imports are ArrayList...
- customDefs.put("imports",(ArrayList<Object>)customType.get(typeDef));
- }
- else {
- outerCustomTypes = (LinkedHashMap<String,Object>)customType.get(typeDef);
- if(outerCustomTypes != null) {
- if(namespacePrefix != null && !namespacePrefix.isEmpty()) {
- LinkedHashMap<String,Object> prefixCustomTypes = new LinkedHashMap<String,Object>();
- for(Map.Entry<String,Object> me: outerCustomTypes.entrySet()) {
- String typeDefKey = me.getKey();
- String nameSpacePrefixToKey = namespacePrefix + "." + typeDefKey;
- prefixCustomTypes.put(nameSpacePrefixToKey, outerCustomTypes.get(typeDefKey));
- }
- customDefs.putAll(prefixCustomTypes);
- }
- else {
- customDefs.putAll(outerCustomTypes);
- }
- }
- }
- }
- }
-
- private void _updateNestedToscaTpls(String fullFileName,LinkedHashMap<String,Object> customTpl) {
- if(fullFileName != null && customTpl != null) {
- LinkedHashMap<String,Object> tt = new LinkedHashMap<String,Object>();
- tt.put(fullFileName, customTpl);
- nestedToscaTpls.add(tt);
- }
- }
-
- private void _validateImportKeys(String importName, LinkedHashMap<String,Object> importUri) {
- if(importUri.get(FILE) == null) {
- //log.warning(_('Missing keyname "file" in import "%(name)s".') % {'name': import_name})
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "MissingRequiredFieldError: Import of template \"%s\" is missing field %s",importName,FILE));
- }
- for(String key: importUri.keySet()) {
- boolean bFound = false;
- for(String is: IMPORTS_SECTION) {
- if(is.equals(key)) {
- bFound = true;
- break;
- }
- }
- if(!bFound) {
- //log.warning(_('Unknown keyname "%(key)s" error in '
- // 'imported definition "%(def)s".')
- // % {'key': key, 'def': import_name})
- ThreadLocalsHolder.getCollector().appendException(String.format(
-					"UnknownFieldError: Import of template \"%s\" has unknown field %s",importName,key));
- }
- }
- }
-
- @SuppressWarnings("unchecked")
- private Object[] _loadImportTemplate(String importName, Object importUriDef) {
- /*
- This method loads the custom type definitions referenced in "imports"
- section of the TOSCA YAML template by determining whether each import
- is specified via a file reference (by relative or absolute path) or a
- URL reference.
-
- Possibilities:
- +----------+--------+------------------------------+
- | template | import | comment |
- +----------+--------+------------------------------+
- | file | file | OK |
- | file | URL | OK |
- | preparsed| file | file must be a full path |
- | preparsed| URL | OK |
- | URL | file | file must be a relative path |
- | URL | URL | OK |
- +----------+--------+------------------------------+
- */
- Object al[] = new Object[2];
-
- boolean shortImportNotation = false;
- String fileName;
- String repository;
- if(importUriDef instanceof LinkedHashMap) {
- _validateImportKeys(importName, (LinkedHashMap<String,Object>)importUriDef);
- fileName = (String)((LinkedHashMap<String,Object>)importUriDef).get(FILE);
- repository = (String)((LinkedHashMap<String,Object>)importUriDef).get(REPOSITORY);
- if(repository != null) {
- if(!repositories.keySet().contains(repository)) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "InvalidPropertyValueError: Repository \"%s\" not found in \"%s\"",
- repository,repositories.keySet().toString()));
- }
- }
- }
- else {
- fileName = (String)importUriDef;
- repository = null;
- shortImportNotation = true;
- }
-
- if(fileName == null || fileName.isEmpty()) {
- //msg = (_('A template file name is not provided with import '
- // 'definition "%(import_name)s".')
- // % {'import_name': import_name})
- //log.error(msg)
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "ValidationError: A template file name is not provided with import definition \"%s\"",importName));
- al[0] = al[1] = null;
- return al;
- }
-
- if(UrlUtils.validateUrl(fileName)) {
- try {
- al[0] = fileName;
- InputStream input = new URL(fileName).openStream();
- Yaml yaml = new Yaml();
- al[1] = yaml.load(input);
- return al;
- }
- catch(IOException e) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "ImportError: \"%s\" loading YAML import from \"%s\"",e.getClass().getSimpleName(),fileName));
- al[0] = al[1] = null;
- return al;
- }
- }
- else if(repository == null || repository.isEmpty()) {
- boolean aFile = false;
- String importTemplate = null;
- if(path != null && !path.isEmpty()) {
- if(UrlUtils.validateUrl(path)) {
- File fp = new File(path);
- if(fp.isAbsolute()) {
- String msg = String.format(
- "ImportError: Absolute file name \"%s\" cannot be used in the URL-based input template \"%s\"",
- fileName,path);
- ThreadLocalsHolder.getCollector().appendException(msg);
- al[0] = al[1] = null;
- return al;
- }
- importTemplate = UrlUtils.joinUrl(path,fileName);
- aFile = false;
- }
- else {
-
- aFile = true;
- File fp = new File(path);
- if(fp.isFile()) {
- File fn = new File(fileName);
- if(fn.isFile()) {
- importTemplate = fileName;
- }
- else {
- String fullPath = Paths.get(path).toAbsolutePath().getParent().toString() + File.separator + fileName;
- File ffp = new File(fullPath);
- if(ffp.isFile()) {
- importTemplate = fullPath;
- }
- else {
- String dirPath = Paths.get(path).toAbsolutePath().getParent().toString();
- String filePath;
- if(Paths.get(fileName).getParent() != null) {
- filePath = Paths.get(fileName).getParent().toString();
- }
- else {
- filePath = "";
- }
- if(!filePath.isEmpty() && dirPath.endsWith(filePath)) {
- String sFileName = Paths.get(fileName).getFileName().toString();
- importTemplate = dirPath + File.separator + sFileName;
- File fit = new File(importTemplate);
- if(!fit.isFile()) {
- //msg = (_('"%(import_template)s" is'
- // 'not a valid file')
- // % {'import_template':
- // import_template})
- //log.error(msg)
- String msg = String.format(
- "ValueError: \"%s\" is not a valid file",importTemplate);
- ThreadLocalsHolder.getCollector().appendException(msg);
- log.debug("ImportsLoader - _loadImportTemplate - {}", msg);
- }
- }
- }
- }
- }
- }
- }
- else { // template is pre-parsed
- File fn = new File(fileName);
- if(fn.isAbsolute() && fn.isFile()) {
- aFile = true;
- importTemplate = fileName;
- }
- else {
- String msg = String.format(
- "Relative file name \"%s\" cannot be used in a pre-parsed input template",fileName);
- ThreadLocalsHolder.getCollector().appendException("ImportError: " + msg);
- al[0] = al[1] = null;
- return al;
- }
- }
-
- if(importTemplate == null || importTemplate.isEmpty()) {
- //log.error(_('Import "%(name)s" is not valid.') %
- // {'name': import_uri_def})
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "ImportError: Import \"%s\" is not valid",importUriDef));
- al[0] = al[1] = null;
- return al;
- }
-
- // for now, this must be a file
- if(!aFile) {
- log.error("ImportsLoader - _loadImportTemplate - Error!! Expected a file. importUriDef = {}, importTemplate = {}", importUriDef, importTemplate);
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "ImportError: Import \"%s\" is not a file",importName));
- al[0] = al[1] = null;
- return al;
- }
- try {
- al[0] = importTemplate;
- InputStream input = new FileInputStream(new File(importTemplate));
- Yaml yaml = new Yaml();
- al[1] = yaml.load(input);
- return al;
- }
- catch(FileNotFoundException e) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "ImportError: Failed to load YAML from \"%s\"",importName));
- al[0] = al[1] = null;
- return al;
- }
- catch(Exception e) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "ImportError: Exception from SnakeYAML file = \"%s\"",importName));
- al[0] = al[1] = null;
- return al;
- }
- }
-
- if(shortImportNotation) {
- //log.error(_('Import "%(name)s" is not valid.') % import_uri_def)
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "ImportError: Import \"%s\" is not valid",importName));
- al[0] = al[1] = null;
- return al;
- }
-
- String fullUrl = "";
- String repoUrl = "";
- if(repository != null && !repository.isEmpty()) {
- if(repositories != null) {
- for(String repoName: repositories.keySet()) {
- if(repoName.equals(repository)) {
- Object repoDef = repositories.get(repoName);
- if(repoDef instanceof String) {
- repoUrl = (String)repoDef;
- }
- else if(repoDef instanceof LinkedHashMap) {
- repoUrl = (String)((LinkedHashMap<String,Object>)repoDef).get("url");
- }
- // Remove leading, ending spaces and strip
- // the last character if "/"
- repoUrl = repoUrl.trim();
- if(repoUrl.endsWith("/")) {
- repoUrl = repoUrl.substring(0,repoUrl.length()-1);
- }
- fullUrl = repoUrl + "/" + fileName;
- break;
- }
- }
- }
- if(fullUrl.isEmpty()) {
- String msg = String.format(
- "referenced repository \"%s\" in import definition \"%s\" not found",
- repository,importName);
- ThreadLocalsHolder.getCollector().appendException("ImportError: " + msg);
- al[0] = al[1] = null;
- return al;
- }
- }
- if(UrlUtils.validateUrl(fullUrl)) {
- try {
- al[0] = fullUrl;
- InputStream input = new URL(fullUrl).openStream();
- Yaml yaml = new Yaml();
- al[1] = yaml.load(input);
- return al;
- }
- catch(IOException e) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "ImportError: Exception loading YAML import from \"%s\"",fullUrl));
- al[0] = al[1] = null;
- return al;
- }
- }
- else {
- String msg = String.format(
- "repository URL \"%s\" in import definition \"%s\" is not valid",
- repoUrl,importName);
- ThreadLocalsHolder.getCollector().appendException("ImportError: " + msg);
- }
-
- // if we got here something is wrong with the flow...
- log.error("ImportsLoader - _loadImportTemplate - got to dead end (importName {})", importName);
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "ImportError: _loadImportTemplate got to dead end (importName %s)\n",importName));
- al[0] = al[1] = null;
- return al;
- }
-
- @Override
- public String toString() {
- return "ImportsLoader{" +
- "IMPORTS_SECTION=" + Arrays.toString(IMPORTS_SECTION) +
- ", importslist=" + importslist +
- ", path='" + path + '\'' +
- ", typeDefinitionList=" + typeDefinitionList +
- ", customDefs=" + customDefs +
- ", nestedToscaTpls=" + nestedToscaTpls +
- ", repositories=" + repositories +
- '}';
- }
-}
-
-/*python
-
-import logging
-import os
-
-from toscaparser.common.exception import ExceptionCollector
-from toscaparser.common.exception import InvalidPropertyValueError
-from toscaparser.common.exception import MissingRequiredFieldError
-from toscaparser.common.exception import UnknownFieldError
-from toscaparser.common.exception import ValidationError
-from toscaparser.elements.tosca_type_validation import TypeValidation
-from toscaparser.utils.gettextutils import _
-import org.openecomp.sdc.toscaparser.api.utils.urlutils
-import org.openecomp.sdc.toscaparser.api.utils.yamlparser
-
-YAML_LOADER = toscaparser.utils.yamlparser.load_yaml
-log = logging.getLogger("tosca")
-
-
-class ImportsLoader(object):
-
- IMPORTS_SECTION = (FILE, REPOSITORY, NAMESPACE_URI, NAMESPACE_PREFIX) = \
- ('file', 'repository', 'namespace_uri',
- 'namespace_prefix')
-
- def __init__(self, importslist, path, type_definition_list=None,
- tpl=None):
- self.importslist = importslist
- self.custom_defs = {}
- if not path and not tpl:
- msg = _('Input tosca template is not provided.')
- log.warning(msg)
- ExceptionCollector.appendException(ValidationError(message=msg))
- self.path = path
- self.repositories = {}
- if tpl and tpl.get('repositories'):
- self.repositories = tpl.get('repositories')
- self.type_definition_list = []
- if type_definition_list:
- if isinstance(type_definition_list, list):
- self.type_definition_list = type_definition_list
- else:
- self.type_definition_list.append(type_definition_list)
- self._validate_and_load_imports()
-
- def get_custom_defs(self):
- return self.custom_defs
-
- def _validate_and_load_imports(self):
- imports_names = set()
-
- if not self.importslist:
- msg = _('"imports" keyname is defined without including '
- 'templates.')
- log.error(msg)
- ExceptionCollector.appendException(ValidationError(message=msg))
- return
-
- for import_def in self.importslist:
- if isinstance(import_def, dict):
- for import_name, import_uri in import_def.items():
- if import_name in imports_names:
- msg = (_('Duplicate import name "%s" was found.') %
- import_name)
- log.error(msg)
- ExceptionCollector.appendException(
- ValidationError(message=msg))
- imports_names.add(import_name)
-
- custom_type = self._load_import_template(import_name,
- import_uri)
- namespace_prefix = None
- if isinstance(import_uri, dict):
- namespace_prefix = import_uri.get(
- self.NAMESPACE_PREFIX)
- if custom_type:
- TypeValidation(custom_type, import_def)
- self._update_custom_def(custom_type, namespace_prefix)
- else: # old style of imports
- custom_type = self._load_import_template(None,
- import_def)
- if custom_type:
- TypeValidation(
- custom_type, import_def)
- self._update_custom_def(custom_type, None)
-
- def _update_custom_def(self, custom_type, namespace_prefix):
- outer_custom_types = {}
- for type_def in self.type_definition_list:
- outer_custom_types = custom_type.get(type_def)
- if outer_custom_types:
- if type_def == "imports":
- self.custom_defs.update({'imports': outer_custom_types})
- else:
- if namespace_prefix:
- prefix_custom_types = {}
- for type_def_key in outer_custom_types.keys():
- namespace_prefix_to_key = (namespace_prefix +
- "." + type_def_key)
- prefix_custom_types[namespace_prefix_to_key] = \
- outer_custom_types[type_def_key]
- self.custom_defs.update(prefix_custom_types)
- else:
- self.custom_defs.update(outer_custom_types)
-
- def _validate_import_keys(self, import_name, import_uri_def):
- if self.FILE not in import_uri_def.keys():
- log.warning(_('Missing keyname "file" in import "%(name)s".')
- % {'name': import_name})
- ExceptionCollector.appendException(
- MissingRequiredFieldError(
- what='Import of template "%s"' % import_name,
- required=self.FILE))
- for key in import_uri_def.keys():
- if key not in self.IMPORTS_SECTION:
- log.warning(_('Unknown keyname "%(key)s" error in '
- 'imported definition "%(def)s".')
- % {'key': key, 'def': import_name})
- ExceptionCollector.appendException(
- UnknownFieldError(
- what='Import of template "%s"' % import_name,
- field=key))
-
- def _load_import_template(self, import_name, import_uri_def):
- """Handle custom types defined in imported template files
-
- This method loads the custom type definitions referenced in "imports"
- section of the TOSCA YAML template by determining whether each import
- is specified via a file reference (by relative or absolute path) or a
- URL reference.
-
- Possibilities:
- +----------+--------+------------------------------+
- | template | import | comment |
- +----------+--------+------------------------------+
- | file | file | OK |
- | file | URL | OK |
- | preparsed| file | file must be a full path |
- | preparsed| URL | OK |
- | URL | file | file must be a relative path |
- | URL | URL | OK |
- +----------+--------+------------------------------+
- """
- short_import_notation = False
- if isinstance(import_uri_def, dict):
- self._validate_import_keys(import_name, import_uri_def)
- file_name = import_uri_def.get(self.FILE)
- repository = import_uri_def.get(self.REPOSITORY)
- repos = self.repositories.keys()
- if repository is not None:
- if repository not in repos:
- ExceptionCollector.appendException(
- InvalidPropertyValueError(
- what=_('Repository is not found in "%s"') % repos))
- else:
- file_name = import_uri_def
- repository = None
- short_import_notation = True
-
- if not file_name:
- msg = (_('A template file name is not provided with import '
- 'definition "%(import_name)s".')
- % {'import_name': import_name})
- log.error(msg)
- ExceptionCollector.appendException(ValidationError(message=msg))
- return
-
- if toscaparser.utils.urlutils.UrlUtils.validate_url(file_name):
- return YAML_LOADER(file_name, False)
- elif not repository:
- import_template = None
- if self.path:
- if toscaparser.utils.urlutils.UrlUtils.validate_url(self.path):
- if os.path.isabs(file_name):
- msg = (_('Absolute file name "%(name)s" cannot be '
- 'used in a URL-based input template '
- '"%(template)s".')
- % {'name': file_name, 'template': self.path})
- log.error(msg)
- ExceptionCollector.appendException(ImportError(msg))
- return
- import_template = toscaparser.utils.urlutils.UrlUtils.\
- join_url(self.path, file_name)
- a_file = False
- else:
- a_file = True
- main_a_file = os.path.isfile(self.path)
-
- if main_a_file:
- if os.path.isfile(file_name):
- import_template = file_name
- else:
- full_path = os.path.join(
- os.path.dirname(os.path.abspath(self.path)),
- file_name)
- if os.path.isfile(full_path):
- import_template = full_path
- else:
- file_path = file_name.rpartition("/")
- dir_path = os.path.dirname(os.path.abspath(
- self.path))
- if file_path[0] != '' and dir_path.endswith(
- file_path[0]):
- import_template = dir_path + "/" +\
- file_path[2]
- if not os.path.isfile(import_template):
- msg = (_('"%(import_template)s" is'
- 'not a valid file')
- % {'import_template':
- import_template})
- log.error(msg)
- ExceptionCollector.appendException
- (ValueError(msg))
- else: # template is pre-parsed
- if os.path.isabs(file_name) and os.path.isfile(file_name):
- a_file = True
- import_template = file_name
- else:
- msg = (_('Relative file name "%(name)s" cannot be used '
- 'in a pre-parsed input template.')
- % {'name': file_name})
- log.error(msg)
- ExceptionCollector.appendException(ImportError(msg))
- return
-
- if not import_template:
- log.error(_('Import "%(name)s" is not valid.') %
- {'name': import_uri_def})
- ExceptionCollector.appendException(
- ImportError(_('Import "%s" is not valid.') %
- import_uri_def))
- return
- return YAML_LOADER(import_template, a_file)
-
- if short_import_notation:
- log.error(_('Import "%(name)s" is not valid.') % import_uri_def)
- ExceptionCollector.appendException(
- ImportError(_('Import "%s" is not valid.') % import_uri_def))
- return
-
- full_url = ""
- if repository:
- if self.repositories:
- for repo_name, repo_def in self.repositories.items():
- if repo_name == repository:
- # Remove leading, ending spaces and strip
- # the last character if "/"
- repo_url = ((repo_def['url']).strip()).rstrip("//")
- full_url = repo_url + "/" + file_name
-
- if not full_url:
- msg = (_('referenced repository "%(n_uri)s" in import '
- 'definition "%(tpl)s" not found.')
- % {'n_uri': repository, 'tpl': import_name})
- log.error(msg)
- ExceptionCollector.appendException(ImportError(msg))
- return
-
- if toscaparser.utils.urlutils.UrlUtils.validate_url(full_url):
- return YAML_LOADER(full_url, False)
- else:
- msg = (_('repository url "%(n_uri)s" is not valid in import '
- 'definition "%(tpl)s".')
- % {'n_uri': repo_url, 'tpl': import_name})
- log.error(msg)
- ExceptionCollector.appendException(ImportError(msg))
-*/ \ No newline at end of file
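Note: _updateCustomDefs() above merges the type definitions found in an imported template into customDefs, prefixing each key with "<namespace_prefix>." when the import declares one. Below is a minimal standalone sketch of that prefixing step with hypothetical type names; it does not touch files or URLs and is not part of the removed ImportsLoader code.

import java.util.LinkedHashMap;
import java.util.Map;

public class NamespacePrefixSketch {

    public static void main(String[] args) {
        // Hypothetical custom type definitions loaded from an imported template
        LinkedHashMap<String, Object> imported = new LinkedHashMap<>();
        imported.put("my.nodes.Compute", new LinkedHashMap<String, Object>());
        imported.put("my.nodes.Storage", new LinkedHashMap<String, Object>());

        // Value of namespace_prefix in the import definition (may be null or empty)
        String namespacePrefix = "ns1";

        LinkedHashMap<String, Object> customDefs = new LinkedHashMap<>();
        for (Map.Entry<String, Object> me : imported.entrySet()) {
            // Prefix every imported key when a namespace_prefix is given,
            // otherwise merge the definition under its original name.
            String key = (namespacePrefix != null && !namespacePrefix.isEmpty())
                    ? namespacePrefix + "." + me.getKey()
                    : me.getKey();
            customDefs.put(key, me.getValue());
        }

        // Prints [ns1.my.nodes.Compute, ns1.my.nodes.Storage]
        System.out.println(customDefs.keySet());
    }
}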
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/NodeTemplate.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/NodeTemplate.java
deleted file mode 100644
index c8af559..0000000
--- a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/NodeTemplate.java
+++ /dev/null
@@ -1,755 +0,0 @@
-package org.openecomp.sdc.toscaparser.api;
-
-import java.util.ArrayList;
-import java.util.LinkedHashMap;
-import java.util.Map;
-
-import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector;
-import org.openecomp.sdc.toscaparser.api.elements.*;
-import org.openecomp.sdc.toscaparser.api.utils.CopyUtils;
-import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder;
-
-public class NodeTemplate extends EntityTemplate {
-
- private LinkedHashMap<String,Object> templates;
- private LinkedHashMap<String,Object> customDef;
- private ArrayList<RelationshipTemplate> availableRelTpls;
- private LinkedHashMap<String,Object> availableRelTypes;
- private LinkedHashMap<NodeTemplate,RelationshipType> related;
- private ArrayList<RelationshipTemplate> relationshipTpl;
- private LinkedHashMap<RelationshipType,NodeTemplate> _relationships;
- private SubstitutionMappings subMappingToscaTemplate;
- private SubstitutionMappings subMappingToscaTemplate2;
- private Metadata metadata;
-
- private static final String METADATA = "metadata";
-
- @SuppressWarnings("unchecked")
- public NodeTemplate(String name,
- LinkedHashMap<String,Object> ntnodeTemplates,
- LinkedHashMap<String,Object> ntcustomDef,
- ArrayList<RelationshipTemplate> ntavailableRelTpls,
- LinkedHashMap<String,Object> ntavailableRelTypes) {
-
- super(name, (LinkedHashMap<String,Object>)ntnodeTemplates.get(name), "node_type", ntcustomDef);
-
- templates = ntnodeTemplates;
- _validateFields((LinkedHashMap<String,Object>)templates.get(name));
- customDef = ntcustomDef;
- related = new LinkedHashMap<NodeTemplate,RelationshipType>();
- relationshipTpl = new ArrayList<RelationshipTemplate>();
- availableRelTpls = ntavailableRelTpls;
- availableRelTypes = ntavailableRelTypes;
- _relationships = new LinkedHashMap<RelationshipType,NodeTemplate>();
- subMappingToscaTemplate = null;
- subMappingToscaTemplate2 = null;
- metadata = _metaData();
- }
-
- @SuppressWarnings("unchecked")
- public LinkedHashMap<RelationshipType,NodeTemplate> getRelationships() {
- if(_relationships.isEmpty()) {
- ArrayList<Object> requires = getRequirements();
- if(requires != null && requires instanceof ArrayList) {
- for(Object ro: requires) {
- LinkedHashMap<String,Object> r = (LinkedHashMap<String,Object>)ro;
- for(Map.Entry<String,Object> me: r.entrySet()) {
- LinkedHashMap<RelationshipType,NodeTemplate> explicit = _getExplicitRelationship(r,me.getValue());
- if(explicit != null) {
- // _relationships.putAll(explicit)...
- for(Map.Entry<RelationshipType,NodeTemplate> ee: explicit.entrySet()) {
- _relationships.put(ee.getKey(), ee.getValue());
- }
- }
- }
- }
- }
- }
- return _relationships;
- }
-
- @SuppressWarnings("unchecked")
- private LinkedHashMap<RelationshipType,NodeTemplate> _getExplicitRelationship(LinkedHashMap<String,Object> req,Object value) {
- // Handle explicit relationship
-
- // For example,
- // - req:
- // node: DBMS
- // relationship: tosca.relationships.HostedOn
-
- LinkedHashMap<RelationshipType,NodeTemplate> explicitRelation = new LinkedHashMap<RelationshipType,NodeTemplate>();
- String node;
- if(value instanceof LinkedHashMap) {
- node = (String)((LinkedHashMap<String,Object>)value).get("node");
- }
- else {
- node = (String)value;
- }
-
- if(node != null && !node.isEmpty()) {
- //msg = _('Lookup by TOSCA types is not supported. '
- // 'Requirement for "%s" can not be full-filled.') % self.name
- boolean bFound = false;
- for(String k: EntityType.TOSCA_DEF.keySet()) {
- if(k.equals(node)) {
- bFound = true;
- break;
- }
- }
- if(bFound || customDef.get(node) != null) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
-					"NotImplementedError: Lookup by TOSCA types is not supported. Requirement for \"%s\" cannot be fulfilled",
- getName()));
- return null;
- }
- if(templates.get(node) == null) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "KeyError: Node template \"%s\" was not found",node));
- return null;
- }
- NodeTemplate relatedTpl = new NodeTemplate(node,templates,customDef,null,null);
- Object relationship = null;
- String relationshipString = null;
- if(value instanceof LinkedHashMap) {
- relationship = ((LinkedHashMap<String,Object>)value).get("relationship");
- // here relationship can be a string or a LHM with 'type':<relationship>
- }
- // check if its type has relationship defined
- if(relationship == null) {
- ArrayList<Object> parentReqs = ((NodeType)typeDefinition).getAllRequirements();
- if(parentReqs == null) {
- ThreadLocalsHolder.getCollector().appendException("ValidationError: parent_req is null");
- }
- else {
- for(String key: req.keySet()) {
- boolean bFoundRel = false;
- for(Object rdo: parentReqs) {
- LinkedHashMap<String,Object> reqDict = (LinkedHashMap<String,Object>)rdo;
- LinkedHashMap<String,Object> relDict = (LinkedHashMap<String,Object>)reqDict.get(key);
- if(relDict != null) {
- relationship = relDict.get("relationship");
- //BUG-python??? need to break twice?
- bFoundRel = true;
- break;
- }
- }
- if(bFoundRel) {
- break;
- }
- }
- }
- }
-
- if(relationship != null) {
- // here relationship can be a string or a LHM with 'type':<relationship>
- if(relationship instanceof String) {
- relationshipString = (String)relationship;
- }
- else if(relationship instanceof LinkedHashMap) {
- relationshipString = (String)((LinkedHashMap<String,Object>)relationship).get("type");
- }
-
- boolean foundRelationshipTpl = false;
- // apply available relationship templates if found
- if(availableRelTpls != null) {
- for(RelationshipTemplate tpl: availableRelTpls) {
- if(tpl.getName().equals(relationshipString)) {
- RelationshipType rtype = new RelationshipType(tpl.getType(),null,customDef);
- explicitRelation.put(rtype, relatedTpl);
- tpl.setTarget(relatedTpl);
- tpl.setSource(this);
- relationshipTpl.add(tpl);
- foundRelationshipTpl = true;
- }
- }
- }
- // create relationship template object.
- String relPrfx = EntityType.RELATIONSHIP_PREFIX;
- if(!foundRelationshipTpl) {
- if(relationship instanceof LinkedHashMap) {
- relationshipString = (String)((LinkedHashMap<String,Object>)relationship).get("type");
- if(relationshipString != null) {
- if(availableRelTypes != null && !availableRelTypes.isEmpty() &&
- availableRelTypes.get(relationshipString) != null) {
- ;
- }
- else if(!(relationshipString).startsWith(relPrfx)) {
- relationshipString = relPrfx + relationshipString;
- }
- }
- else {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "MissingRequiredFieldError: \"relationship\" used in template \"%s\" is missing required field \"type\"",
- relatedTpl.getName()));
- }
- }
- for(RelationshipType rtype: ((NodeType)typeDefinition).getRelationship().keySet()) {
- if(rtype.getType().equals(relationshipString)) {
- explicitRelation.put(rtype,relatedTpl);
- relatedTpl._addRelationshipTemplate(req,rtype.getType(),this);
- }
- else if(availableRelTypes != null && !availableRelTypes.isEmpty()) {
- LinkedHashMap<String,Object> relTypeDef = (LinkedHashMap<String,Object>)availableRelTypes.get(relationshipString);
- if(relTypeDef != null) {
- String superType = (String)relTypeDef.get("derived_from");
- if(superType != null) {
- if(!superType.startsWith(relPrfx)) {
- superType = relPrfx + superType;
- }
- if(rtype.getType().equals(superType)) {
- explicitRelation.put(rtype,relatedTpl);
- relatedTpl._addRelationshipTemplate(req,rtype.getType(),this);
- }
- }
- }
- }
- }
- }
- }
- }
- return explicitRelation;
- }
-
- @SuppressWarnings("unchecked")
- private void _addRelationshipTemplate(LinkedHashMap<String,Object> requirement, String rtype, NodeTemplate source) {
- LinkedHashMap<String,Object> req = (LinkedHashMap<String,Object>)CopyUtils.copyLhmOrAl(requirement);
- req.put("type",rtype);
- RelationshipTemplate tpl = new RelationshipTemplate(req, rtype, customDef, this, source);
- relationshipTpl.add(tpl);
- }
-
- public ArrayList<RelationshipTemplate> getRelationshipTemplate() {
- return relationshipTpl;
- }
-
- void _addNext(NodeTemplate nodetpl,RelationshipType relationship) {
- related.put(nodetpl,relationship);
- }
-
- public ArrayList<NodeTemplate> getRelatedNodes() {
- if(related.isEmpty()) {
- for(Map.Entry<RelationshipType,NodeType> me: ((NodeType)typeDefinition).getRelationship().entrySet()) {
- RelationshipType relation = me.getKey();
- NodeType node = me.getValue();
- for(String tpl: templates.keySet()) {
- if(tpl.equals(node.getType())) {
- //BUG.. python has
- // self.related[NodeTemplate(tpl)] = relation
- // but NodeTemplate doesn't have a constructor with just name...
- //????
- related.put(new NodeTemplate(tpl,null,null,null,null),relation);
- }
- }
- }
- }
- return new ArrayList<NodeTemplate>(related.keySet());
- }
-
- public void validate(/*tosca_tpl=none is not used...*/) {
- _validateCapabilities();
- _validateRequirements();
- _validateProperties(entityTpl,(NodeType)typeDefinition);
- _validateInterfaces();
- for(Property prop: getPropertiesObjects()) {
- prop.validate();
- }
- }
-
- private Metadata _metaData() {
- if(entityTpl.get(METADATA) != null) {
- return new Metadata((Map<String,Object>)entityTpl.get(METADATA));
- }
- else {
- return null;
- }
- }
-
- @SuppressWarnings("unchecked")
- private void _validateRequirements() {
- ArrayList<Object> typeRequires = ((NodeType)typeDefinition).getAllRequirements();
- ArrayList<String> allowedReqs = new ArrayList<>();
- allowedReqs.add("template");
- if(typeRequires != null) {
- for(Object to: typeRequires) {
- LinkedHashMap<String,Object> treq = (LinkedHashMap<String,Object>)to;
- for(Map.Entry<String,Object> me: treq.entrySet()) {
- String key = me.getKey();
- Object value = me.getValue();
- allowedReqs.add(key);
- if(value instanceof LinkedHashMap) {
- allowedReqs.addAll(((LinkedHashMap<String,Object>)value).keySet());
- }
- }
-
- }
- }
-
- ArrayList<Object> requires = (ArrayList<Object>)((NodeType)typeDefinition).getValue(REQUIREMENTS, entityTpl, false);
- if(requires != null) {
- if(!(requires instanceof ArrayList)) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "TypeMismatchError: \"requirements\" of template \"%s\" are not of type \"list\"",name));
- }
- else {
- for(Object ro: requires) {
- LinkedHashMap<String,Object> req = (LinkedHashMap<String,Object>)ro;
- for(Map.Entry<String,Object> me: req.entrySet()) {
- String rl = me.getKey();
- Object vo = me.getValue();
- if(vo instanceof LinkedHashMap) {
- LinkedHashMap<String,Object> value = (LinkedHashMap<String,Object>)vo;
- _validateRequirementsKeys(value);
- _validateRequirementsProperties(value);
- allowedReqs.add(rl);
- }
- }
- _commonValidateField(req,allowedReqs,"requirements");
- }
- }
- }
- }
-
- @SuppressWarnings("unchecked")
- private void _validateRequirementsProperties(LinkedHashMap<String,Object> reqs) {
-		// TO-DO(anyone): Only the occurrences property of the requirements is
-		// validated here. Other requirement properties are validated in different
-		// files. It would be better to keep all requirements property validation here.
- for(Map.Entry<String,Object> me: reqs.entrySet()) {
- if(me.getKey().equals("occurrences")) {
- ArrayList<Object> val = (ArrayList<Object>)me.getValue();
- _validateOccurrences(val);
- }
-
- }
- }
-
- private void _validateOccurrences(ArrayList<Object> occurrences) {
- DataEntity.validateDatatype("list",occurrences,null,null,null);
- for(Object val: occurrences) {
- DataEntity.validateDatatype("Integer",val,null,null,null);
- }
- if(occurrences.size() != 2 ||
- !(0 <= (int)occurrences.get(0) && (int)occurrences.get(0) <= (int)occurrences.get(1)) ||
- (int)occurrences.get(1) == 0) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "InvalidPropertyValueError: property has invalid value %s",occurrences.toString()));
- }
- }
-
- private void _validateRequirementsKeys(LinkedHashMap<String,Object> reqs) {
- for(String key: reqs.keySet()) {
- boolean bFound = false;
- for(int i=0; i< REQUIREMENTS_SECTION.length; i++) {
- if(key.equals(REQUIREMENTS_SECTION[i])) {
- bFound = true;
- break;
- }
- }
- if(!bFound) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "UnknownFieldError: \"requirements\" of template \"%s\" contains unknown field \"%s\"",name,key));
- }
- }
- }
-
- @SuppressWarnings("unchecked")
- private void _validateInterfaces() {
- LinkedHashMap<String,Object> ifaces = (LinkedHashMap<String,Object>)
- ((NodeType)typeDefinition).getValue(INTERFACES, entityTpl, false);
- if(ifaces != null) {
- for(Map.Entry<String,Object> me: ifaces.entrySet()) {
- String iname = me.getKey();
- LinkedHashMap<String,Object> value = (LinkedHashMap<String,Object>)me.getValue();
- if(iname.equals(InterfacesDef.LIFECYCLE) || iname.equals(InterfacesDef.LIFECYCLE_SHORTNAME)) {
- // maybe we should convert [] to arraylist???
- ArrayList<String> inlo = new ArrayList<>();
- for(int i=0; i<InterfacesDef.interfacesNodeLifecycleOperations.length; i++) {
- inlo.add(InterfacesDef.interfacesNodeLifecycleOperations[i]);
- }
- _commonValidateField(value,inlo,"interfaces");
- }
- else if(iname.equals(InterfacesDef.CONFIGURE) || iname.equals(InterfacesDef.CONFIGURE_SHORTNAME)) {
- // maybe we should convert [] to arraylist???
- ArrayList<String> irco = new ArrayList<>();
- for(int i=0; i<InterfacesDef.interfacesRelationshipConfigureOperations.length; i++) {
- irco.add(InterfacesDef.interfacesRelationshipConfigureOperations[i]);
- }
- _commonValidateField(value,irco,"interfaces");
- }
- else if(((NodeType)typeDefinition).getInterfaces().keySet().contains(iname)) {
- _commonValidateField(value,_collectCustomIfaceOperations(iname),"interfaces");
- }
- else {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "UnknownFieldError: \"interfaces\" of template \"%s\" contains unknown field %s",name,iname));
- }
- }
- }
- }
-
- @SuppressWarnings("unchecked")
- private ArrayList<String> _collectCustomIfaceOperations(String iname) {
- ArrayList<String> allowedOperations = new ArrayList<>();
- LinkedHashMap<String,Object> nodetypeIfaceDef = (LinkedHashMap<String,Object>)((NodeType)
- typeDefinition).getInterfaces().get(iname);
- allowedOperations.addAll(nodetypeIfaceDef.keySet());
- String ifaceType = (String)nodetypeIfaceDef.get("type");
- if(ifaceType != null) {
- LinkedHashMap<String,Object> ifaceTypeDef = null;
- if(((NodeType)typeDefinition).customDef != null) {
- ifaceTypeDef = (LinkedHashMap<String,Object>)((NodeType)typeDefinition).customDef.get(ifaceType);
- }
- if(ifaceTypeDef == null) {
- ifaceTypeDef = (LinkedHashMap<String,Object>)EntityType.TOSCA_DEF.get(ifaceType);
- }
- allowedOperations.addAll(ifaceTypeDef.keySet());
- }
- // maybe we should convert [] to arraylist???
- ArrayList<String> idrw = new ArrayList<>();
- for(int i=0; i<InterfacesDef.INTERFACE_DEF_RESERVED_WORDS.length; i++) {
- idrw.add(InterfacesDef.INTERFACE_DEF_RESERVED_WORDS[i]);
- }
- allowedOperations.removeAll(idrw);
- return allowedOperations;
- }
-
- private void _validateFields(LinkedHashMap<String,Object> nodetemplate) {
- for(String ntname: nodetemplate.keySet()) {
- boolean bFound = false;
- for(int i=0; i< SECTIONS.length; i++) {
- if(ntname.equals(SECTIONS[i])) {
- bFound = true;
- break;
- }
- }
- if(!bFound) {
- for(int i=0; i< SPECIAL_SECTIONS.length; i++) {
- if(ntname.equals(SPECIAL_SECTIONS[i])) {
- bFound = true;
- break;
- }
- }
-
- }
- if(!bFound) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "UnknownFieldError: Node template \"%s\" has unknown field \"%s\"",name,ntname));
- }
- }
- }
-
- // getter/setter
-
- public SubstitutionMappings getSubMappingToscaTemplate() {
- return subMappingToscaTemplate;
- }
-
- public void setSubMappingToscaTemplate(SubstitutionMappings sm) {
- subMappingToscaTemplate = sm;
- }
-
- // **experimental** (multilevel nesting)
- public SubstitutionMappings getSubMappingToscaTemplate2() {
- return subMappingToscaTemplate2;
- }
-
- public void setSubMappingToscaTemplate2(SubstitutionMappings sm) {
- subMappingToscaTemplate2 = sm;
- }
-
- public Metadata getMetaData() {
- return metadata;
- }
-
- public void setMetaData(Metadata metadata) {
- this.metadata = metadata;
- }
-
- @Override
- public String toString() {
- return getName();
- }
-
-}
-
-/*python
-
-from toscaparser.common.exception import ExceptionCollector
-from toscaparser.common.exception import InvalidPropertyValueError
-from toscaparser.common.exception import MissingRequiredFieldError
-from toscaparser.common.exception import TypeMismatchError
-from toscaparser.common.exception import UnknownFieldError
-from toscaparser.common.exception import ValidationError
-from toscaparser.dataentity import DataEntity
-from toscaparser.elements.interfaces import CONFIGURE
-from toscaparser.elements.interfaces import CONFIGURE_SHORTNAME
-from toscaparser.elements.interfaces import INTERFACE_DEF_RESERVED_WORDS
-from toscaparser.elements.interfaces import InterfacesDef
-from toscaparser.elements.interfaces import LIFECYCLE
-from toscaparser.elements.interfaces import LIFECYCLE_SHORTNAME
-from toscaparser.elements.relationshiptype import RelationshipType
-from toscaparser.entity_template import EntityTemplate
-from toscaparser.relationship_template import RelationshipTemplate
-from toscaparser.utils.gettextutils import _
-
-log = logging.getLogger('tosca')
-
-
-class NodeTemplate(EntityTemplate):
- '''Node template from a Tosca profile.'''
- def __init__(self, name, node_templates, custom_def=None,
- available_rel_tpls=None, available_rel_types=None):
- super(NodeTemplate, self).__init__(name, node_templates[name],
- 'node_type',
- custom_def)
- self.templates = node_templates
- self._validate_fields(node_templates[name])
- self.custom_def = custom_def
- self.related = {}
- self.relationship_tpl = []
- self.available_rel_tpls = available_rel_tpls
- self.available_rel_types = available_rel_types
- self._relationships = {}
- self.sub_mapping_tosca_template = None
-
- @property
- def relationships(self):
- if not self._relationships:
- requires = self.requirements
- if requires and isinstance(requires, list):
- for r in requires:
- for r1, value in r.items():
- explicit = self._get_explicit_relationship(r, value)
- if explicit:
- for key, value in explicit.items():
- self._relationships[key] = value
- return self._relationships
-
- def _get_explicit_relationship(self, req, value):
- """Handle explicit relationship
-
- For example,
- - req:
- node: DBMS
- relationship: tosca.relationships.HostedOn
- """
- explicit_relation = {}
- node = value.get('node') if isinstance(value, dict) else value
-
- if node:
- # TO-DO(spzala) implement look up once Glance meta data is available
- # to find a matching TOSCA node using the TOSCA types
- msg = _('Lookup by TOSCA types is not supported. '
- 'Requirement for "%s" can not be full-filled.') % self.name
- if (node in list(self.type_definition.TOSCA_DEF.keys())
- or node in self.custom_def):
- ExceptionCollector.appendException(NotImplementedError(msg))
- return
-
- if node not in self.templates:
- ExceptionCollector.appendException(
- KeyError(_('Node template "%s" was not found.') % node))
- return
-
- related_tpl = NodeTemplate(node, self.templates, self.custom_def)
- relationship = value.get('relationship') \
- if isinstance(value, dict) else None
- # check if it's type has relationship defined
- if not relationship:
- parent_reqs = self.type_definition.get_all_requirements()
- if parent_reqs is None:
- ExceptionCollector.appendException(
- ValidationError(message='parent_req is ' +
- str(parent_reqs)))
- else:
- for key in req.keys():
- for req_dict in parent_reqs:
- if key in req_dict.keys():
- relationship = (req_dict.get(key).
- get('relationship'))
- break
- if relationship:
- found_relationship_tpl = False
- # apply available relationship templates if found
- if self.available_rel_tpls:
- for tpl in self.available_rel_tpls:
- if tpl.name == relationship:
- rtype = RelationshipType(tpl.type, None,
- self.custom_def)
- explicit_relation[rtype] = related_tpl
- tpl.target = related_tpl
- tpl.source = self
- self.relationship_tpl.append(tpl)
- found_relationship_tpl = True
- # create relationship template object.
- rel_prfx = self.type_definition.RELATIONSHIP_PREFIX
- if not found_relationship_tpl:
- if isinstance(relationship, dict):
- relationship = relationship.get('type')
- if relationship:
- if self.available_rel_types and \
- relationship in self.available_rel_types.keys():
- pass
- elif not relationship.startswith(rel_prfx):
- relationship = rel_prfx + relationship
- else:
- ExceptionCollector.appendException(
- MissingRequiredFieldError(
- what=_('"relationship" used in template '
- '"%s"') % related_tpl.name,
- required=self.TYPE))
- for rtype in self.type_definition.relationship.keys():
- if rtype.type == relationship:
- explicit_relation[rtype] = related_tpl
- related_tpl._add_relationship_template(req,
- rtype.type,
- self)
- elif self.available_rel_types:
- if relationship in self.available_rel_types.keys():
- rel_type_def = self.available_rel_types.\
- get(relationship)
- if 'derived_from' in rel_type_def:
- super_type = \
- rel_type_def.get('derived_from')
- if not super_type.startswith(rel_prfx):
- super_type = rel_prfx + super_type
- if rtype.type == super_type:
- explicit_relation[rtype] = related_tpl
- related_tpl.\
- _add_relationship_template(
- req, rtype.type, self)
- return explicit_relation
-
- def _add_relationship_template(self, requirement, rtype, source):
- req = requirement.copy()
- req['type'] = rtype
- tpl = RelationshipTemplate(req, rtype, self.custom_def, self, source)
- self.relationship_tpl.append(tpl)
-
- def get_relationship_template(self):
- return self.relationship_tpl
-
- def _add_next(self, nodetpl, relationship):
- self.related[nodetpl] = relationship
-
- @property
- def related_nodes(self):
- if not self.related:
- for relation, node in self.type_definition.relationship.items():
- for tpl in self.templates:
- if tpl == node.type:
- self.related[NodeTemplate(tpl)] = relation
- return self.related.keys()
-
- def validate(self, tosca_tpl=None):
- self._validate_capabilities()
- self._validate_requirements()
- self._validate_properties(self.entity_tpl, self.type_definition)
- self._validate_interfaces()
- for prop in self.get_properties_objects():
- prop.validate()
-
- def _validate_requirements(self):
- type_requires = self.type_definition.get_all_requirements()
- allowed_reqs = ["template"]
- if type_requires:
- for treq in type_requires:
- for key, value in treq.items():
- allowed_reqs.append(key)
- if isinstance(value, dict):
- for key in value:
- allowed_reqs.append(key)
-
- requires = self.type_definition.get_value(self.REQUIREMENTS,
- self.entity_tpl)
- if requires:
- if not isinstance(requires, list):
- ExceptionCollector.appendException(
- TypeMismatchError(
- what='"requirements" of template "%s"' % self.name,
- type='list'))
- else:
- for req in requires:
- for r1, value in req.items():
- if isinstance(value, dict):
- self._validate_requirements_keys(value)
- self._validate_requirements_properties(value)
- allowed_reqs.append(r1)
- self._common_validate_field(req, allowed_reqs,
- 'requirements')
-
- def _validate_requirements_properties(self, requirements):
- # TO-DO(anyone): Only occurrences property of the requirements is
- # validated here. Validation of other requirement properties are being
- # validated in different files. Better to keep all the requirements
- # properties validation here.
- for key, value in requirements.items():
- if key == 'occurrences':
- self._validate_occurrences(value)
- break
-
- def _validate_occurrences(self, occurrences):
- DataEntity.validate_datatype('list', occurrences)
- for value in occurrences:
- DataEntity.validate_datatype('integer', value)
- if len(occurrences) != 2 or not (0 <= occurrences[0] <= occurrences[1]) \
- or occurrences[1] == 0:
- ExceptionCollector.appendException(
- InvalidPropertyValueError(what=(occurrences)))
-
- def _validate_requirements_keys(self, requirement):
- for key in requirement.keys():
- if key not in self.REQUIREMENTS_SECTION:
- ExceptionCollector.appendException(
- UnknownFieldError(
- what='"requirements" of template "%s"' % self.name,
- field=key))
-
- def _validate_interfaces(self):
- ifaces = self.type_definition.get_value(self.INTERFACES,
- self.entity_tpl)
- if ifaces:
- for name, value in ifaces.items():
- if name in (LIFECYCLE, LIFECYCLE_SHORTNAME):
- self._common_validate_field(
- value, InterfacesDef.
- interfaces_node_lifecycle_operations,
- 'interfaces')
- elif name in (CONFIGURE, CONFIGURE_SHORTNAME):
- self._common_validate_field(
- value, InterfacesDef.
- interfaces_relationship_configure_operations,
- 'interfaces')
- elif name in self.type_definition.interfaces.keys():
- self._common_validate_field(
- value,
- self._collect_custom_iface_operations(name),
- 'interfaces')
- else:
- ExceptionCollector.appendException(
- UnknownFieldError(
- what='"interfaces" of template "%s"' %
- self.name, field=name))
-
- def _collect_custom_iface_operations(self, name):
- allowed_operations = []
- nodetype_iface_def = self.type_definition.interfaces[name]
- allowed_operations.extend(nodetype_iface_def.keys())
- if 'type' in nodetype_iface_def:
- iface_type = nodetype_iface_def['type']
- if iface_type in self.type_definition.custom_def:
- iface_type_def = self.type_definition.custom_def[iface_type]
- else:
- iface_type_def = self.type_definition.TOSCA_DEF[iface_type]
- allowed_operations.extend(iface_type_def.keys())
- allowed_operations = [op for op in allowed_operations if
- op not in INTERFACE_DEF_RESERVED_WORDS]
- return allowed_operations
-
- def _validate_fields(self, nodetemplate):
- for name in nodetemplate.keys():
- if name not in self.SECTIONS and name not in self.SPECIAL_SECTIONS:
- ExceptionCollector.appendException(
- UnknownFieldError(what='Node template "%s"' % self.name,
- field=name))*/ \ No newline at end of file
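Note: _validateOccurrences() above enforces that a requirement's occurrences value is a two-element integer list [min, max] with 0 <= min <= max and max != 0. A minimal standalone sketch of that rule, with made-up sample values, follows; the real method reports failures through the exception collector instead of returning a boolean.

import java.util.Arrays;
import java.util.List;

public class OccurrencesCheckSketch {

    // Same rule as NodeTemplate._validateOccurrences(): exactly two integers,
    // 0 <= min <= max, and max must not be 0.
    static boolean isValidOccurrences(List<Integer> occurrences) {
        if (occurrences.size() != 2) {
            return false;
        }
        int min = occurrences.get(0);
        int max = occurrences.get(1);
        return 0 <= min && min <= max && max != 0;
    }

    public static void main(String[] args) {
        System.out.println(isValidOccurrences(Arrays.asList(1, 1))); // true
        System.out.println(isValidOccurrences(Arrays.asList(0, 0))); // false: max == 0
        System.out.println(isValidOccurrences(Arrays.asList(2, 1))); // false: min > max
    }
}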
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/Policy.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/Policy.java
deleted file mode 100644
index a59d9d5..0000000
--- a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/Policy.java
+++ /dev/null
@@ -1,187 +0,0 @@
-package org.openecomp.sdc.toscaparser.api;
-
-import java.util.ArrayList;
-import java.util.LinkedHashMap;
-import java.util.Map;
-
-import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector;
-import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder;
-import org.openecomp.sdc.toscaparser.api.utils.ValidateUtils;
-
-public class Policy extends EntityTemplate {
-
-
- private static final String TYPE = "type";
- private static final String METADATA = "metadata";
- private static final String DESCRIPTION = "description";
- private static final String PROPERTIES = "properties";
- private static final String TARGETS = "targets";
- private static final String TRIGGERS = "triggers";
- private static final String SECTIONS[] = {
- TYPE, METADATA, DESCRIPTION, PROPERTIES, TARGETS, TRIGGERS};
-
- LinkedHashMap<String,Object> metaData;
- ArrayList<Object> targetsList; // *** a list of NodeTemplate OR a list of Group ***
- String targetsType;
- ArrayList<Object> triggers;
- LinkedHashMap<String,Object> properties;
-
- public Policy(String _name,
- LinkedHashMap<String,Object> _policy,
-// ArrayList<NodeTemplate> targetObjects,
- ArrayList<Object> targetObjects,
- String _targetsType,
- LinkedHashMap<String,Object> _customDef) {
- super(_name,_policy,"policy_type",_customDef);
-
- metaData = null;
- if(_policy.get(METADATA) != null) {
- metaData = (LinkedHashMap<String,Object>)_policy.get(METADATA);
- ValidateUtils.validateMap(metaData);
- }
-
- targetsList = targetObjects;
- targetsType = _targetsType;
- triggers = _triggers((LinkedHashMap<String,Object>)_policy.get(TRIGGERS));
- properties = null;
- if(_policy.get("properties") != null) {
- properties = (LinkedHashMap<String,Object>)_policy.get("properties");
- }
- _validateKeys();
- }
-
- public ArrayList<String> getTargets() {
- return (ArrayList<String>)entityTpl.get("targets");
- }
-
- public ArrayList<String> getDescription() {
- return (ArrayList<String>)entityTpl.get("description");
- }
-
- public ArrayList<String> getmetadata() {
- return (ArrayList<String>)entityTpl.get("metadata");
- }
-
- public String getTargetsType() {
- return targetsType;
- }
-
-// public ArrayList<NodeTemplate> getTargetsList() {
- public ArrayList<Object> getTargetsList() {
- return targetsList;
- }
-
- // entityTemplate already has a different getProperties...
- // this is to access the local properties variable
- public LinkedHashMap<String,Object> getPolicyProperties() {
- return properties;
- }
-
- private ArrayList<Object> _triggers(LinkedHashMap<String,Object> triggers) {
- ArrayList<Object> triggerObjs = new ArrayList<>();
- if(triggers != null) {
- for(Map.Entry<String,Object> me: triggers.entrySet()) {
- String tname = me.getKey();
- LinkedHashMap<String,Object> ttriggerTpl =
- (LinkedHashMap<String,Object>)me.getValue();
- Triggers triggersObj = new Triggers(tname,ttriggerTpl);
- triggerObjs.add(triggersObj);
- }
- }
- return triggerObjs;
- }
-
- private void _validateKeys() {
- for(String key: entityTpl.keySet()) {
- boolean bFound = false;
- for(int i=0; i<SECTIONS.length; i++) {
- if(key.equals(SECTIONS[i])) {
- bFound = true;
- break;
- }
- }
- if(!bFound) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "UnknownFieldError: Policy \"%s\" contains unknown field \"%s\"",
- name,key));
- }
- }
- }
-
- @Override
- public String toString() {
- return "Policy{" +
- "metaData=" + metaData +
- ", targetsList=" + targetsList +
- ", targetsType='" + targetsType + '\'' +
- ", triggers=" + triggers +
- ", properties=" + properties +
- '}';
- }
-}
-
-/*python
-
-from toscaparser.common.exception import ExceptionCollector
-from toscaparser.common.exception import UnknownFieldError
-from toscaparser.entity_template import EntityTemplate
-from toscaparser.triggers import Triggers
-from toscaparser.utils import validateutils
-
-
-SECTIONS = (TYPE, METADATA, DESCRIPTION, PROPERTIES, TARGETS, TRIGGERS) = \
- ('type', 'metadata', 'description',
- 'properties', 'targets', 'triggers')
-
-log = logging.getLogger('tosca')
-
-
-class Policy(EntityTemplate):
- '''Policies defined in Topology template.'''
- def __init__(self, name, policy, targets, targets_type, custom_def=None):
- super(Policy, self).__init__(name,
- policy,
- 'policy_type',
- custom_def)
- self.meta_data = None
- if self.METADATA in policy:
- self.meta_data = policy.get(self.METADATA)
- validateutils.validate_map(self.meta_data)
- self.targets_list = targets
- self.targets_type = targets_type
- self.triggers = self._triggers(policy.get(TRIGGERS))
- self._validate_keys()
-
- @property
- def targets(self):
- return self.entity_tpl.get('targets')
-
- @property
- def description(self):
- return self.entity_tpl.get('description')
-
- @property
- def metadata(self):
- return self.entity_tpl.get('metadata')
-
- def get_targets_type(self):
- return self.targets_type
-
- def get_targets_list(self):
- return self.targets_list
-
- def _triggers(self, triggers):
- triggerObjs = []
- if triggers:
- for name, trigger_tpl in triggers.items():
- triggersObj = Triggers(name, trigger_tpl)
- triggerObjs.append(triggersObj)
- return triggerObjs
-
- def _validate_keys(self):
- for key in self.entity_tpl.keys():
- if key not in SECTIONS:
- ExceptionCollector.appendException(
- UnknownFieldError(what='Policy "%s"' % self.name,
- field=key))
-*/ \ No newline at end of file
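For orientation, the key validation performed by the deleted Policy class above amounts to checking each key of a parsed policy body against its SECTIONS list. A minimal, self-contained Java sketch of that rule follows; the class name and example values are hypothetical illustrations, not part of this change.

import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;

public class PolicyKeyCheckSketch {
    public static void main(String[] args) {
        // Recognized top-level keys of a policy definition (mirrors the SECTIONS constant above).
        List<String> sections = Arrays.asList(
                "type", "metadata", "description", "properties", "targets", "triggers");

        // A parsed policy body with one misspelled key (hypothetical example).
        LinkedHashMap<String, Object> policyTpl = new LinkedHashMap<>();
        policyTpl.put("type", "tosca.policies.Placement");
        policyTpl.put("target", Arrays.asList("server_group")); // should be "targets"

        for (String key : policyTpl.keySet()) {
            if (!sections.contains(key)) {
                System.out.printf("UnknownFieldError: Policy \"%s\" contains unknown field \"%s\"%n",
                        "placement_policy", key);
            }
        }
    }
}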
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/Property.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/Property.java
deleted file mode 100644
index 731bc73..0000000
--- a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/Property.java
+++ /dev/null
@@ -1,177 +0,0 @@
-package org.openecomp.sdc.toscaparser.api;
-
-import java.util.ArrayList;
-import java.util.LinkedHashMap;
-
-import org.openecomp.sdc.toscaparser.api.elements.constraints.Constraint;
-import org.openecomp.sdc.toscaparser.api.elements.constraints.Schema;
-import org.openecomp.sdc.toscaparser.api.functions.Function;
-
-public class Property {
- // TOSCA built-in Property type
-
- private static final String TYPE = "type";
- private static final String REQUIRED = "required";
- private static final String DESCRIPTION = "description";
- private static final String DEFAULT = "default";
- private static final String CONSTRAINTS = "constraints";
-
- private static final String[] PROPERTY_KEYS = {
- TYPE, REQUIRED, DESCRIPTION, DEFAULT, CONSTRAINTS};
-
- private static final String ENTRYTYPE = "type";
- private static final String ENTRYPROPERTIES = "properties";
- private static final String[] ENTRY_SCHEMA_KEYS = {
- ENTRYTYPE, ENTRYPROPERTIES};
-
- private String name;
- private Object value;
- private Schema schema;
- private LinkedHashMap<String,Object> customDef;
-
- public Property(String propname,
- Object propvalue,
- LinkedHashMap<String,Object> propschemaDict,
- LinkedHashMap<String,Object> propcustomDef) {
-
- name = propname;
- value = propvalue;
- customDef = propcustomDef;
- schema = new Schema(propname, propschemaDict);
- }
-
- public String getType() {
- return schema.getType();
- }
-
- public boolean isRequired() {
- return schema.isRequired();
- }
-
- public String getDescription() {
- return schema.getDescription();
- }
-
- public Object getDefault() {
- return schema.getDefault();
- }
-
- public ArrayList<Constraint> getConstraints() {
- return schema.getConstraints();
- }
-
- public LinkedHashMap<String,Object> getEntrySchema() {
- return schema.getEntrySchema();
- }
-
-
- public String getName() {
- return name;
- }
-
- public Object getValue() {
- return value;
- }
-
- // setter
- public Object setValue(Object vob) {
- value = vob;
- return value;
- }
-
- public void validate() {
- // Validate if not a reference property
- if(!Function.isFunction(value)) {
- if(getType().equals(Schema.STRING)) {
- value = value.toString();
- }
- value = DataEntity.validateDatatype(getType(),value,
- getEntrySchema(),
- customDef,
- name);
- _validateConstraints();
- }
- }
-
- private void _validateConstraints() {
- if(getConstraints() != null) {
- for(Constraint constraint: getConstraints()) {
- constraint.validate(value);
- }
- }
- }
-
- @Override
- public String toString() {
- return "Property{" +
- "name='" + name + '\'' +
- ", value=" + value +
- ", schema=" + schema +
- ", customDef=" + customDef +
- '}';
- }
-}
-
-/*python
-
-class Property(object):
- '''TOSCA built-in Property type.'''
-
- PROPERTY_KEYS = (
- TYPE, REQUIRED, DESCRIPTION, DEFAULT, CONSTRAINTS
- ) = (
- 'type', 'required', 'description', 'default', 'constraints'
- )
-
- ENTRY_SCHEMA_KEYS = (
- ENTRYTYPE, ENTRYPROPERTIES
- ) = (
- 'type', 'properties'
- )
-
- def __init__(self, property_name, value, schema_dict, custom_def=None):
- self.name = property_name
- self.value = value
- self.custom_def = custom_def
- self.schema = Schema(property_name, schema_dict)
-
- @property
- def type(self):
- return self.schema.type
-
- @property
- def required(self):
- return self.schema.required
-
- @property
- def description(self):
- return self.schema.description
-
- @property
- def default(self):
- return self.schema.default
-
- @property
- def constraints(self):
- return self.schema.constraints
-
- @property
- def entry_schema(self):
- return self.schema.entry_schema
-
- def validate(self):
- '''Validate if not a reference property.'''
- if not is_function(self.value):
- if self.type == Schema.STRING:
- self.value = str(self.value)
- self.value = DataEntity.validate_datatype(self.type, self.value,
- self.entry_schema,
- self.custom_def,
- self.name)
- self._validate_constraints()
-
- def _validate_constraints(self):
- if self.constraints:
- for constraint in self.constraints:
- constraint.validate(self.value)
-*/
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/RelationshipTemplate.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/RelationshipTemplate.java
deleted file mode 100644
index 10d3ad9..0000000
--- a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/RelationshipTemplate.java
+++ /dev/null
@@ -1,199 +0,0 @@
-package org.openecomp.sdc.toscaparser.api;
-
-import java.util.ArrayList;
-import java.util.LinkedHashMap;
-import java.util.Map;
-
-import org.openecomp.sdc.toscaparser.api.elements.EntityType;
-import org.openecomp.sdc.toscaparser.api.elements.PropertyDef;
-import org.openecomp.sdc.toscaparser.api.elements.StatefulEntityType;
-
-public class RelationshipTemplate extends EntityTemplate {
-
- private static final String DERIVED_FROM = "derived_from";
- private static final String PROPERTIES = "properties";
- private static final String REQUIREMENTS = "requirements";
- private static final String INTERFACES = "interfaces";
- private static final String CAPABILITIES = "capabilities";
- private static final String TYPE = "type";
- @SuppressWarnings("unused")
- private static final String SECTIONS[] = {
- DERIVED_FROM, PROPERTIES, REQUIREMENTS, INTERFACES, CAPABILITIES, TYPE};
-
- private String name;
- private NodeTemplate target;
- private NodeTemplate source;
- private ArrayList<Property> _properties;
-
- public RelationshipTemplate(LinkedHashMap<String,Object> rtrelationshipTemplate,
- String rtname,
- LinkedHashMap<String,Object> rtcustomDef,
- NodeTemplate rttarget,
- NodeTemplate rtsource) {
- super(rtname,rtrelationshipTemplate,"relationship_type",rtcustomDef);
-
- name = rtname;
- target = rttarget;
- source = rtsource;
- _properties = null;
- }
-
- public ArrayList<Property> getPropertiesObjects() {
- // Return properties objects for this template
- if(_properties == null) {
- _properties = _createRelationshipProperties();
- }
- return _properties;
- }
-
- @SuppressWarnings({ "unchecked", "unused" })
- public ArrayList<Property> _createRelationshipProperties() {
- ArrayList<Property> props = new ArrayList<Property> ();
- LinkedHashMap<String,Object> properties = new LinkedHashMap<String,Object>();
- LinkedHashMap<String,Object> relationship = (LinkedHashMap<String,Object>)entityTpl.get("relationship");
-
- if(relationship == null) {
- for(Object val: entityTpl.values()) {
- if(val instanceof LinkedHashMap) {
- relationship = (LinkedHashMap<String,Object>)((LinkedHashMap<String,Object>)val).get("relationship");
- break;
- }
- }
- }
-
- if(relationship != null) {
- properties = (LinkedHashMap<String,Object>)((EntityType)typeDefinition).getValue(PROPERTIES,relationship,false);
- }
-		if(properties == null || properties.isEmpty()) {
-			properties = (LinkedHashMap<String,Object>)entityTpl.get(PROPERTIES);
-		}
-		if(properties == null) {
-			properties = new LinkedHashMap<String,Object>();
-		}
-
- if(properties != null) {
- for(Map.Entry<String,Object> me: properties.entrySet()) {
- String pname = me.getKey();
- Object pvalue = me.getValue();
- LinkedHashMap<String,PropertyDef> propsDef = ((StatefulEntityType)typeDefinition).getPropertiesDef();
- if(propsDef != null && propsDef.get(pname) != null) {
- if(properties.get(pname) != null) {
-						pvalue = properties.get(pname);
- }
- PropertyDef pd = (PropertyDef)propsDef.get(pname);
- Property prop = new Property(pname,pvalue,pd.getSchema(),customDef);
- props.add(prop);
- }
- }
- }
- ArrayList<PropertyDef> pds = ((StatefulEntityType)typeDefinition).getPropertiesDefObjects();
- for(PropertyDef p: pds) {
- if(p.getDefault() != null && properties.get(p.getName()) == null) {
-				Property prop = new Property(p.getName(), p.getDefault(), p.getSchema(), customDef);
- props.add(prop);
- }
- }
- return props;
- }
-
- public void validate() {
- _validateProperties(entityTpl,(StatefulEntityType)typeDefinition);
- }
-
- // getters/setters
- public NodeTemplate getTarget() {
- return target;
- }
-
- public NodeTemplate getSource() {
- return source;
- }
-
- public void setSource(NodeTemplate nt) {
- source = nt;
- }
-
- public void setTarget(NodeTemplate nt) {
- target = nt;
- }
-
- @Override
- public String toString() {
- return "RelationshipTemplate{" +
- "name='" + name + '\'' +
- ", target=" + target.getName() +
- ", source=" + source.getName() +
- ", _properties=" + _properties +
- '}';
- }
-
-}
-
-/*python
-
-from toscaparser.entity_template import EntityTemplate
-from toscaparser.properties import Property
-
-SECTIONS = (DERIVED_FROM, PROPERTIES, REQUIREMENTS,
- INTERFACES, CAPABILITIES, TYPE) = \
- ('derived_from', 'properties', 'requirements', 'interfaces',
- 'capabilities', 'type')
-
-log = logging.getLogger('tosca')
-
-
-class RelationshipTemplate(EntityTemplate):
- '''Relationship template.'''
- def __init__(self, relationship_template, name, custom_def=None,
- target=None, source=None):
- super(RelationshipTemplate, self).__init__(name,
- relationship_template,
- 'relationship_type',
- custom_def)
- self.name = name.lower()
- self.target = target
- self.source = source
-
- def get_properties_objects(self):
- '''Return properties objects for this template.'''
- if self._properties is None:
- self._properties = self._create_relationship_properties()
- return self._properties
-
- def _create_relationship_properties(self):
- props = []
- properties = {}
- relationship = self.entity_tpl.get('relationship')
-
- if not relationship:
- for value in self.entity_tpl.values():
- if isinstance(value, dict):
- relationship = value.get('relationship')
- break
-
- if relationship:
- properties = self.type_definition.get_value(self.PROPERTIES,
- relationship) or {}
- if not properties:
- properties = self.entity_tpl.get(self.PROPERTIES) or {}
-
- if properties:
- for name, value in properties.items():
- props_def = self.type_definition.get_properties_def()
- if props_def and name in props_def:
- if name in properties.keys():
- value = properties.get(name)
- prop = Property(name, value,
- props_def[name].schema, self.custom_def)
- props.append(prop)
- for p in self.type_definition.get_properties_def_objects():
- if p.default is not None and p.name not in properties.keys():
- prop = Property(p.name, p.default, p.schema, self.custom_def)
- props.append(prop)
- return props
-
- def validate(self):
- self._validate_properties(self.entity_tpl, self.type_definition)*/ \ No newline at end of file
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/Repository.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/Repository.java
deleted file mode 100644
index 92a90af..0000000
--- a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/Repository.java
+++ /dev/null
@@ -1,117 +0,0 @@
-package org.openecomp.sdc.toscaparser.api;
-
-import java.util.LinkedHashMap;
-
-import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector;
-import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder;
-import org.openecomp.sdc.toscaparser.api.utils.UrlUtils;
-
-public class Repository {
-
- private static final String DESCRIPTION = "description";
- private static final String URL = "url";
- private static final String CREDENTIAL = "credential";
- private static final String SECTIONS[] ={DESCRIPTION, URL, CREDENTIAL};
-
- private String name;
- private Object reposit;
- private String url;
-
- @SuppressWarnings("unchecked")
- public Repository(String repName,Object repValue) {
- name = repName;
- reposit = repValue;
- if(reposit instanceof LinkedHashMap) {
- url = (String)((LinkedHashMap<String,Object>)reposit).get("url");
- if(url == null) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "MissingRequiredFieldError: Repository \"%s\" is missing required field \"url\"",
- name));
- }
- }
- loadAndValidate(name,reposit);
- }
-
- @SuppressWarnings("unchecked")
- private void loadAndValidate(String val,Object repositDef) {
- String keyname = val;
- if(repositDef instanceof LinkedHashMap) {
-			for(String key: ((LinkedHashMap<String,Object>)repositDef).keySet()) {
- boolean bFound = false;
- for(String sect: SECTIONS) {
- if(key.equals(sect)) {
- bFound = true;
- break;
- }
- }
- if(!bFound) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "UnknownFieldError: repositories \"%s\" contains unknown field \"%s\"",
- keyname,key));
- }
- }
-
- String repositUrl = (String)((LinkedHashMap<String,Object>)repositDef).get("url");
- if(repositUrl != null) {
- boolean urlVal = UrlUtils.validateUrl(repositUrl);
- if(!urlVal) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
-						"URLException: repositories \"%s\" invalid URL",keyname));
- }
- }
- }
- }
-
- @Override
- public String toString() {
- return "Repository{" +
- "name='" + name + '\'' +
- ", reposit=" + reposit +
- ", url='" + url + '\'' +
- '}';
- }
-}
-
-/*python
-
-from toscaparser.common.exception import ExceptionCollector
-from toscaparser.common.exception import MissingRequiredFieldError
-from toscaparser.common.exception import UnknownFieldError
-from toscaparser.common.exception import URLException
-from toscaparser.utils.gettextutils import _
-import org.openecomp.sdc.toscaparser.api.utils.urlutils
-
-SECTIONS = (DESCRIPTION, URL, CREDENTIAL) = \
- ('description', 'url', 'credential')
-
-
-class Repository(object):
- def __init__(self, repositories, values):
- self.name = repositories
- self.reposit = values
- if isinstance(self.reposit, dict):
- if 'url' not in self.reposit.keys():
- ExceptionCollector.appendException(
- MissingRequiredFieldError(what=_('Repository "%s"')
- % self.name, required='url'))
- self.url = self.reposit['url']
- self.load_and_validate(self.name, self.reposit)
-
- def load_and_validate(self, val, reposit_def):
- self.keyname = val
- if isinstance(reposit_def, dict):
- for key in reposit_def.keys():
- if key not in SECTIONS:
- ExceptionCollector.appendException(
- UnknownFieldError(what=_('repositories "%s"')
- % self.keyname, field=key))
-
- if URL in reposit_def.keys():
- reposit_url = reposit_def.get(URL)
- url_val = toscaparser.utils.urlutils.UrlUtils.\
- validate_url(reposit_url)
- if url_val is not True:
- ExceptionCollector.appendException(
- URLException(what=_('repsositories "%s" Invalid Url')
- % self.keyname))
-*/ \ No newline at end of file
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/SubstitutionMappings.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/SubstitutionMappings.java
deleted file mode 100644
index b9c2238..0000000
--- a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/SubstitutionMappings.java
+++ /dev/null
@@ -1,520 +0,0 @@
-package org.openecomp.sdc.toscaparser.api;
-
-import java.util.ArrayList;
-import java.util.HashSet;
-import java.util.LinkedHashMap;
-
-import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector;
-import org.openecomp.sdc.toscaparser.api.elements.NodeType;
-import org.openecomp.sdc.toscaparser.api.elements.PropertyDef;
-import org.openecomp.sdc.toscaparser.api.parameters.Input;
-import org.openecomp.sdc.toscaparser.api.parameters.Output;
-import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder;
-
-
-public class SubstitutionMappings {
- // SubstitutionMappings class declaration
-
- // SubstitutionMappings exports the topology template as an
- // implementation of a Node type.
-
- private static final String NODE_TYPE = "node_type";
- private static final String REQUIREMENTS = "requirements";
- private static final String CAPABILITIES = "capabilities";
-
- private static final String SECTIONS[] = {NODE_TYPE, REQUIREMENTS, CAPABILITIES};
-
- private static final String OPTIONAL_OUTPUTS[] = {"tosca_id", "tosca_name", "state"};
-
- private LinkedHashMap<String,Object> subMappingDef;
- private ArrayList<NodeTemplate> nodetemplates;
- private ArrayList<Input> inputs;
- private ArrayList<Output> outputs;
- private ArrayList<Group> groups;
- private NodeTemplate subMappedNodeTemplate;
- private LinkedHashMap<String,Object> customDefs;
- private LinkedHashMap<String,Object> _capabilities;
- private LinkedHashMap<String,Object> _requirements;
-
- public SubstitutionMappings(LinkedHashMap<String,Object> smsubMappingDef,
- ArrayList<NodeTemplate> smnodetemplates,
- ArrayList<Input> sminputs,
- ArrayList<Output> smoutputs,
- ArrayList<Group> smgroups,
- NodeTemplate smsubMappedNodeTemplate,
- LinkedHashMap<String,Object> smcustomDefs) {
-
- subMappingDef = smsubMappingDef;
- nodetemplates = smnodetemplates;
- inputs = sminputs != null ? sminputs : new ArrayList<Input>();
- outputs = smoutputs != null ? smoutputs : new ArrayList<Output>();
- groups = smgroups != null ? smgroups : new ArrayList<Group>();
- subMappedNodeTemplate = smsubMappedNodeTemplate;
- customDefs = smcustomDefs != null ? smcustomDefs : new LinkedHashMap<String,Object>();
- _validate();
-
- _capabilities = null;
- _requirements = null;
- }
-
- public String getType() {
- if(subMappingDef != null) {
- return (String)subMappingDef.get(NODE_TYPE);
- }
- return null;
- }
-
- public ArrayList<NodeTemplate> getNodeTemplates() {
- return nodetemplates;
- }
-
- /*
- @classmethod
- def get_node_type(cls, sub_mapping_def):
- if isinstance(sub_mapping_def, dict):
- return sub_mapping_def.get(cls.NODE_TYPE)
- */
-
- public static String stGetNodeType(LinkedHashMap<String,Object> _subMappingDef) {
- if(_subMappingDef instanceof LinkedHashMap) {
- return (String)_subMappingDef.get(NODE_TYPE);
- }
- return null;
- }
-
- public String getNodeType() {
- return (String)subMappingDef.get(NODE_TYPE);
- }
-
- public ArrayList<Input> getInputs() {
- return inputs;
- }
-
- public ArrayList<Group> getGroups() {
- return groups;
- }
-
- public LinkedHashMap<String,Object> getCapabilities() {
- return (LinkedHashMap<String,Object>)subMappingDef.get(CAPABILITIES);
- }
-
- public LinkedHashMap<String,Object> getRequirements() {
- return (LinkedHashMap<String,Object>)subMappingDef.get(REQUIREMENTS);
- }
-
- public NodeType getNodeDefinition() {
- return new NodeType(getNodeType(), customDefs);
- }
-
- private void _validate() {
- // Basic validation
- _validateKeys();
- _validateType();
-
- // SubstitutionMapping class syntax validation
- _validateInputs();
- _validateCapabilities();
- _validateRequirements();
- _validateOutputs();
- }
-
- private void _validateKeys() {
- // validate the keys of substitution mappings
- for(String key: subMappingDef.keySet()) {
- boolean bFound = false;
- for(String s: SECTIONS) {
- if(s.equals(key)) {
- bFound = true;
- break;
- }
- }
- if(!bFound) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
-					"UnknownFieldError: SubstitutionMappings contains unknown field \"%s\"",
- key));
- }
- }
- }
-
- private void _validateType() {
- // validate the node_type of substitution mappings
- String nodeType = (String)subMappingDef.get(NODE_TYPE);
- if(nodeType == null) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "MissingRequiredFieldError: SubstitutionMappings used in topology_template is missing required field \"%s\"",
- NODE_TYPE));
- }
- Object nodeTypeDef = customDefs.get(nodeType);
- if(nodeTypeDef == null) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "InvalidNodeTypeError: \"%s\" is invalid",nodeType));
- }
- }
-
- private void _validateInputs() {
- // validate the inputs of substitution mappings.
-
- // The inputs defined by the topology template have to match the
- // properties of the node type or the substituted node. If there are
- // more inputs than the substituted node has properties, default values
-		// must be defined for those inputs.
-
- HashSet<String> allInputs = new HashSet<>();
- for(Input inp: inputs) {
- allInputs.add(inp.getName());
- }
- HashSet<String> requiredProperties = new HashSet<>();
- for(PropertyDef pd: getNodeDefinition().getPropertiesDefObjects()) {
- if(pd.isRequired() && pd.getDefault() == null) {
- requiredProperties.add(pd.getName());
- }
- }
- // Must provide inputs for required properties of node type.
- for(String property: requiredProperties) {
- // Check property which is 'required' and has no 'default' value
- if(!allInputs.contains(property)) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "MissingRequiredInputError: SubstitutionMappings with node_type \"%s\" is missing required input \"%s\"",
- getNodeType(),property));
- }
- }
- // If the optional properties of node type need to be customized by
- // substituted node, it also is necessary to define inputs for them,
- // otherwise they are not mandatory to be defined.
- HashSet<String> customizedParameters = new HashSet<>();
- if(subMappedNodeTemplate != null) {
- customizedParameters.addAll(subMappedNodeTemplate.getProperties().keySet());
- }
- HashSet<String> allProperties = new HashSet<String>(
- getNodeDefinition().getPropertiesDef().keySet());
-		HashSet<String> diffset = new HashSet<>(customizedParameters);
-		diffset.removeAll(allInputs);
- for(String parameter: diffset) {
- if(allProperties.contains(parameter)) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "MissingRequiredInputError: SubstitutionMappings with node_type \"%s\" is missing required input \"%s\"",
- getNodeType(),parameter));
- }
- }
- // Additional inputs are not in the properties of node type must
- // provide default values. Currently the scenario may not happen
- // because of parameters validation in nodetemplate, here is a
- // guarantee.
-		for(Input inp: inputs) {
-			diffset = new HashSet<>(allInputs);
-			diffset.removeAll(allProperties);
-			if(diffset.contains(inp.getName()) && inp.getDefault() == null) {
-				ThreadLocalsHolder.getCollector().appendException(String.format(
-					"MissingRequiredInputError: SubstitutionMappings with node_type \"%s\" is missing required input \"%s\"",
-					getNodeType(),inp.getName()));
-			}
-		}
- }
-
- private void _validateCapabilities() {
- // validate the capabilities of substitution mappings
-
-		// The capabilities must be declared in the node template being mapped.
- LinkedHashMap<String,Object> tplsCapabilities =
- (LinkedHashMap<String,Object>)subMappingDef.get(CAPABILITIES);
- LinkedHashMap<String,Capability> nodeCapabilities = null;
- if(subMappedNodeTemplate != null) {
- nodeCapabilities = subMappedNodeTemplate.getCapabilities();
- }
- if(nodeCapabilities != null) {
- for(String cap: nodeCapabilities.keySet()) {
- if(tplsCapabilities != null && tplsCapabilities.get(cap) == null) {
- ; //pass
- // ExceptionCollector.appendException(
- // UnknownFieldError(what='SubstitutionMappings',
- // field=cap))
- }
- }
- }
- }
-
- private void _validateRequirements() {
- // validate the requirements of substitution mappings
- //*****************************************************
- //TO-DO - Different from Python code!! one is a bug...
- //*****************************************************
-		// The requirements must be declared in the node template being mapped.
- LinkedHashMap<String,Object> tplsRequirements =
- (LinkedHashMap<String,Object>)subMappingDef.get(REQUIREMENTS);
- ArrayList<Object> nodeRequirements = null;
- if(subMappedNodeTemplate != null) {
- nodeRequirements = subMappedNodeTemplate.getRequirements();
- }
- if(nodeRequirements != null) {
- for(Object ro: nodeRequirements) {
- ArrayList<String> al = new ArrayList<String>(
- ((LinkedHashMap<String,Object>)ro).keySet());
- String cap = al.get(0);
- if(tplsRequirements != null && tplsRequirements.get(cap) == null) {
- ; //pass
- // ExceptionCollector.appendException(
- // UnknownFieldError(what='SubstitutionMappings',
- // field=cap))
- }
- }
- }
- }
-
- private void _validateOutputs() {
- // validate the outputs of substitution mappings.
-
- // The outputs defined by the topology template have to match the
- // attributes of the node type or the substituted node template,
- // and the observable attributes of the substituted node template
- // have to be defined as attributes of the node type or outputs in
- // the topology template.
-
- // The outputs defined by the topology template have to match the
- // attributes of the node type according to the specification, but
- // it's reasonable that there are more inputs than the node type
- // has properties, the specification will be amended?
-
- for(Output output: outputs) {
- Object ado = getNodeDefinition().getAttributesDef();
- if(ado != null && ((LinkedHashMap<String,Object>)ado).get(output.getName()) == null) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "UnknownOutputError: Unknown output \"%s\" in SubstitutionMappings with node_type \"%s\"",
- output.getName(),getNodeType()));
- }
- }
- }
-
- @Override
- public String toString() {
- return "SubstitutionMappings{" +
-// "subMappingDef=" + subMappingDef +
-// ", nodetemplates=" + nodetemplates +
-// ", inputs=" + inputs +
-// ", outputs=" + outputs +
-// ", groups=" + groups +
- ", subMappedNodeTemplate=" + (subMappedNodeTemplate==null?"":subMappedNodeTemplate.getName()) +
-// ", customDefs=" + customDefs +
-// ", _capabilities=" + _capabilities +
-// ", _requirements=" + _requirements +
- '}';
- }
-
- @Deprecated
- public String toLimitedString() {
- return "SubstitutionMappings{" +
- "subMappingDef=" + subMappingDef +
- ", nodetemplates=" + nodetemplates +
- ", inputs=" + inputs +
- ", outputs=" + outputs +
- ", groups=" + groups +
- ", subMappedNodeTemplate=" + (subMappedNodeTemplate==null?"":subMappedNodeTemplate.getName()) +
- ", customDefs=" + customDefs +
- ", _capabilities=" + _capabilities +
- ", _requirements=" + _requirements +
- '}';
- }
-}
-
-
-/*python
-
-from toscaparser.common.exception import ExceptionCollector
-from toscaparser.common.exception import InvalidNodeTypeError
-from toscaparser.common.exception import MissingDefaultValueError
-from toscaparser.common.exception import MissingRequiredFieldError
-from toscaparser.common.exception import MissingRequiredInputError
-from toscaparser.common.exception import UnknownFieldError
-from toscaparser.common.exception import UnknownOutputError
-from toscaparser.elements.nodetype import NodeType
-from toscaparser.utils.gettextutils import _
-
-log = logging.getLogger('tosca')
-
-
-class SubstitutionMappings(object):
- '''SubstitutionMappings class declaration
-
- SubstitutionMappings exports the topology template as an
- implementation of a Node type.
- '''
-
- SECTIONS = (NODE_TYPE, REQUIREMENTS, CAPABILITIES) = \
- ('node_type', 'requirements', 'capabilities')
-
- OPTIONAL_OUTPUTS = ['tosca_id', 'tosca_name', 'state']
-
- def __init__(self, sub_mapping_def, nodetemplates, inputs, outputs,
- sub_mapped_node_template, custom_defs):
- self.nodetemplates = nodetemplates
- self.sub_mapping_def = sub_mapping_def
- self.inputs = inputs or []
- self.outputs = outputs or []
- self.sub_mapped_node_template = sub_mapped_node_template
- self.custom_defs = custom_defs or {}
- self._validate()
-
- self._capabilities = None
- self._requirements = None
-
- @property
- def type(self):
- if self.sub_mapping_def:
- return self.sub_mapping_def.get(self.NODE_TYPE)
-
- @classmethod
- def get_node_type(cls, sub_mapping_def):
- if isinstance(sub_mapping_def, dict):
- return sub_mapping_def.get(cls.NODE_TYPE)
-
- @property
- def node_type(self):
- return self.sub_mapping_def.get(self.NODE_TYPE)
-
- @property
- def capabilities(self):
- return self.sub_mapping_def.get(self.CAPABILITIES)
-
- @property
- def requirements(self):
- return self.sub_mapping_def.get(self.REQUIREMENTS)
-
- @property
- def node_definition(self):
- return NodeType(self.node_type, self.custom_defs)
-
- def _validate(self):
- # Basic validation
- self._validate_keys()
- self._validate_type()
-
- # SubstitutionMapping class syntax validation
- self._validate_inputs()
- self._validate_capabilities()
- self._validate_requirements()
- self._validate_outputs()
-
- def _validate_keys(self):
- """validate the keys of substitution mappings."""
- for key in self.sub_mapping_def.keys():
- if key not in self.SECTIONS:
- ExceptionCollector.appendException(
- UnknownFieldError(what=_('SubstitutionMappings'),
- field=key))
-
- def _validate_type(self):
- """validate the node_type of substitution mappings."""
- node_type = self.sub_mapping_def.get(self.NODE_TYPE)
- if not node_type:
- ExceptionCollector.appendException(
- MissingRequiredFieldError(
- what=_('SubstitutionMappings used in topology_template'),
- required=self.NODE_TYPE))
-
- node_type_def = self.custom_defs.get(node_type)
- if not node_type_def:
- ExceptionCollector.appendException(
- InvalidNodeTypeError(what=node_type))
-
- def _validate_inputs(self):
- """validate the inputs of substitution mappings.
-
- The inputs defined by the topology template have to match the
- properties of the node type or the substituted node. If there are
- more inputs than the substituted node has properties, default values
- must be defined for those inputs.
- """
-
- all_inputs = set([input.name for input in self.inputs])
- required_properties = set([p.name for p in
- self.node_definition.
- get_properties_def_objects()
- if p.required and p.default is None])
- # Must provide inputs for required properties of node type.
- for property in required_properties:
- # Check property which is 'required' and has no 'default' value
- if property not in all_inputs:
- ExceptionCollector.appendException(
- MissingRequiredInputError(
- what=_('SubstitutionMappings with node_type ')
- + self.node_type,
- input_name=property))
-
- # If the optional properties of node type need to be customized by
- # substituted node, it also is necessary to define inputs for them,
- # otherwise they are not mandatory to be defined.
- customized_parameters = set(self.sub_mapped_node_template
- .get_properties().keys()
- if self.sub_mapped_node_template else [])
- all_properties = set(self.node_definition.get_properties_def())
- for parameter in customized_parameters - all_inputs:
- if parameter in all_properties:
- ExceptionCollector.appendException(
- MissingRequiredInputError(
- what=_('SubstitutionMappings with node_type ')
- + self.node_type,
- input_name=parameter))
-
- # Additional inputs are not in the properties of node type must
- # provide default values. Currently the scenario may not happen
- # because of parameters validation in nodetemplate, here is a
- # guarantee.
- for input in self.inputs:
- if input.name in all_inputs - all_properties \
- and input.default is None:
- ExceptionCollector.appendException(
- MissingDefaultValueError(
- what=_('SubstitutionMappings with node_type ')
- + self.node_type,
- input_name=input.name))
-
- def _validate_capabilities(self):
- """validate the capabilities of substitution mappings."""
-
- # The capabilites must be in node template wchich be mapped.
- tpls_capabilities = self.sub_mapping_def.get(self.CAPABILITIES)
- node_capabiliteys = self.sub_mapped_node_template.get_capabilities() \
- if self.sub_mapped_node_template else None
- for cap in node_capabiliteys.keys() if node_capabiliteys else []:
- if (tpls_capabilities and
- cap not in list(tpls_capabilities.keys())):
- pass
- # ExceptionCollector.appendException(
- # UnknownFieldError(what='SubstitutionMappings',
- # field=cap))
-
- def _validate_requirements(self):
- """validate the requirements of substitution mappings."""
-
- # The requirements must be in node template wchich be mapped.
- tpls_requirements = self.sub_mapping_def.get(self.REQUIREMENTS)
- node_requirements = self.sub_mapped_node_template.requirements \
- if self.sub_mapped_node_template else None
- for req in node_requirements if node_requirements else []:
- if (tpls_requirements and
- req not in list(tpls_requirements.keys())):
- pass
- # ExceptionCollector.appendException(
- # UnknownFieldError(what='SubstitutionMappings',
- # field=req))
-
- def _validate_outputs(self):
- """validate the outputs of substitution mappings.
-
- The outputs defined by the topology template have to match the
- attributes of the node type or the substituted node template,
- and the observable attributes of the substituted node template
- have to be defined as attributes of the node type or outputs in
- the topology template.
- """
-
- # The outputs defined by the topology template have to match the
- # attributes of the node type according to the specification, but
- # it's reasonable that there are more inputs than the node type
- # has properties, the specification will be amended?
- for output in self.outputs:
- if output.name not in self.node_definition.get_attributes_def():
- ExceptionCollector.appendException(
- UnknownOutputError(
- where=_('SubstitutionMappings with node_type ')
- + self.node_type,
- output_name=output.name))*/ \ No newline at end of file
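The input-matching rule that _validateInputs enforced in the deleted class above reduces to set arithmetic: every node-type property that is required and has no default must be backed by a topology-template input. A minimal, self-contained sketch of that rule; the property and input names below are hypothetical.

import java.util.Arrays;
import java.util.HashSet;

public class InputMatchingSketch {
    public static void main(String[] args) {
        // Node-type properties that are 'required' and have no 'default' (hypothetical names).
        HashSet<String> requiredProperties = new HashSet<>(Arrays.asList("flavor", "image"));
        // Inputs declared by the topology template.
        HashSet<String> declaredInputs = new HashSet<>(Arrays.asList("flavor"));

        // Every required property must be covered by an input; anything left over is an error.
        HashSet<String> missing = new HashSet<>(requiredProperties);
        missing.removeAll(declaredInputs);
        System.out.println("Missing required inputs: " + missing); // prints [image]
    }
}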
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/TopologyTemplate.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/TopologyTemplate.java
deleted file mode 100644
index 25f118b..0000000
--- a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/TopologyTemplate.java
+++ /dev/null
@@ -1,857 +0,0 @@
-package org.openecomp.sdc.toscaparser.api;
-
-import java.util.ArrayList;
-import java.util.HashSet;
-import java.util.LinkedHashMap;
-import java.util.Map;
-
-import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector;
-import org.openecomp.sdc.toscaparser.api.elements.InterfacesDef;
-import org.openecomp.sdc.toscaparser.api.elements.NodeType;
-import org.openecomp.sdc.toscaparser.api.elements.RelationshipType;
-import org.openecomp.sdc.toscaparser.api.functions.Function;
-import org.openecomp.sdc.toscaparser.api.functions.GetAttribute;
-import org.openecomp.sdc.toscaparser.api.functions.GetInput;
-import org.openecomp.sdc.toscaparser.api.parameters.Input;
-import org.openecomp.sdc.toscaparser.api.parameters.Output;
-import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder;
-
-public class TopologyTemplate {
-
- private static final String DESCRIPTION = "description";
- private static final String INPUTS = "inputs";
- private static final String NODE_TEMPLATES = "node_templates";
- private static final String RELATIONSHIP_TEMPLATES = "relationship_templates";
- private static final String OUTPUTS = "outputs";
- private static final String GROUPS = "groups";
- private static final String SUBSTITUTION_MAPPINGS = "substitution_mappings";
- private static final String POLICIES = "policies";
- private static final String METADATA = "metadata";
-
- private static String SECTIONS[] = {
- DESCRIPTION, INPUTS, NODE_TEMPLATES, RELATIONSHIP_TEMPLATES,
- OUTPUTS, GROUPS, SUBSTITUTION_MAPPINGS, POLICIES, METADATA
- };
-
- private LinkedHashMap<String,Object> tpl;
- LinkedHashMap<String,Object> metaData;
- private ArrayList<Input> inputs;
- private ArrayList<Output> outputs;
- private ArrayList<RelationshipTemplate> relationshipTemplates;
- private ArrayList<NodeTemplate> nodeTemplates;
- private LinkedHashMap<String,Object> customDefs;
- private LinkedHashMap<String,Object> relTypes;//TYPE
- private NodeTemplate subMappedNodeTemplate;
- private ArrayList<Group> groups;
- private ArrayList<Policy> policies;
- private LinkedHashMap<String,Object> parsedParams = null;//TYPE
- private String description;
- private ToscaGraph graph;
- private SubstitutionMappings substitutionMappings;
-
- public TopologyTemplate(
- LinkedHashMap<String,Object> _template,
- LinkedHashMap<String,Object> _customDefs,
- LinkedHashMap<String,Object> _relTypes,//TYPE
- LinkedHashMap<String, Object> _parsedParams,
- NodeTemplate _subMappedNodeTemplate) {
-
- tpl = _template;
- if(tpl != null) {
- subMappedNodeTemplate = _subMappedNodeTemplate;
- metaData = _metaData();
- customDefs = _customDefs;
- relTypes = _relTypes;
- parsedParams = _parsedParams;
- _validateField();
- description = _tplDescription();
- inputs = _inputs();
- relationshipTemplates =_relationshipTemplates();
- nodeTemplates = _nodeTemplates();
- outputs = _outputs();
- if(nodeTemplates != null) {
- graph = new ToscaGraph(nodeTemplates);
- }
- groups = _groups();
- policies = _policies();
- _processIntrinsicFunctions();
- substitutionMappings = _substitutionMappings();
- }
- }
-
- @SuppressWarnings("unchecked")
- private ArrayList<Input> _inputs() {
- //DumpUtils.dumpYaml(customDefs,0);
- ArrayList<Input> alInputs = new ArrayList<>();
- for(String name: _tplInputs().keySet()) {
- Object attrs = _tplInputs().get(name);
- Input input = new Input(name,(LinkedHashMap<String,Object>)attrs,customDefs);
- if(parsedParams != null && parsedParams.get(name) != null) {
- input.validate(parsedParams.get(name));
- }
- else {
- Object _default = input.getDefault();
- if(_default != null) {
- input.validate(_default);
- }
- }
-			if(((parsedParams != null && parsedParams.get(input.getName()) == null) || parsedParams == null)
-					&& input.isRequired() && input.getDefault() == null) {
- System.out.format("Log warning: The required parameter \"%s\" is not provided\n",input.getName());
- }
- alInputs.add(input);
- }
- return alInputs;
-
- }
-
- private LinkedHashMap<String,Object> _metaData() {
- if(tpl.get(METADATA) != null) {
- return (LinkedHashMap<String,Object>)tpl.get(METADATA);
- }
- else {
- return new LinkedHashMap<String,Object>();
- }
-
- }
-
- private ArrayList<NodeTemplate> _nodeTemplates() {
- ArrayList<NodeTemplate> alNodeTemplates = new ArrayList<>();
- LinkedHashMap<String,Object> tpls = _tplNodeTemplates();
- if(tpls != null) {
- for(String name: tpls.keySet()) {
- NodeTemplate tpl = new NodeTemplate(name,
- tpls,
- customDefs,
- relationshipTemplates,
- relTypes);
- if(tpl.getTypeDefinition() != null) {
- boolean b = NodeType.TOSCA_DEF.get(tpl.getType()) != null;
- if(b || (tpl.getCustomDef() != null && !tpl.getCustomDef().isEmpty())) {
- tpl.validate();
- alNodeTemplates.add(tpl);
- }
- }
- }
- }
- return alNodeTemplates;
- }
-
- @SuppressWarnings("unchecked")
- private ArrayList<RelationshipTemplate> _relationshipTemplates() {
- ArrayList<RelationshipTemplate> alRelationshipTemplates = new ArrayList<>();
- LinkedHashMap<String,Object> tpls = _tplRelationshipTemplates();
- if(tpls != null) {
- for(String name: tpls.keySet()) {
- RelationshipTemplate tpl = new RelationshipTemplate(
- (LinkedHashMap<String,Object>)tpls.get(name),name,customDefs,null,null);
-
- alRelationshipTemplates.add(tpl);
- }
- }
- return alRelationshipTemplates;
- }
-
- private ArrayList<Output> _outputs() {
- ArrayList<Output> alOutputs = new ArrayList<>();
- for(Map.Entry<String,Object> me: _tplOutputs().entrySet()) {
- String oname = me.getKey();
- LinkedHashMap<String,Object> oattrs = (LinkedHashMap<String,Object>)me.getValue();
- Output o = new Output(oname,oattrs);
- o.validate();
- alOutputs.add(o);
- }
- return alOutputs;
- }
-
- private SubstitutionMappings _substitutionMappings() {
- LinkedHashMap<String,Object> tplSubstitutionMapping = (LinkedHashMap<String,Object>) _tplSubstitutionMappings();
-
- //*** the commenting-out below and the weaker condition are in the Python source
- // #if tpl_substitution_mapping and self.sub_mapped_node_template:
- if(tplSubstitutionMapping != null && tplSubstitutionMapping.size() > 0) {
- return new SubstitutionMappings(tplSubstitutionMapping,
- nodeTemplates,
- inputs,
- outputs,
- groups,
- subMappedNodeTemplate,
- customDefs);
- }
- return null;
-
- }
-
- @SuppressWarnings("unchecked")
- private ArrayList<Policy> _policies() {
- ArrayList<Policy> alPolicies = new ArrayList<>();
- for(Object po: _tplPolicies()) {
- LinkedHashMap<String,Object> policy = (LinkedHashMap<String,Object>)po;
- for(Map.Entry<String,Object> me: policy.entrySet()) {
- String policyName = me.getKey();
- LinkedHashMap<String,Object> policyTpl = (LinkedHashMap<String,Object>)me.getValue();
- ArrayList<String> targetList = (ArrayList<String>)policyTpl.get("targets");
- //ArrayList<Object> targetObjects = new ArrayList<>();
- ArrayList<NodeTemplate> targetNodes = new ArrayList<>();
- ArrayList<Object> targetObjects = new ArrayList<>();
- ArrayList<Group> targetGroups = new ArrayList<>();
- String targetsType = "groups";
- if(targetList != null && targetList.size() >= 1) {
- targetGroups = _getPolicyGroups(targetList);
-					if(targetGroups == null || targetGroups.isEmpty()) {
- targetsType = "node_templates";
- targetNodes = _getGroupMembers(targetList);
- for(NodeTemplate nt: targetNodes) {
- targetObjects.add(nt);
- }
- }
- else {
- for(Group gr: targetGroups) {
- targetObjects.add(gr);
- }
- }
- }
- Policy policyObj = new Policy(policyName,
- policyTpl,
- targetObjects,
- targetsType,
- customDefs);
- alPolicies.add(policyObj);
- }
- }
- return alPolicies;
- }
-
- private ArrayList<Group> _groups() {
- ArrayList<Group> groups = new ArrayList<>();
- ArrayList<NodeTemplate> memberNodes = null;
- for(Map.Entry<String,Object> me: _tplGroups().entrySet()) {
- String groupName = me.getKey();
- LinkedHashMap<String,Object> groupTpl = (LinkedHashMap<String,Object>)me.getValue();
- ArrayList<String> memberNames = (ArrayList<String>)groupTpl.get("members");
- if(memberNames != null) {
- DataEntity.validateDatatype("list", memberNames,null,null,null);
- if(memberNames.size() < 1 ||
- (new HashSet<String>(memberNames)).size() != memberNames.size()) {
- ThreadLocalsHolder.getCollector().appendWarning(String.format(
- "InvalidGroupTargetException: Member nodes \"%s\" should be >= 1 and not repeated",
- memberNames.toString()));
- }
- else {
- memberNodes = _getGroupMembers(memberNames);
- }
- }
- Group group = new Group(groupName,
- groupTpl,
- memberNodes,
- customDefs);
- groups.add(group);
- }
- return groups;
- }
-
- private ArrayList<NodeTemplate> _getGroupMembers(ArrayList<String> memberNames) {
- ArrayList<NodeTemplate> memberNodes = new ArrayList<>();
- _validateGroupMembers(memberNames);
- for(String member: memberNames) {
- for(NodeTemplate node: nodeTemplates) {
- if(member.equals(node.getName())) {
- memberNodes.add(node);
- }
- }
- }
- return memberNodes;
- }
-
- private ArrayList<Group> _getPolicyGroups(ArrayList<String> memberNames) {
- ArrayList<Group> memberGroups = new ArrayList<>();
- for(String member: memberNames) {
- for(Group group: groups) {
- if(member.equals(group.getName())) {
- memberGroups.add(group);
- }
- }
- }
- return memberGroups;
- }
-
- private void _validateGroupMembers(ArrayList<String> members) {
- ArrayList<String> nodeNames = new ArrayList<>();
- for(NodeTemplate node: nodeTemplates) {
- nodeNames.add(node.getName());
- }
- for(String member: members) {
- if(!nodeNames.contains(member)) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
-					"InvalidGroupTargetException: Target member \"%s\" is not found in \"node_templates\"",member));
- }
- }
- }
-
- // topology template can act like node template
- // it is exposed by substitution_mappings.
-
- public String nodetype() {
- return substitutionMappings.getNodeType();
- }
-
- public LinkedHashMap<String,Object> capabilities() {
- return substitutionMappings.getCapabilities();
- }
-
- public LinkedHashMap<String,Object> requirements() {
- return substitutionMappings.getRequirements();
- }
-
- private String _tplDescription() {
- return (String)tpl.get(DESCRIPTION);
- //if description:
- // return description.rstrip()
- }
-
- @SuppressWarnings("unchecked")
- private LinkedHashMap<String,Object> _tplInputs() {
- if(tpl.get(INPUTS) != null) {
- return (LinkedHashMap<String,Object>)tpl.get(INPUTS);
- }
- else {
- return new LinkedHashMap<String,Object>();
- }
- }
-
- @SuppressWarnings("unchecked")
- private LinkedHashMap<String,Object> _tplNodeTemplates() {
- return (LinkedHashMap<String,Object>)tpl.get(NODE_TEMPLATES);
- }
-
- @SuppressWarnings("unchecked")
- private LinkedHashMap<String,Object> _tplRelationshipTemplates() {
- if(tpl.get(RELATIONSHIP_TEMPLATES) != null) {
- return (LinkedHashMap<String,Object>)tpl.get(RELATIONSHIP_TEMPLATES);
- }
- else {
- return new LinkedHashMap<String,Object>();
- }
- }
-
- @SuppressWarnings("unchecked")
- private LinkedHashMap<String,Object> _tplOutputs() {
- if(tpl.get(OUTPUTS) != null) {
- return (LinkedHashMap<String,Object>)tpl.get(OUTPUTS);
- }
- else {
- return new LinkedHashMap<String,Object>();
- }
- }
-
- @SuppressWarnings("unchecked")
- private LinkedHashMap<String,Object> _tplSubstitutionMappings() {
- if(tpl.get(SUBSTITUTION_MAPPINGS) != null) {
- return (LinkedHashMap<String,Object>)tpl.get(SUBSTITUTION_MAPPINGS);
- }
- else {
- return new LinkedHashMap<String,Object>();
- }
- }
-
- @SuppressWarnings("unchecked")
- private LinkedHashMap<String,Object> _tplGroups() {
- if(tpl.get(GROUPS) != null) {
- return (LinkedHashMap<String,Object>)tpl.get(GROUPS);
- }
- else {
- return new LinkedHashMap<String,Object>();
- }
- }
-
- @SuppressWarnings("unchecked")
- private ArrayList<Object> _tplPolicies() {
- if(tpl.get(POLICIES) != null) {
- return (ArrayList<Object>)tpl.get(POLICIES);
- }
- else {
- return new ArrayList<Object>();
- }
- }
-
- private void _validateField() {
- for(String name: tpl.keySet()) {
- boolean bFound = false;
- for(String section: SECTIONS) {
- if(name.equals(section)) {
- bFound = true;
- break;
- }
- }
- if(!bFound) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "UnknownFieldError: TopologyTemplate contains unknown field \"%s\"",name));
- }
- }
- }
-
- @SuppressWarnings("unchecked")
- private void _processIntrinsicFunctions() {
- // Process intrinsic functions
-
- // Current implementation processes functions within node template
- // properties, requirements, interfaces inputs and template outputs.
-
- if(nodeTemplates != null) {
- for(NodeTemplate nt: nodeTemplates) {
- for(Property prop: nt.getPropertiesObjects()) {
- prop.setValue(Function.getFunction(this,nt,prop.getValue()));
- }
- for(InterfacesDef ifd: nt.getInterfaces()) {
- LinkedHashMap<String,Object> ifin = ifd.getInputs();
- if(ifin != null) {
- for(Map.Entry<String,Object> me: ifin.entrySet()) {
- String name = me.getKey();
- Object value = Function.getFunction(this,nt,me.getValue());
- ifd.setInput(name,value);
- }
- }
- }
- if(nt.getRequirements() != null &&
- nt.getRequirements() instanceof ArrayList) {
- for(Object oreq: nt.getRequirements()) {
- LinkedHashMap<String,Object> req = (LinkedHashMap<String,Object>)oreq;
- LinkedHashMap<String,Object> rel = req;
- for(String reqName: req.keySet()) {
- Object reqItem = req.get(reqName);
- if(reqItem instanceof LinkedHashMap) {
- Object t = ((LinkedHashMap<String,Object>)reqItem).get("relationship");
- // it can be a string or a LHM...
- if(t instanceof LinkedHashMap) {
- rel = (LinkedHashMap<String,Object>)t;
- }
- else {
- // we set it to null to fail the next test
-								// and avoid the get("properties") below
- rel = null;
- }
- break;
- }
- }
- if(rel != null && rel.get("properties") != null) {
- LinkedHashMap<String,Object> relprops =
- (LinkedHashMap<String,Object>)rel.get("properties");
- for(String key: relprops.keySet()) {
- Object value = relprops.get(key);
- Object func = Function.getFunction(this,req,value);
- relprops.put(key,func);
- }
- }
- }
- }
- if(nt.getCapabilitiesObjects() != null) {
- for(Capability cap: nt.getCapabilitiesObjects()) {
- if(cap.getPropertiesObjects() != null) {
- for(Property prop: cap.getPropertiesObjects()) {
- Object propvalue = Function.getFunction(this,nt,prop.getValue());
- if(propvalue instanceof GetInput) {
- propvalue = ((GetInput)propvalue).result();
- for(String p: cap.getProperties().keySet()) {
- //Object v = cap.getProperties().get(p);
- if(p.equals(prop.getName())) {
- cap.setProperty(p,propvalue);
- }
- }
- }
- }
- }
- }
- }
- for(RelationshipType rel: nt.getRelationships().keySet()) {
- NodeTemplate node = nt.getRelationships().get(rel);
- ArrayList<RelationshipTemplate> relTpls = node.getRelationshipTemplate();
- if(relTpls != null) {
- for(RelationshipTemplate relTpl: relTpls) {
- // TT 5
- for(InterfacesDef iface: relTpl.getInterfaces()) {
- if(iface.getInputs() != null) {
- for(String name: iface.getInputs().keySet()) {
- Object value = iface.getInputs().get(name);
- Object func = Function.getFunction(
- this,
- relTpl,
- value);
- iface.setInput(name,func);
- }
- }
- }
- }
- }
- }
- }
- }
- for(Output output: outputs) {
- Object func = Function.getFunction(this,outputs,output.getValue());
- if(func instanceof GetAttribute) {
- output.setAttr(Output.VALUE,func);
- }
- }
- }
-
- public static String getSubMappingNodeType(LinkedHashMap<String,Object> topologyTpl) {
- if(topologyTpl != null && topologyTpl instanceof LinkedHashMap) {
- Object submapTpl = topologyTpl.get(SUBSTITUTION_MAPPINGS);
- return SubstitutionMappings.stGetNodeType((LinkedHashMap<String,Object>)submapTpl);
- }
- return null;
- }
-
- // getters
-
- public LinkedHashMap<String,Object> getTpl() {
- return tpl;
- }
-
- public LinkedHashMap<String,Object> getMetadata() {
- return metaData;
- }
-
- public ArrayList<Input> getInputs() {
- return inputs;
- }
-
- public ArrayList<Output> getOutputs() {
- return outputs;
- }
-
- public ArrayList<Policy> getPolicies() {
- return policies;
- }
-
- public ArrayList<RelationshipTemplate> getRelationshipTemplates() {
- return relationshipTemplates;
- }
-
- public ArrayList<NodeTemplate> getNodeTemplates() {
- return nodeTemplates;
- }
-
- public ArrayList<Group> getGroups() {
- return groups;
- }
-
- public SubstitutionMappings getSubstitutionMappings() {
- return substitutionMappings;
- }
-
- public LinkedHashMap<String,Object> getParsedParams() {
- return parsedParams;
- }
-}
-
-/*python
-
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-
-import logging
-
-from toscaparser.common import exception
-from toscaparser.dataentity import DataEntity
-from toscaparser import functions
-from toscaparser.groups import Group
-from toscaparser.nodetemplate import NodeTemplate
-from toscaparser.parameters import Input
-from toscaparser.parameters import Output
-from toscaparser.policy import Policy
-from toscaparser.relationship_template import RelationshipTemplate
-from toscaparser.substitution_mappings import SubstitutionMappings
-from toscaparser.tpl_relationship_graph import ToscaGraph
-from toscaparser.utils.gettextutils import _
-
-
-# Topology template key names
-SECTIONS = (DESCRIPTION, INPUTS, NODE_TEMPLATES,
- RELATIONSHIP_TEMPLATES, OUTPUTS, GROUPS,
- SUBSTITUION_MAPPINGS, POLICIES) = \
- ('description', 'inputs', 'node_templates',
- 'relationship_templates', 'outputs', 'groups',
- 'substitution_mappings', 'policies')
-
-log = logging.getLogger("tosca.model")
-
-
-class TopologyTemplate(object):
-
- '''Load the template data.'''
- def __init__(self, template, custom_defs,
- rel_types=None, parsed_params=None,
- sub_mapped_node_template=None):
- self.tpl = template
- self.sub_mapped_node_template = sub_mapped_node_template
- if self.tpl:
- self.custom_defs = custom_defs
- self.rel_types = rel_types
- self.parsed_params = parsed_params
- self._validate_field()
- self.description = self._tpl_description()
- self.inputs = self._inputs()
- self.relationship_templates = self._relationship_templates()
- self.nodetemplates = self._nodetemplates()
- self.outputs = self._outputs()
- if hasattr(self, 'nodetemplates'):
- self.graph = ToscaGraph(self.nodetemplates)
- self.groups = self._groups()
- self.policies = self._policies()
- self._process_intrinsic_functions()
- self.substitution_mappings = self._substitution_mappings()
-
- def _inputs(self):
- inputs = []
- for name, attrs in self._tpl_inputs().items():
- input = Input(name, attrs)
- if self.parsed_params and name in self.parsed_params:
- input.validate(self.parsed_params[name])
- else:
- default = input.default
- if default:
- input.validate(default)
- if (self.parsed_params and input.name not in self.parsed_params
- or self.parsed_params is None) and input.required \
- and input.default is None:
- log.warning(_('The required parameter %s '
- 'is not provided') % input.name)
-
- inputs.append(input)
- return inputs
-
- def _nodetemplates(self):
- nodetemplates = []
- tpls = self._tpl_nodetemplates()
- if tpls:
- for name in tpls:
- tpl = NodeTemplate(name, tpls, self.custom_defs,
- self.relationship_templates,
- self.rel_types)
- if (tpl.type_definition and
- (tpl.type in tpl.type_definition.TOSCA_DEF or
- (tpl.type not in tpl.type_definition.TOSCA_DEF and
- bool(tpl.custom_def)))):
- tpl.validate(self)
- nodetemplates.append(tpl)
- return nodetemplates
-
- def _relationship_templates(self):
- rel_templates = []
- tpls = self._tpl_relationship_templates()
- for name in tpls:
- tpl = RelationshipTemplate(tpls[name], name, self.custom_defs)
- rel_templates.append(tpl)
- return rel_templates
-
- def _outputs(self):
- outputs = []
- for name, attrs in self._tpl_outputs().items():
- output = Output(name, attrs)
- output.validate()
- outputs.append(output)
- return outputs
-
- def _substitution_mappings(self):
- tpl_substitution_mapping = self._tpl_substitution_mappings()
- # if tpl_substitution_mapping and self.sub_mapped_node_template:
- if tpl_substitution_mapping:
- return SubstitutionMappings(tpl_substitution_mapping,
- self.nodetemplates,
- self.inputs,
- self.outputs,
- self.sub_mapped_node_template,
- self.custom_defs)
-
- def _policies(self):
- policies = []
- for policy in self._tpl_policies():
- for policy_name, policy_tpl in policy.items():
- target_list = policy_tpl.get('targets')
- if target_list and len(target_list) >= 1:
- target_objects = []
- targets_type = "groups"
- target_objects = self._get_policy_groups(target_list)
- if not target_objects:
- targets_type = "node_templates"
- target_objects = self._get_group_members(target_list)
- policyObj = Policy(policy_name, policy_tpl,
- target_objects, targets_type,
- self.custom_defs)
- policies.append(policyObj)
- return policies
-
- def _groups(self):
- groups = []
- member_nodes = None
- for group_name, group_tpl in self._tpl_groups().items():
- member_names = group_tpl.get('members')
- if member_names is not None:
- DataEntity.validate_datatype('list', member_names)
- if len(member_names) < 1 or \
- len(member_names) != len(set(member_names)):
- exception.ExceptionCollector.appendException(
- exception.InvalidGroupTargetException(
- message=_('Member nodes "%s" should be >= 1 '
- 'and not repeated') % member_names))
- else:
- member_nodes = self._get_group_members(member_names)
- group = Group(group_name, group_tpl,
- member_nodes,
- self.custom_defs)
- groups.append(group)
- return groups
-
- def _get_group_members(self, member_names):
- member_nodes = []
- self._validate_group_members(member_names)
- for member in member_names:
- for node in self.nodetemplates:
- if node.name == member:
- member_nodes.append(node)
- return member_nodes
-
- def _get_policy_groups(self, member_names):
- member_groups = []
- for member in member_names:
- for group in self.groups:
- if group.name == member:
- member_groups.append(group)
- return member_groups
-
- def _validate_group_members(self, members):
- node_names = []
- for node in self.nodetemplates:
- node_names.append(node.name)
- for member in members:
- if member not in node_names:
- exception.ExceptionCollector.appendException(
- exception.InvalidGroupTargetException(
- message=_('Target member "%s" is not found in '
- 'node_templates') % member))
-
- # topology template can act like node template
- # it is exposed by substitution_mappings.
- def nodetype(self):
- return self.substitution_mappings.node_type \
- if self.substitution_mappings else None
-
- def capabilities(self):
- return self.substitution_mappings.capabilities \
- if self.substitution_mappings else None
-
- def requirements(self):
- return self.substitution_mappings.requirements \
- if self.substitution_mappings else None
-
- def _tpl_description(self):
- description = self.tpl.get(DESCRIPTION)
- if description:
- return description.rstrip()
-
- def _tpl_inputs(self):
- return self.tpl.get(INPUTS) or {}
-
- def _tpl_nodetemplates(self):
- return self.tpl.get(NODE_TEMPLATES)
-
- def _tpl_relationship_templates(self):
- return self.tpl.get(RELATIONSHIP_TEMPLATES) or {}
-
- def _tpl_outputs(self):
- return self.tpl.get(OUTPUTS) or {}
-
- def _tpl_substitution_mappings(self):
- return self.tpl.get(SUBSTITUION_MAPPINGS) or {}
-
- def _tpl_groups(self):
- return self.tpl.get(GROUPS) or {}
-
- def _tpl_policies(self):
- return self.tpl.get(POLICIES) or {}
-
- def _validate_field(self):
- for name in self.tpl:
- if name not in SECTIONS:
- exception.ExceptionCollector.appendException(
- exception.UnknownFieldError(what='Template', field=name))
-
- def _process_intrinsic_functions(self):
- """Process intrinsic functions
-
- Current implementation processes functions within node template
- properties, requirements, interfaces inputs and template outputs.
- """
- if hasattr(self, 'nodetemplates'):
- for node_template in self.nodetemplates:
- for prop in node_template.get_properties_objects():
- prop.value = functions.get_function(self,
- node_template,
- prop.value)
- for interface in node_template.interfaces:
- if interface.inputs:
- for name, value in interface.inputs.items():
- interface.inputs[name] = functions.get_function(
- self,
- node_template,
- value)
- if node_template.requirements and \
- isinstance(node_template.requirements, list):
- for req in node_template.requirements:
- rel = req
- for req_name, req_item in req.items():
- if isinstance(req_item, dict):
- rel = req_item.get('relationship')
- break
- if rel and 'properties' in rel:
- for key, value in rel['properties'].items():
- rel['properties'][key] = \
- functions.get_function(self,
- req,
- value)
- if node_template.get_capabilities_objects():
- for cap in node_template.get_capabilities_objects():
- if cap.get_properties_objects():
- for prop in cap.get_properties_objects():
- propvalue = functions.get_function(
- self,
- node_template,
- prop.value)
- if isinstance(propvalue, functions.GetInput):
- propvalue = propvalue.result()
- for p, v in cap._properties.items():
- if p == prop.name:
- cap._properties[p] = propvalue
- for rel, node in node_template.relationships.items():
- rel_tpls = node.relationship_tpl
- if rel_tpls:
- for rel_tpl in rel_tpls:
- for interface in rel_tpl.interfaces:
- if interface.inputs:
- for name, value in \
- interface.inputs.items():
- interface.inputs[name] = \
- functions.get_function(self,
- rel_tpl,
- value)
- for output in self.outputs:
- func = functions.get_function(self, self.outputs, output.value)
- if isinstance(func, functions.GetAttribute):
- output.attrs[output.VALUE] = func
-
- @classmethod
- def get_sub_mapping_node_type(cls, topology_tpl):
- if topology_tpl and isinstance(topology_tpl, dict):
- submap_tpl = topology_tpl.get(SUBSTITUION_MAPPINGS)
- return SubstitutionMappings.get_node_type(submap_tpl)
-*/ \ No newline at end of file
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/ToscaGraph.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/ToscaGraph.java
deleted file mode 100644
index 2de3bb9..0000000
--- a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/ToscaGraph.java
+++ /dev/null
@@ -1,109 +0,0 @@
-package org.openecomp.sdc.toscaparser.api;
-
-import java.util.ArrayList;
-import java.util.LinkedHashMap;
-
-import org.openecomp.sdc.toscaparser.api.elements.RelationshipType;
-
-//import java.util.Iterator;
-
-public class ToscaGraph {
- // Graph of Tosca Node Templates
-
- private ArrayList<NodeTemplate> nodeTemplates;
- private LinkedHashMap<String,NodeTemplate> vertices;
-
- public ToscaGraph(ArrayList<NodeTemplate> inodeTemplates) {
- nodeTemplates = inodeTemplates;
- vertices = new LinkedHashMap<String,NodeTemplate>();
- _create();
- }
-
- private void _createVertex(NodeTemplate node) {
- if(vertices.get(node.getName()) == null) {
- vertices.put(node.getName(),node);
- }
- }
-
- private void _createEdge(NodeTemplate node1,
- NodeTemplate node2,
- RelationshipType relation) {
- if(vertices.get(node1.getName()) == null) {
- _createVertex(node1);
- }
- // add the edge on the (possibly just created) source vertex, as in the Python original below
- vertices.get(node1.getName())._addNext(node2,relation);
- }
-
- public NodeTemplate vertex(String name) {
- if(vertices.get(name) != null) {
- return vertices.get(name);
- }
- return null;
- }
-
-// public Iterator getIter() {
-// return vertices.values().iterator();
-// }
-
- private void _create() {
- for(NodeTemplate node: nodeTemplates) {
- LinkedHashMap<RelationshipType,NodeTemplate> relation = node.getRelationships();
- if(relation != null) {
- for(RelationshipType rel: relation.keySet()) {
- NodeTemplate nodeTpls = relation.get(rel);
- for(NodeTemplate tpl: nodeTemplates) {
- if(tpl.getName().equals(nodeTpls.getName())) {
- _createEdge(node,tpl,rel);
- }
- }
- }
- }
- _createVertex(node);
- }
- }
-
- @Override
- public String toString() {
- return "ToscaGraph{" +
- "nodeTemplates=" + nodeTemplates +
- ", vertices=" + vertices +
- '}';
- }
-}
-
-/*python
-
-class ToscaGraph(object):
- '''Graph of Tosca Node Templates.'''
- def __init__(self, nodetemplates):
- self.nodetemplates = nodetemplates
- self.vertices = {}
- self._create()
-
- def _create_vertex(self, node):
- if node not in self.vertices:
- self.vertices[node.name] = node
-
- def _create_edge(self, node1, node2, relationship):
- if node1 not in self.vertices:
- self._create_vertex(node1)
- self.vertices[node1.name]._add_next(node2,
- relationship)
-
- def vertex(self, node):
- if node in self.vertices:
- return self.vertices[node]
-
- def __iter__(self):
- return iter(self.vertices.values())
-
- def _create(self):
- for node in self.nodetemplates:
- relation = node.relationships
- if relation:
- for rel, nodetpls in relation.items():
- for tpl in self.nodetemplates:
- if tpl.name == nodetpls.name:
- self._create_edge(node, tpl, rel)
- self._create_vertex(node)
-*/ \ No newline at end of file
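Note: for orientation, a minimal usage sketch of the ToscaGraph class removed above. It assumes the snippet lives in the same org.openecomp.sdc.toscaparser.api package; the getter names are taken from the deleted sources, and the helper name and output format are illustrative only.

    import java.util.ArrayList;
    import java.util.LinkedHashMap;
    import org.openecomp.sdc.toscaparser.api.elements.RelationshipType;

    // Sketch: list the edges ToscaGraph discovers between node templates.
    static void dumpEdges(ArrayList<NodeTemplate> nodeTemplates) {
        ToscaGraph graph = new ToscaGraph(nodeTemplates);
        for (NodeTemplate node : nodeTemplates) {
            NodeTemplate vertex = graph.vertex(node.getName());        // null if the node was never registered
            LinkedHashMap<RelationshipType, NodeTemplate> rels =
                    (vertex != null) ? vertex.getRelationships() : null;
            if (rels != null) {
                rels.forEach((rel, target) ->
                        System.out.println(node.getName() + " -> " + target.getName() + " (" + rel + ")"));
            }
        }
    }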
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/ToscaTemplate.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/ToscaTemplate.java
deleted file mode 100644
index b13a2a5..0000000
--- a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/ToscaTemplate.java
+++ /dev/null
@@ -1,1002 +0,0 @@
-package org.openecomp.sdc.toscaparser.api;
-
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.*;
-
-import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector;
-import org.openecomp.sdc.toscaparser.api.common.JToscaException;
-import org.openecomp.sdc.toscaparser.api.elements.EntityType;
-import org.openecomp.sdc.toscaparser.api.elements.Metadata;
-import org.openecomp.sdc.toscaparser.api.extensions.ExtTools;
-import org.openecomp.sdc.toscaparser.api.parameters.Input;
-import org.openecomp.sdc.toscaparser.api.parameters.Output;
-import org.openecomp.sdc.toscaparser.api.prereq.CSAR;
-import org.openecomp.sdc.toscaparser.api.utils.JToscaErrorCodes;
-import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.yaml.snakeyaml.Yaml;
-
-public class ToscaTemplate extends Object {
-
- private static Logger log = LoggerFactory.getLogger(ToscaTemplate.class.getName());
-
- // TOSCA template key names
- private static final String DEFINITION_VERSION = "tosca_definitions_version";
- private static final String DEFAULT_NAMESPACE = "tosca_default_namespace";
- private static final String TEMPLATE_NAME = "template_name";
- private static final String TOPOLOGY_TEMPLATE = "topology_template";
- private static final String TEMPLATE_AUTHOR = "template_author";
- private static final String TEMPLATE_VERSION = "template_version";
- private static final String DESCRIPTION = "description";
- private static final String IMPORTS = "imports";
- private static final String DSL_DEFINITIONS = "dsl_definitions";
- private static final String NODE_TYPES = "node_types";
- private static final String RELATIONSHIP_TYPES = "relationship_types";
- private static final String RELATIONSHIP_TEMPLATES = "relationship_templates";
- private static final String CAPABILITY_TYPES = "capability_types";
- private static final String ARTIFACT_TYPES = "artifact_types";
- private static final String DATA_TYPES = "data_types";
- private static final String INTERFACE_TYPES = "interface_types";
- private static final String POLICY_TYPES = "policy_types";
- private static final String GROUP_TYPES = "group_types";
- private static final String REPOSITORIES = "repositories";
-
- private static String SECTIONS[] = {
- DEFINITION_VERSION, DEFAULT_NAMESPACE, TEMPLATE_NAME,
- TOPOLOGY_TEMPLATE, TEMPLATE_AUTHOR, TEMPLATE_VERSION,
- DESCRIPTION, IMPORTS, DSL_DEFINITIONS, NODE_TYPES,
- RELATIONSHIP_TYPES, RELATIONSHIP_TEMPLATES,
- CAPABILITY_TYPES, ARTIFACT_TYPES, DATA_TYPES,
- INTERFACE_TYPES, POLICY_TYPES, GROUP_TYPES, REPOSITORIES
- };
-
- // Sections that are specific to individual template definitions
- private static final String METADATA = "metadata";
- private static ArrayList<String> SPECIAL_SECTIONS;
-
- private ExtTools exttools = new ExtTools();
-
- private ArrayList<String> VALID_TEMPLATE_VERSIONS;
- private LinkedHashMap<String,ArrayList<String>> ADDITIONAL_SECTIONS;
-
- private boolean isFile;
- private String path;
- private String inputPath;
- private LinkedHashMap<String,Object> parsedParams;
- private LinkedHashMap<String,Object> tpl;
- private String version;
- private ArrayList<Object> imports;
- private LinkedHashMap<String,Object> relationshipTypes;
- private Metadata metaData;
- private String description;
- private TopologyTemplate topologyTemplate;
- private ArrayList<Repository> repositories;
- private ArrayList<Input> inputs;
- private ArrayList<RelationshipTemplate> relationshipTemplates;
- private ArrayList<NodeTemplate> nodeTemplates;
- private ArrayList<Output> outputs;
- private ArrayList<Policy> policies;
- private LinkedHashMap<String,Object> nestedToscaTplsWithTopology;
- private ArrayList<TopologyTemplate> nestedToscaTemplatesWithTopology;
- private ToscaGraph graph;
- private String csarTempDir;
- private int nestingLoopCounter;
- private LinkedHashMap<String, LinkedHashMap<String, Object>> metaProperties;
-
- @SuppressWarnings("unchecked")
- public ToscaTemplate(String _path,
- LinkedHashMap<String,Object> _parsedParams,
- boolean aFile,
- LinkedHashMap<String,Object> yamlDictTpl) throws JToscaException {
-
- ThreadLocalsHolder.setCollector(new ExceptionCollector(_path));
-
- VALID_TEMPLATE_VERSIONS = new ArrayList<>();
- VALID_TEMPLATE_VERSIONS.add("tosca_simple_yaml_1_0");
- VALID_TEMPLATE_VERSIONS.addAll(exttools.getVersions());
- ADDITIONAL_SECTIONS = new LinkedHashMap<>();
- SPECIAL_SECTIONS = new ArrayList<>();
- SPECIAL_SECTIONS.add(METADATA);
- ADDITIONAL_SECTIONS.put("tosca_simple_yaml_1_0",SPECIAL_SECTIONS);
- ADDITIONAL_SECTIONS.putAll(exttools.getSections());
-
- //long startTime = System.nanoTime();
-
-
- isFile = aFile;
- inputPath = null;
- path = null;
- tpl = null;
- csarTempDir = null;
- nestedToscaTplsWithTopology = new LinkedHashMap<String,Object>();
- nestedToscaTemplatesWithTopology = new ArrayList<TopologyTemplate>();
-
- if(_path != null && !_path.isEmpty()) {
- // save the original input path
- inputPath = _path;
- // get the actual path (will change with CSAR)
- path = _getPath(_path);
- // load the YAML template
- if (path != null && !path.isEmpty()) {
- try {
- //System.out.println("Loading YAML file " + path);
- log.debug("ToscaTemplate Loading YAMEL file {}", path);
- InputStream input = new FileInputStream(new File(path));
- Yaml yaml = new Yaml();
- Object data = yaml.load(input);
- this.tpl = (LinkedHashMap<String,Object>) data;
- }
- catch (FileNotFoundException e) {
- log.error("ToscaTemplate - Exception loading yaml: {}", e.getMessage());
- return;
- }
- catch(Exception e) {
- log.error("ToscaTemplate - Error loading yaml, aborting");
- return;
- }
-
- if(yamlDictTpl != null) {
- //msg = (_('Both path and yaml_dict_tpl arguments were '
- // 'provided. Using path and ignoring yaml_dict_tpl.'))
- //log.info(msg)
- log.debug("ToscaTemplate - Both path and yaml_dict_tpl arguments were provided. Using path and ignoring yaml_dict_tpl");
- }
- }
- else {
- // no input to process...
- _abort();
- }
- }
- else {
- if(yamlDictTpl != null) {
- tpl = yamlDictTpl;
- }
- else {
- ThreadLocalsHolder.getCollector().appendException(
- "ValueError: No path or yaml_dict_tpl was provided. There is nothing to parse");
- log.debug("ToscaTemplate ValueError: No path or yaml_dict_tpl was provided. There is nothing to parse");
-
- }
- }
-
- if(tpl != null) {
- parsedParams = _parsedParams;
- _validateField();
- this.version = _tplVersion();
- this.metaData = _tplMetaData();
- this.relationshipTypes = _tplRelationshipTypes();
- this.description = _tplDescription();
- this.topologyTemplate = _topologyTemplate();
- this.repositories = _tplRepositories();
- if(topologyTemplate.getTpl() != null) {
- this.inputs = _inputs();
- this.relationshipTemplates = _relationshipTemplates();
- this.nodeTemplates = _nodeTemplates();
- this.outputs = _outputs();
- this.policies = _policies();
- _handleNestedToscaTemplatesWithTopology();
- graph = new ToscaGraph(nodeTemplates);
- }
- }
-
- if(csarTempDir != null) {
- CSAR.deleteDir(new File(csarTempDir));
- csarTempDir = null;
- }
-
- verifyTemplate();
-
- }
-
- private void _abort() throws JToscaException {
- // print out all exceptions caught
- verifyTemplate();
- throw new JToscaException("jtosca aborting", JToscaErrorCodes.PATH_NOT_VALID.getValue());
- }
- private TopologyTemplate _topologyTemplate() {
- return new TopologyTemplate(
- _tplTopologyTemplate(),
- _getAllCustomDefs(imports),
- relationshipTypes,
- parsedParams,
- null);
- }
-
- private ArrayList<Input> _inputs() {
- return topologyTemplate.getInputs();
- }
-
- private ArrayList<NodeTemplate> _nodeTemplates() {
- return topologyTemplate.getNodeTemplates();
- }
-
- private ArrayList<RelationshipTemplate> _relationshipTemplates() {
- return topologyTemplate.getRelationshipTemplates();
- }
-
- private ArrayList<Output> _outputs() {
- return topologyTemplate.getOutputs();
- }
-
- private String _tplVersion() {
- return (String)tpl.get(DEFINITION_VERSION);
- }
-
- @SuppressWarnings("unchecked")
- private Metadata _tplMetaData() {
- Object mdo = tpl.get(METADATA);
- if(mdo instanceof LinkedHashMap) {
- return new Metadata((Map<String, Object>)mdo);
- }
- else {
- return null;
- }
- }
-
- private String _tplDescription() {
- return (String)tpl.get(DESCRIPTION);
- }
-
- private ArrayList<Object> _tplImports() {
- return (ArrayList<Object>)tpl.get(IMPORTS);
- }
-
- private ArrayList<Repository> _tplRepositories() {
- LinkedHashMap<String,Object> repositories =
- (LinkedHashMap<String,Object>)tpl.get(REPOSITORIES);
- ArrayList<Repository> reposit = new ArrayList<>();
- if(repositories != null) {
- for(Map.Entry<String,Object> me: repositories.entrySet()) {
- Repository reposits = new Repository(me.getKey(),me.getValue());
- reposit.add(reposits);
- }
- }
- return reposit;
- }
-
- private LinkedHashMap<String,Object> _tplRelationshipTypes() {
- return (LinkedHashMap<String,Object>)_getCustomTypes(RELATIONSHIP_TYPES,null);
- }
-
- @SuppressWarnings("unchecked")
- private LinkedHashMap<String,Object> _tplRelationshipTemplates() {
- return (LinkedHashMap<String,Object>)_tplTopologyTemplate().get(RELATIONSHIP_TEMPLATES);
- }
-
- @SuppressWarnings("unchecked")
- private LinkedHashMap<String,Object> _tplTopologyTemplate() {
- return (LinkedHashMap<String,Object>)tpl.get(TOPOLOGY_TEMPLATE);
- }
-
- private ArrayList<Policy> _policies() {
- return topologyTemplate.getPolicies();
- }
-
- private LinkedHashMap<String,Object> _getAllCustomDefs(ArrayList<Object> alImports) {
-
- String types[] = {
- IMPORTS, NODE_TYPES, CAPABILITY_TYPES, RELATIONSHIP_TYPES,
- DATA_TYPES, INTERFACE_TYPES, POLICY_TYPES, GROUP_TYPES
- };
- LinkedHashMap<String,Object> customDefsFinal = new LinkedHashMap<String,Object>();
- LinkedHashMap<String,Object> customDefs = _getCustomTypes(types,alImports);
- if(customDefs != null) {
- customDefsFinal.putAll(customDefs);
- if(customDefs.get(IMPORTS) != null) {
- @SuppressWarnings("unchecked")
- LinkedHashMap<String,Object> importDefs = _getAllCustomDefs((ArrayList<Object>)customDefs.get(IMPORTS));
- customDefsFinal.putAll(importDefs);
- }
- }
-
- // As imports are not custom_types, remove from the dict
- customDefsFinal.remove(IMPORTS);
-
- return customDefsFinal;
- }
-
- @SuppressWarnings("unchecked")
- private LinkedHashMap<String,Object> _getCustomTypes(Object typeDefinitions,ArrayList<Object> alImports) {
-
- // Handle custom types defined in imported template files
- // This method loads the custom type definitions referenced in "imports"
- // section of the TOSCA YAML template.
-
- LinkedHashMap<String,Object> customDefs = new LinkedHashMap<String,Object>();
- ArrayList<String> typeDefs = new ArrayList<String>();
- if(typeDefinitions instanceof String[]) {
- for(String s: (String[])typeDefinitions) {
- typeDefs.add(s);
- }
- }
- else {
- typeDefs.add((String)typeDefinitions);
- }
-
- if(alImports == null) {
- alImports = _tplImports();
- }
-
- if(alImports != null) {
- ImportsLoader customService = new ImportsLoader(alImports,path,typeDefs,tpl);
- ArrayList<LinkedHashMap<String,Object>> nestedToscaTpls = customService.getNestedToscaTpls();
- _updateNestedToscaTplsWithTopology(nestedToscaTpls);
-
- customDefs = customService.getCustomDefs();
- if(customDefs == null) {
- return null;
- }
- }
-
- //Handle custom types defined in current template file
- for(String td: typeDefs) {
- if(!td.equals(IMPORTS)) {
- LinkedHashMap<String,Object> innerCustomTypes = (LinkedHashMap<String,Object> )tpl.get(td);
- if(innerCustomTypes != null) {
- customDefs.putAll(innerCustomTypes);
- }
- }
- }
- return customDefs;
- }
-
- private void _updateNestedToscaTplsWithTopology(ArrayList<LinkedHashMap<String,Object>> nestedToscaTpls) {
- for(LinkedHashMap<String,Object> ntpl: nestedToscaTpls) {
- // there is just one key:value pair in ntpl
- for(Map.Entry<String,Object> me: ntpl.entrySet()) {
- String fileName = me.getKey();
- @SuppressWarnings("unchecked")
- LinkedHashMap<String,Object> toscaTpl = (LinkedHashMap<String,Object>)me.getValue();
- if(toscaTpl.get(TOPOLOGY_TEMPLATE) != null) {
- if(nestedToscaTplsWithTopology.get(fileName) == null) {
- nestedToscaTplsWithTopology.putAll(ntpl);
- }
- }
- }
- }
- }
-
- // **experimental** (multi-level nesting) RECURSIVE - BEWARE OF INFINITE LOOPS...
- private void _handleNestedToscaTemplatesWithTopology2(TopologyTemplate tt) {
- if(++nestingLoopCounter > 10) {
- log.error("ToscaTemplate - _handleNestedToscaTemplatesWithTopology2 - Nested Topologies Loop: too many levels, aborting");
- return;
- }
- for(Map.Entry<String,Object> me: nestedToscaTplsWithTopology.entrySet()) {
- String fname = me.getKey();
- LinkedHashMap<String,Object> toscaTpl =
- (LinkedHashMap<String,Object>)me.getValue();
- for(NodeTemplate nt: tt.getNodeTemplates()) {
- if(_isSubMappedNode2(nt,toscaTpl)) {
- parsedParams = _getParamsForNestedTemplate(nt);
- LinkedHashMap<String,Object> topologyTpl =
- (LinkedHashMap<String,Object>)toscaTpl.get(TOPOLOGY_TEMPLATE);
- TopologyTemplate topologyWithSubMapping =
- new TopologyTemplate(topologyTpl,
- _getAllCustomDefs(null),
- relationshipTypes,
- parsedParams,
- nt);
- if(topologyWithSubMapping.getSubstitutionMappings() != null) {
- // Record nested topology templates in top level template
- //nestedToscaTemplatesWithTopology.add(topologyWithSubMapping);
- // Set substitution mapping object for mapped node
- nt.setSubMappingToscaTemplate2(
- topologyWithSubMapping.getSubstitutionMappings());
- _handleNestedToscaTemplatesWithTopology2(topologyWithSubMapping);
- }
- }
- }
- }
- }
-
- private void _handleNestedToscaTemplatesWithTopology() {
- for(Map.Entry<String,Object> me: nestedToscaTplsWithTopology.entrySet()) {
- String fname = me.getKey();
- LinkedHashMap<String,Object> toscaTpl =
- (LinkedHashMap<String,Object>)me.getValue();
- for(NodeTemplate nt: nodeTemplates) {
- if(_isSubMappedNode(nt,toscaTpl)) {
- parsedParams = _getParamsForNestedTemplate(nt);
- ArrayList<Object> alim = (ArrayList<Object>)toscaTpl.get(IMPORTS);
- LinkedHashMap<String,Object> topologyTpl =
- (LinkedHashMap<String,Object>)toscaTpl.get(TOPOLOGY_TEMPLATE);
- TopologyTemplate topologyWithSubMapping =
- new TopologyTemplate(topologyTpl,
- //_getAllCustomDefs(null),
- _getAllCustomDefs(alim),
- relationshipTypes,
- parsedParams,
- nt);
- if(topologyWithSubMapping.getSubstitutionMappings() != null) {
- // Record nested topology templates in top level template
- nestedToscaTemplatesWithTopology.add(topologyWithSubMapping);
- // Set substitution mapping object for mapped node
- nt.setSubMappingToscaTemplate(
- topologyWithSubMapping.getSubstitutionMappings());
- }
- }
- }
- }
- }
-
- private void _validateField() {
- String sVersion = _tplVersion();
- if(sVersion == null) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "MissingRequiredField: Template is missing required field \"%s\"",DEFINITION_VERSION));
- }
- else {
- _validateVersion(sVersion);
- this.version = sVersion;
- }
-
- for (String sKey : tpl.keySet()) {
- boolean bFound = false;
- for (String sSection: SECTIONS) {
- if(sKey.equals(sSection)) {
- bFound = true;
- break;
- }
- }
- // check ADDITIONAL_SECTIONS
- if(!bFound) {
- if(ADDITIONAL_SECTIONS.get(version) != null &&
- ADDITIONAL_SECTIONS.get(version).contains(sKey)) {
- bFound = true;
- }
- }
- if(!bFound) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "UnknownFieldError: Template contains unknown field \"%s\"",
- sKey));
- }
- }
- }
-
- private void _validateVersion(String sVersion) {
- boolean bFound = false;
- for(String vtv: VALID_TEMPLATE_VERSIONS) {
- if(sVersion.equals(vtv)) {
- bFound = true;
- break;
- }
- }
- if(!bFound) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "InvalidTemplateVersion: \"%s\" is invalid. Valid versions are %s",
- sVersion,VALID_TEMPLATE_VERSIONS.toString()));
- }
- else if(!sVersion.equals("tosca_simple_yaml_1_0")) {
- EntityType.updateDefinitions(sVersion);
- }
- }
-
- private String _getPath(String _path) throws JToscaException {
- if (_path.toLowerCase().endsWith(".yaml") || _path.toLowerCase().endsWith(".yml")) {
- return _path;
- }
- else if (_path.toLowerCase().endsWith(".zip") || _path.toLowerCase().endsWith(".csar")) {
- // a CSAR archive
- CSAR csar = new CSAR(_path, isFile);
- if (csar.validate()) {
- try {
- csar.decompress();
- metaProperties = csar.getMetaProperties();
- }
- catch (IOException e) {
- log.error("ToscaTemplate - _getPath - IOException trying to decompress {}", _path);
- return null;
- }
- isFile = true; // the file has been decompressed locally
- csar.cleanup();
- csarTempDir = csar.getTempDir();
- return csar.getTempDir() + File.separator + csar.getMainTemplate();
- }
- }
- else {
- ThreadLocalsHolder.getCollector().appendException("ValueError: " + _path + " is not a valid file");
- return null;
- }
- return null;
- }
-
- private void verifyTemplate() throws JToscaException {
- ThreadLocalsHolder.getCollector().setWantTrace(false);
-
- //Warnings
- int warningsCount = ThreadLocalsHolder.getCollector().warningsCaught();
- if (warningsCount > 0) {
- List<String> warningsStrings = ThreadLocalsHolder.getCollector().getWarningsReport();
- log.warn("####################################################################################################");
- log.warn("CSAR Warnings found! CSAR name - {}", inputPath);
- log.warn("ToscaTemplate - verifyTemplate - {} Parsing Warning{} occurred...", warningsCount, (warningsCount > 1 ? "s" : ""));
- for (String s : warningsStrings) {
- log.warn("{}. CSAR name - {}", s, inputPath);
- }
- log.warn("####################################################################################################");
- }
-
- //Criticals
- int criticalsCount = ThreadLocalsHolder.getCollector().criticalsCaught();
- if (criticalsCount > 0) {
- List<String> criticalStrings = ThreadLocalsHolder.getCollector().getCriticalsReport();
- log.error("####################################################################################################");
- log.error("ToscaTemplate - verifyTemplate - {} Parsing Critical{} occurred...", criticalsCount, (criticalsCount > 1 ? "s" : ""));
- for (String s : criticalStrings) {
- log.error("{}. CSAR name - {}", s, inputPath);
- }
- throw new JToscaException(String.format("CSAR Validation Failed. CSAR name - {}. Please check logs for details.", inputPath), JToscaErrorCodes.CSAR_TOSCA_VALIDATION_ERROR.getValue());
- }
- }
-
- public String getPath() {
- return path;
- }
-
- public String getVersion() {
- return version;
- }
-
- public String getDescription() {
- return description;
- }
-
- public TopologyTemplate getTopologyTemplate() {
- return topologyTemplate;
- }
-
- public Metadata getMetaData() {
- return metaData;
- }
-
- public ArrayList<Input> getInputs() {
- return inputs;
- }
-
- public ArrayList<Output> getOutputs() {
- return outputs;
- }
-
- public ArrayList<Policy> getPolicies() {
- return policies;
- }
-
- public ArrayList<NodeTemplate> getNodeTemplates() {
- return nodeTemplates;
- }
-
- public LinkedHashMap<String, Object> getMetaProperties(String propertiesFile) {
- return metaProperties.get(propertiesFile);
- }
-
- private boolean _isSubMappedNode(NodeTemplate nt,LinkedHashMap<String,Object> toscaTpl) {
- // Return true if the node template is substituted
- if(nt != null && nt.getSubMappingToscaTemplate() == null &&
- nt.getType().equals(getSubMappingNodeType(toscaTpl)) &&
- nt.getInterfaces().size() < 1) {
- return true;
- }
- return false;
- }
-
- private boolean _isSubMappedNode2(NodeTemplate nt,LinkedHashMap<String,Object> toscaTpl) {
- // Return true if the node template is substituted
- if(nt != null && nt.getSubMappingToscaTemplate2() == null &&
- nt.getType().equals(getSubMappingNodeType(toscaTpl)) &&
- nt.getInterfaces().size() < 1) {
- return true;
- }
- return false;
- }
-
- private LinkedHashMap<String,Object> _getParamsForNestedTemplate(NodeTemplate nt) {
- // Return total params for nested_template
- LinkedHashMap<String,Object> pparams;
- if(parsedParams != null) {
- pparams = parsedParams;
- }
- else {
- pparams = new LinkedHashMap<String,Object>();
- }
- if(nt != null) {
- for(String pname: nt.getProperties().keySet()) {
- pparams.put(pname,nt.getPropertyValue(pname));
- }
- }
- return pparams;
- }
-
- private String getSubMappingNodeType(LinkedHashMap<String,Object> toscaTpl) {
- // Return substitution mappings node type
- if(toscaTpl != null) {
- return TopologyTemplate.getSubMappingNodeType(
- (LinkedHashMap<String,Object>)toscaTpl.get(TOPOLOGY_TEMPLATE));
- }
- return null;
- }
-
- private boolean _hasSubstitutionMapping() {
- // Return True if the template has valid substitution mappings
- return topologyTemplate != null &&
- topologyTemplate.getSubstitutionMappings() != null;
- }
-
- public boolean hasNestedTemplates() {
- // Return True if the tosca template has nested templates
- return nestedToscaTemplatesWithTopology != null &&
- nestedToscaTemplatesWithTopology.size() >= 1;
-
- }
-
- public ArrayList<TopologyTemplate> getNestedTemplates() {
- return nestedToscaTemplatesWithTopology;
- }
-
- @Override
- public String toString() {
- return "ToscaTemplate{" +
- "exttools=" + exttools +
- ", VALID_TEMPLATE_VERSIONS=" + VALID_TEMPLATE_VERSIONS +
- ", ADDITIONAL_SECTIONS=" + ADDITIONAL_SECTIONS +
- ", isFile=" + isFile +
- ", path='" + path + '\'' +
- ", inputPath='" + inputPath + '\'' +
- ", parsedParams=" + parsedParams +
- ", tpl=" + tpl +
- ", version='" + version + '\'' +
- ", imports=" + imports +
- ", relationshipTypes=" + relationshipTypes +
- ", metaData=" + metaData +
- ", description='" + description + '\'' +
- ", topologyTemplate=" + topologyTemplate +
- ", repositories=" + repositories +
- ", inputs=" + inputs +
- ", relationshipTemplates=" + relationshipTemplates +
- ", nodeTemplates=" + nodeTemplates +
- ", outputs=" + outputs +
- ", policies=" + policies +
- ", nestedToscaTplsWithTopology=" + nestedToscaTplsWithTopology +
- ", nestedToscaTemplatesWithTopology=" + nestedToscaTemplatesWithTopology +
- ", graph=" + graph +
- ", csarTempDir='" + csarTempDir + '\'' +
- ", nestingLoopCounter=" + nestingLoopCounter +
- '}';
- }
-}
-
-/*python
-
-import logging
-import os
-
-from copy import deepcopy
-from toscaparser.common.exception import ExceptionCollector.collector
-from toscaparser.common.exception import InvalidTemplateVersion
-from toscaparser.common.exception import MissingRequiredFieldError
-from toscaparser.common.exception import UnknownFieldError
-from toscaparser.common.exception import ValidationError
-from toscaparser.elements.entity_type import update_definitions
-from toscaparser.extensions.exttools import ExtTools
-import org.openecomp.sdc.toscaparser.api.imports
-from toscaparser.prereq.csar import CSAR
-from toscaparser.repositories import Repository
-from toscaparser.topology_template import TopologyTemplate
-from toscaparser.tpl_relationship_graph import ToscaGraph
-from toscaparser.utils.gettextutils import _
-import org.openecomp.sdc.toscaparser.api.utils.yamlparser
-
-
-# TOSCA template key names
-SECTIONS = (DEFINITION_VERSION, DEFAULT_NAMESPACE, TEMPLATE_NAME,
- TOPOLOGY_TEMPLATE, TEMPLATE_AUTHOR, TEMPLATE_VERSION,
- DESCRIPTION, IMPORTS, DSL_DEFINITIONS, NODE_TYPES,
- RELATIONSHIP_TYPES, RELATIONSHIP_TEMPLATES,
- CAPABILITY_TYPES, ARTIFACT_TYPES, DATA_TYPES, INTERFACE_TYPES,
- POLICY_TYPES, GROUP_TYPES, REPOSITORIES) = \
- ('tosca_definitions_version', 'tosca_default_namespace',
- 'template_name', 'topology_template', 'template_author',
- 'template_version', 'description', 'imports', 'dsl_definitions',
- 'node_types', 'relationship_types', 'relationship_templates',
- 'capability_types', 'artifact_types', 'data_types',
- 'interface_types', 'policy_types', 'group_types', 'repositories')
-# Sections that are specific to individual template definitions
-SPECIAL_SECTIONS = (METADATA) = ('metadata')
-
-log = logging.getLogger("tosca.model")
-
-YAML_LOADER = toscaparser.utils.yamlparser.load_yaml
-
-
-class ToscaTemplate(object):
- exttools = ExtTools()
-
- VALID_TEMPLATE_VERSIONS = ['tosca_simple_yaml_1_0']
-
- VALID_TEMPLATE_VERSIONS.extend(exttools.get_versions())
-
- ADDITIONAL_SECTIONS = {'tosca_simple_yaml_1_0': SPECIAL_SECTIONS}
-
- ADDITIONAL_SECTIONS.update(exttools.get_sections())
-
- '''Load the template data.'''
- def __init__(self, path=None, parsed_params=None, a_file=True,
- yaml_dict_tpl=None):
-
- ExceptionCollector.collector.start()
- self.a_file = a_file
- self.input_path = None
- self.path = None
- self.tpl = None
- self.nested_tosca_tpls_with_topology = {}
- self.nested_tosca_templates_with_topology = []
- if path:
- self.input_path = path
- self.path = self._get_path(path)
- if self.path:
- self.tpl = YAML_LOADER(self.path, self.a_file)
- if yaml_dict_tpl:
- msg = (_('Both path and yaml_dict_tpl arguments were '
- 'provided. Using path and ignoring yaml_dict_tpl.'))
- log.info(msg)
- print(msg)
- else:
- if yaml_dict_tpl:
- self.tpl = yaml_dict_tpl
- else:
- ExceptionCollector.collector.appendException(
- ValueError(_('No path or yaml_dict_tpl was provided. '
- 'There is nothing to parse.')))
-
- if self.tpl:
- self.parsed_params = parsed_params
- self._validate_field()
- self.version = self._tpl_version()
- self.relationship_types = self._tpl_relationship_types()
- self.description = self._tpl_description()
- self.topology_template = self._topology_template()
- self.repositories = self._tpl_repositories()
- if self.topology_template.tpl:
- self.inputs = self._inputs()
- self.relationship_templates = self._relationship_templates()
- self.nodetemplates = self._nodetemplates()
- self.outputs = self._outputs()
- self._handle_nested_tosca_templates_with_topology()
- self.graph = ToscaGraph(self.nodetemplates)
-
- ExceptionCollector.collector.stop()
- self.verify_template()
-
- def _topology_template(self):
- return TopologyTemplate(self._tpl_topology_template(),
- self._get_all_custom_defs(),
- self.relationship_types,
- self.parsed_params,
- None)
-
- def _inputs(self):
- return self.topology_template.inputs
-
- def _nodetemplates(self):
- return self.topology_template.nodetemplates
-
- def _relationship_templates(self):
- return self.topology_template.relationship_templates
-
- def _outputs(self):
- return self.topology_template.outputs
-
- def _tpl_version(self):
- return self.tpl.get(DEFINITION_VERSION)
-
- def _tpl_description(self):
- desc = self.tpl.get(DESCRIPTION)
- if desc:
- return desc.rstrip()
-
- def _tpl_imports(self):
- return self.tpl.get(IMPORTS)
-
- def _tpl_repositories(self):
- repositories = self.tpl.get(REPOSITORIES)
- reposit = []
- if repositories:
- for name, val in repositories.items():
- reposits = Repository(name, val)
- reposit.append(reposits)
- return reposit
-
- def _tpl_relationship_types(self):
- return self._get_custom_types(RELATIONSHIP_TYPES)
-
- def _tpl_relationship_templates(self):
- topology_template = self._tpl_topology_template()
- return topology_template.get(RELATIONSHIP_TEMPLATES)
-
- def _tpl_topology_template(self):
- return self.tpl.get(TOPOLOGY_TEMPLATE)
-
- def _get_all_custom_defs(self, imports=None):
- types = [IMPORTS, NODE_TYPES, CAPABILITY_TYPES, RELATIONSHIP_TYPES,
- DATA_TYPES, INTERFACE_TYPES, POLICY_TYPES, GROUP_TYPES]
- custom_defs_final = {}
- custom_defs = self._get_custom_types(types, imports)
- if custom_defs:
- custom_defs_final.update(custom_defs)
- if custom_defs.get(IMPORTS):
- import_defs = self._get_all_custom_defs(
- custom_defs.get(IMPORTS))
- custom_defs_final.update(import_defs)
-
- # As imports are not custom_types, removing from the dict
- custom_defs_final.pop(IMPORTS, None)
- return custom_defs_final
-
- def _get_custom_types(self, type_definitions, imports=None):
- """Handle custom types defined in imported template files
-
- This method loads the custom type definitions referenced in "imports"
- section of the TOSCA YAML template.
- """
- custom_defs = {}
- type_defs = []
- if not isinstance(type_definitions, list):
- type_defs.append(type_definitions)
- else:
- type_defs = type_definitions
-
- if not imports:
- imports = self._tpl_imports()
-
- if imports:
- custom_service = toscaparser.imports.\
- ImportsLoader(imports, self.path,
- type_defs, self.tpl)
-
- nested_tosca_tpls = custom_service.get_nested_tosca_tpls()
- self._update_nested_tosca_tpls_with_topology(nested_tosca_tpls)
-
- custom_defs = custom_service.get_custom_defs()
- if not custom_defs:
- return
-
- # Handle custom types defined in current template file
- for type_def in type_defs:
- if type_def != IMPORTS:
- inner_custom_types = self.tpl.get(type_def) or {}
- if inner_custom_types:
- custom_defs.update(inner_custom_types)
- return custom_defs
-
- def _update_nested_tosca_tpls_with_topology(self, nested_tosca_tpls):
- for tpl in nested_tosca_tpls:
- filename, tosca_tpl = list(tpl.items())[0]
- if (tosca_tpl.get(TOPOLOGY_TEMPLATE) and
- filename not in list(
- self.nested_tosca_tpls_with_topology.keys())):
- self.nested_tosca_tpls_with_topology.update(tpl)
-
- def _handle_nested_tosca_templates_with_topology(self):
- for fname, tosca_tpl in self.nested_tosca_tpls_with_topology.items():
- for nodetemplate in self.nodetemplates:
- if self._is_sub_mapped_node(nodetemplate, tosca_tpl):
- parsed_params = self._get_params_for_nested_template(
- nodetemplate)
- topology_tpl = tosca_tpl.get(TOPOLOGY_TEMPLATE)
- topology_with_sub_mapping = TopologyTemplate(
- topology_tpl,
- self._get_all_custom_defs(),
- self.relationship_types,
- parsed_params,
- nodetemplate)
- if topology_with_sub_mapping.substitution_mappings:
- # Record nested topo templates in top level template
- self.nested_tosca_templates_with_topology.\
- append(topology_with_sub_mapping)
- # Set substitution mapping object for mapped node
- nodetemplate.sub_mapping_tosca_template = \
- topology_with_sub_mapping.substitution_mappings
-
- def _validate_field(self):
- version = self._tpl_version()
- if not version:
- ExceptionCollector.collector.appendException(
- MissingRequiredFieldError(what='Template',
- required=DEFINITION_VERSION))
- else:
- self._validate_version(version)
- self.version = version
-
- for name in self.tpl:
- if (name not in SECTIONS and
- name not in self.ADDITIONAL_SECTIONS.get(version, ())):
- ExceptionCollector.collector.appendException(
- UnknownFieldError(what='Template', field=name))
-
- def _validate_version(self, version):
- if version not in self.VALID_TEMPLATE_VERSIONS:
- ExceptionCollector.collector.appendException(
- InvalidTemplateVersion(
- what=version,
- valid_versions=', '. join(self.VALID_TEMPLATE_VERSIONS)))
- else:
- if version != 'tosca_simple_yaml_1_0':
- update_definitions(version)
-
- def _get_path(self, path):
- if path.lower().endswith(('.yaml','.yml')):
- return path
- elif path.lower().endswith(('.zip', '.csar')):
- # a CSAR archive
- csar = CSAR(path, self.a_file)
- if csar.validate():
- csar.decompress()
- self.a_file = True # the file has been decompressed locally
- return os.path.join(csar.temp_dir, csar.get_main_template())
- else:
- ExceptionCollector.collector.appendException(
- ValueError(_('"%(path)s" is not a valid file.')
- % {'path': path}))
-
- def verify_template(self):
- if ExceptionCollector.collector.exceptionsCaught():
- if self.input_path:
- raise ValidationError(
- message=(_('\nThe input "%(path)s" failed validation with '
- 'the following error(s): \n\n\t')
- % {'path': self.input_path}) +
- '\n\t'.join(ExceptionCollector.collector.getExceptionsReport()))
- else:
- raise ValidationError(
- message=_('\nThe pre-parsed input failed validation with '
- 'the following error(s): \n\n\t') +
- '\n\t'.join(ExceptionCollector.collector.getExceptionsReport()))
- else:
- if self.input_path:
- msg = (_('The input "%(path)s" successfully passed '
- 'validation.') % {'path': self.input_path})
- else:
- msg = _('The pre-parsed input successfully passed validation.')
-
- log.info(msg)
-
- def _is_sub_mapped_node(self, nodetemplate, tosca_tpl):
- """Return True if the nodetemple is substituted."""
- if (nodetemplate and not nodetemplate.sub_mapping_tosca_template and
- self.get_sub_mapping_node_type(tosca_tpl) == nodetemplate.type
- and len(nodetemplate.interfaces) < 1):
- return True
- else:
- return False
-
- def _get_params_for_nested_template(self, nodetemplate):
- """Return total params for nested_template."""
- parsed_params = deepcopy(self.parsed_params) \
- if self.parsed_params else {}
- if nodetemplate:
- for pname in nodetemplate.get_properties():
- parsed_params.update({pname:
- nodetemplate.get_property_value(pname)})
- return parsed_params
-
- def get_sub_mapping_node_type(self, tosca_tpl):
- """Return substitution mappings node type."""
- if tosca_tpl:
- return TopologyTemplate.get_sub_mapping_node_type(
- tosca_tpl.get(TOPOLOGY_TEMPLATE))
-
- def _has_substitution_mappings(self):
- """Return True if the template has valid substitution mappings."""
- return self.topology_template is not None and \
- self.topology_template.substitution_mappings is not None
-
- def has_nested_templates(self):
- """Return True if the tosca template has nested templates."""
- return self.nested_tosca_templates_with_topology is not None and \
- len(self.nested_tosca_templates_with_topology) >= 1
-*/ \ No newline at end of file
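Note: a minimal sketch of constructing the ToscaTemplate class removed above from a CSAR or YAML path; the constructor signature and getters come from the deleted code, while the helper name and printed text are illustrative.

    import org.openecomp.sdc.toscaparser.api.common.JToscaException;

    // Sketch: parse a CSAR/YAML file and list its node templates.
    static void parseAndList(String path) throws JToscaException {
        // parsedParams and yamlDictTpl are optional; pass null when parsing from a file path
        ToscaTemplate template = new ToscaTemplate(path, null, true, null);
        System.out.println("tosca_definitions_version: " + template.getVersion());
        if (template.getNodeTemplates() != null) {
            for (NodeTemplate nt : template.getNodeTemplates()) {
                System.out.println("node template: " + nt.getName());
            }
        }
    }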
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/Triggers.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/Triggers.java
deleted file mode 100644
index 0ec0b5a..0000000
--- a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/Triggers.java
+++ /dev/null
@@ -1,183 +0,0 @@
-package org.openecomp.sdc.toscaparser.api;
-
-import java.util.LinkedHashMap;
-
-import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector;
-import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder;
-import org.openecomp.sdc.toscaparser.api.utils.ValidateUtils;
-
-public class Triggers extends EntityTemplate {
-
- private static final String DESCRIPTION = "description";
- private static final String EVENT = "event_type";
- private static final String SCHEDULE = "schedule";
- private static final String TARGET_FILTER = "target_filter";
- private static final String CONDITION = "condition";
- private static final String ACTION = "action";
-
- private static final String SECTIONS[] = {
- DESCRIPTION, EVENT, SCHEDULE, TARGET_FILTER, CONDITION, ACTION
- };
-
- private static final String METER_NAME = "meter_name";
- private static final String CONSTRAINT = "constraint";
- private static final String PERIOD = "period";
- private static final String EVALUATIONS = "evaluations";
- private static final String METHOD = "method";
- private static final String THRESHOLD = "threshold";
- private static final String COMPARISON_OPERATOR = "comparison_operator";
-
- private static final String CONDITION_KEYNAMES[] = {
- METER_NAME, CONSTRAINT, PERIOD, EVALUATIONS, METHOD, THRESHOLD, COMPARISON_OPERATOR
- };
-
- private String name;
- private LinkedHashMap<String,Object> triggerTpl;
-
- public Triggers(String _name,LinkedHashMap<String,Object> _triggerTpl) {
- super(); // dummy no-arg call; Triggers does not use the EntityTemplate state
- name = _name;
- triggerTpl = _triggerTpl;
- _validateKeys();
- _validateCondition();
- _validateInput();
- }
-
- public String getDescription() {
- return (String)triggerTpl.get(DESCRIPTION);
- }
-
- public String getEvent() {
- return (String)triggerTpl.get(EVENT);
- }
-
- public LinkedHashMap<String,Object> getSchedule() {
- return (LinkedHashMap<String,Object>)triggerTpl.get(SCHEDULE);
- }
-
- public LinkedHashMap<String,Object> getTargetFilter() {
- return (LinkedHashMap<String,Object>)triggerTpl.get(TARGET_FILTER);
- }
-
- public LinkedHashMap<String,Object> getCondition() {
- return (LinkedHashMap<String,Object>)triggerTpl.get(CONDITION);
- }
-
- public LinkedHashMap<String,Object> getAction() {
- return (LinkedHashMap<String,Object>)triggerTpl.get(ACTION);
- }
-
- private void _validateKeys() {
- for(String key: triggerTpl.keySet()) {
- boolean bFound = false;
- for(int i=0; i<SECTIONS.length; i++) {
- if(key.equals(SECTIONS[i])) {
- bFound = true;
- break;
- }
- }
- if(!bFound) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "UnknownFieldError: Triggers \"%s\" contains unknown field \"%s\"",
- name,key));
- }
- }
- }
-
- private void _validateCondition() {
- for(String key: getCondition().keySet()) {
- boolean bFound = false;
- for(int i=0; i<CONDITION_KEYNAMES.length; i++) {
- if(key.equals(CONDITION_KEYNAMES[i])) {
- bFound = true;
- break;
- }
- }
- if(!bFound) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "UnknownFieldError: Triggers \"%s\" contains unknown field \"%s\"",
- name,key));
- }
- }
- }
-
- private void _validateInput() {
- for(String key: getCondition().keySet()) {
- Object value = getCondition().get(key);
- if(key.equals(PERIOD) || key.equals(EVALUATIONS)) {
- ValidateUtils.validateInteger(value);
- }
- else if(key.equals(THRESHOLD)) {
- ValidateUtils.validateNumeric(value);
- }
- else if(key.equals(METER_NAME) || key.equals(METHOD)) {
- ValidateUtils.validateString(value);
- }
- }
- }
-
- @Override
- public String toString() {
- return "Triggers{" +
- "name='" + name + '\'' +
- ", triggerTpl=" + triggerTpl +
- '}';
- }
-}
-
-/*python
-
-from toscaparser.common.exception import ExceptionCollector
-from toscaparser.common.exception import UnknownFieldError
-from toscaparser.entity_template import EntityTemplate
-
-SECTIONS = (DESCRIPTION, EVENT, SCHEDULE, TARGET_FILTER, CONDITION, ACTION) = \
- ('description', 'event_type', 'schedule',
- 'target_filter', 'condition', 'action')
-CONDITION_KEYNAMES = (CONTRAINT, PERIOD, EVALUATIONS, METHOD) = \
- ('constraint', 'period', 'evaluations', 'method')
-log = logging.getLogger('tosca')
-
-
-class Triggers(EntityTemplate):
-
- '''Triggers defined in policies of topology template'''
-
- def __init__(self, name, trigger_tpl):
- self.name = name
- self.trigger_tpl = trigger_tpl
- self._validate_keys()
- self._validate_condition()
-
- def get_description(self):
- return self.trigger_tpl['description']
-
- def get_event(self):
- return self.trigger_tpl['event_type']
-
- def get_schedule(self):
- return self.trigger_tpl['schedule']
-
- def get_target_filter(self):
- return self.trigger_tpl['target_filter']
-
- def get_condition(self):
- return self.trigger_tpl['condition']
-
- def get_action(self):
- return self.trigger_tpl['action']
-
- def _validate_keys(self):
- for key in self.trigger_tpl.keys():
- if key not in SECTIONS:
- ExceptionCollector.appendException(
- UnknownFieldError(what='Triggers "%s"' % self.name,
- field=key))
-
- def _validate_condition(self):
- for key in self.get_condition():
- if key not in CONDITION_KEYNAMES:
- ExceptionCollector.appendException(
- UnknownFieldError(what='Triggers "%s"' % self.name,
- field=key))
-*/ \ No newline at end of file
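Note: a small sketch of building one trigger with the Triggers class removed above; the keys follow SECTIONS and CONDITION_KEYNAMES from the deleted code, while the event type, trigger name, and numeric values are purely illustrative. An ExceptionCollector must already be installed via ThreadLocalsHolder so validation findings can be recorded.

    import java.util.LinkedHashMap;

    // Sketch: assemble a trigger definition and let the constructor validate it.
    static Triggers buildTrigger() {
        LinkedHashMap<String,Object> condition = new LinkedHashMap<>();
        condition.put("meter_name", "cpu_util");      // validated as a string
        condition.put("period", 60);                  // validated as an integer
        condition.put("evaluations", 1);              // validated as an integer
        condition.put("method", "avg");               // validated as a string
        condition.put("threshold", 80);               // validated as numeric

        LinkedHashMap<String,Object> triggerTpl = new LinkedHashMap<>();
        triggerTpl.put("event_type", "utilization");  // illustrative event name
        triggerTpl.put("condition", condition);
        triggerTpl.put("action", new LinkedHashMap<String,Object>());

        return new Triggers("scale_out_trigger", triggerTpl);
    }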
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/UnsupportedType.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/UnsupportedType.java
deleted file mode 100644
index 2bd0197..0000000
--- a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/UnsupportedType.java
+++ /dev/null
@@ -1,78 +0,0 @@
-package org.openecomp.sdc.toscaparser.api;
-
-import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector;
-import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder;
-
-public class UnsupportedType {
-
- // Note: TOSCA spec version related
-
- /*
- The types tosca.nodes.Storage.ObjectStorage and tosca.nodes.Storage.BlockStorage
- listed here as unsupported are part of the name changes introduced in TOSCA spec
- version 1.1. The original names as specified in version 1.0,
- tosca.nodes.BlockStorage and tosca.nodes.ObjectStorage, are supported
- by the tosca-parser. Since there is currently little overlap in version support
- in the tosca-parser, the names tosca.nodes.Storage.ObjectStorage
- and tosca.nodes.Storage.BlockStorage are used here to demonstrate the usage
- of unsupported types. As the tosca-parser moves to support version
- 1.1 and higher, they will be removed.
- */
-
- private static final String unsupportedTypes[] = {
- "tosca.test.invalidtype",
- "tosca.nodes.Storage.ObjectStorage",
- "tosca.nodes.Storage.BlockStorage"};
-
- public static boolean validateType(String entityType) {
- for(String ust: unsupportedTypes) {
- if(ust.equals(entityType)) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "UnsupportedTypeError: Entity type \"%s\" is not supported",entityType));
- return true;
- }
- }
- return false;
- }
-}
-
-/*python
-
-from toscaparser.common.exception import ExceptionCollector
-from toscaparser.common.exception import UnsupportedTypeError
-from toscaparser.utils.gettextutils import _
-
-log = logging.getLogger('tosca')
-
-
-class UnsupportedType(object):
-
- """Note: TOSCA spec version related
-
- The tosca.nodes.Storage.ObjectStorage and tosca.nodes.Storage.BlockStorage
- used here as un_supported_types are part of the name changes in TOSCA spec
- version 1.1. The original name as specified in version 1.0 are,
- tosca.nodes.BlockStorage and tosca.nodes.ObjectStorage which are supported
- by the tosca-parser. Since there are little overlapping in version support
- currently in the tosca-parser, the names tosca.nodes.Storage.ObjectStorage
- and tosca.nodes.Storage.BlockStorage are used here to demonstrate the usage
- of un_supported_types. As tosca-parser move to provide support for version
- 1.1 and higher, they will be removed.
- """
- un_supported_types = ['tosca.test.invalidtype',
- 'tosca.nodes.Storage.ObjectStorage',
- 'tosca.nodes.Storage.BlockStorage']
-
- def __init__(self):
- pass
-
- @staticmethod
- def validate_type(entitytype):
- if entitytype in UnsupportedType.un_supported_types:
- ExceptionCollector.appendException(UnsupportedTypeError(
- what=_('%s')
- % entitytype))
- return True
- else:
- return False
-*/ \ No newline at end of file
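Note: a one-line guard using the UnsupportedType helper removed above; the type string is illustrative. As shown in the deleted code, validateType() also appends an UnsupportedTypeError to the thread's ExceptionCollector.

    // Sketch: skip entity types the parser does not support yet.
    String entityType = "tosca.nodes.Storage.BlockStorage";   // illustrative
    if (UnsupportedType.validateType(entityType)) {
        System.out.println("skipping unsupported type: " + entityType);
    }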
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/common/ExceptionCollector.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/common/ExceptionCollector.java
deleted file mode 100644
index fa65ae4..0000000
--- a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/common/ExceptionCollector.java
+++ /dev/null
@@ -1,122 +0,0 @@
-package org.openecomp.sdc.toscaparser.api.common;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-// Perfectly good enough...
-
-public class ExceptionCollector {
-
- private static Logger log = LoggerFactory.getLogger(ExceptionCollector.class.getName());
-
- private Map<String, String> notAnalyzedExceptions = new HashMap<>();
- private Map<String, String> criticalExceptions = new HashMap<>();
- private Map<String, String> warningExceptions = new HashMap<>();
-
- private boolean bWantTrace = true;
- private String filePath;
-
- public enum ReportType {WARNING, CRITICAL, NOT_ANALYZED}
-
- public ExceptionCollector(String filePath) {
- this.filePath = filePath;
- }
-
- public void appendException(String exception) {
-
- addException(exception, ReportType.NOT_ANALYZED);
- }
-
- public void appendCriticalException(String exception) {
-
- addException(exception, ReportType.CRITICAL);
- }
-
- public void appendWarning(String exception) {
-
- addException(exception, ReportType.WARNING);
- }
-
- private void addException(String exception, ReportType type) {
-
- Map<String, String> exceptions = getExceptionCollection(type);
-
- if (!exceptions.containsKey(exception)) {
- // get stack trace
- StackTraceElement[] ste = Thread.currentThread().getStackTrace();
- StringBuilder sb = new StringBuilder();
- // skip the first two frames (getStackTrace and this method)
- for (int i = 2; i < ste.length; i++) {
- sb.append(String.format(" %s(%s:%d)%s", ste[i].getClassName(), ste[i].getFileName(),
- ste[i].getLineNumber(), i == ste.length - 1 ? " " : "\n"));
- }
- exceptions.put(exception, sb.toString());
- }
- }
-
- public List<String> getCriticalsReport() {
-
- return getReport(ReportType.CRITICAL);
- }
-
- public List<String> getNotAnalyzedExceptionsReport() {
-
- return getReport(ReportType.NOT_ANALYZED);
- }
-
- public List<String> getWarningsReport() {
-
- return getReport(ReportType.WARNING);
- }
-
- private List<String> getReport(ReportType type) {
- Map<String, String> collectedExceptions = getExceptionCollection(type);
-
- List<String> report = new ArrayList<>();
- if (collectedExceptions.size() > 0) {
- for (Map.Entry<String, String> exception : collectedExceptions.entrySet()) {
- report.add(exception.getKey());
- if (bWantTrace) {
- report.add(exception.getValue());
- }
- }
- }
-
- return report;
- }
-
- private Map<String, String> getExceptionCollection(ReportType type) {
- switch (type) {
- case WARNING:
- return warningExceptions;
- case CRITICAL:
- return criticalExceptions;
- case NOT_ANALYZED:
- return notAnalyzedExceptions;
- default:
- return notAnalyzedExceptions;
- }
- }
-
- public int errorsNotAnalyzedCaught() {
- return notAnalyzedExceptions.size();
- }
-
- public int criticalsCaught() {
- return criticalExceptions.size();
- }
-
- public int warningsCaught() {
- return warningExceptions.size();
- }
-
- public void setWantTrace(boolean b) {
- bWantTrace = b;
- }
-
-}
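Note: a minimal sketch of how the collector removed above is installed and queried per thread, mirroring the calls made in ToscaTemplate; the helper name, path, and warning text are illustrative.

    import java.util.List;
    import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector;
    import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder;

    // Sketch: install a collector for this thread, record a finding, then print the report.
    static void demoCollector() {
        ThreadLocalsHolder.setCollector(new ExceptionCollector("/tmp/service.csar"));
        ThreadLocalsHolder.getCollector().setWantTrace(false);   // omit stack traces from the report
        ThreadLocalsHolder.getCollector().appendWarning(
                "UnknownFieldError: Template contains unknown field \"foo\"");

        if (ThreadLocalsHolder.getCollector().warningsCaught() > 0) {
            List<String> report = ThreadLocalsHolder.getCollector().getWarningsReport();
            report.forEach(System.out::println);
        }
    }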
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/common/JToscaException.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/common/JToscaException.java
deleted file mode 100644
index 6cd5872..0000000
--- a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/common/JToscaException.java
+++ /dev/null
@@ -1,27 +0,0 @@
-package org.openecomp.sdc.toscaparser.api.common;
-
-public class JToscaException extends Exception {
-
- private static final long serialVersionUID = 1L;
- private String code;
-
- public JToscaException(String message, String code) {
- super(message);
- this.code = code;
- }
-
- public String getCode() {
- return code;
- }
-
- public void setCode(String code) {
- this.code = code;
- }
-
- //JT1001 - Meta file missing
- //JT1002 - Invalid yaml content
- //JT1003 - Entry-Definition not defined in meta file
- //JT1004 - Entry-Definition file missing
- //JT1005 - General Error
- //JT1006 - General Error/Path not valid
-}
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/common/TOSCAException.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/common/TOSCAException.java
deleted file mode 100644
index cfd7560..0000000
--- a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/common/TOSCAException.java
+++ /dev/null
@@ -1,39 +0,0 @@
-package org.openecomp.sdc.toscaparser.api.common;
-
-import java.util.IllegalFormatException;
-
-public class TOSCAException extends Exception {
- private String message = "An unkown exception has occurred";
- private static boolean FATAL_EXCEPTION_FORMAT_ERRORS = false;
- private String msgFmt = null;
-
- public TOSCAException(String...strings) {
- try {
- // msgFmt is expected to be set by subclasses; guard against the default null format
- if(msgFmt != null) {
- message = String.format(msgFmt,(Object[])strings);
- }
- }
- catch (IllegalFormatException e) {
- // TODO log
-
- if(FATAL_EXCEPTION_FORMAT_ERRORS) {
- throw e;
- }
-
- }
-
- }
-
- public String __str__() {
- return message;
- }
-
- public static void generate_inv_schema_property_error(String name, String attr, String value, String valid_values) {
- //TODO
-
- }
-
- public static void setFatalFormatException(boolean flag) {
- FATAL_EXCEPTION_FORMAT_ERRORS = flag;
- }
-
-}
-
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/ArtifactTypeDef.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/ArtifactTypeDef.java
deleted file mode 100644
index 8a13d99..0000000
--- a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/ArtifactTypeDef.java
+++ /dev/null
@@ -1,105 +0,0 @@
-package org.openecomp.sdc.toscaparser.api.elements;
-
-import java.util.LinkedHashMap;
-
-public class ArtifactTypeDef extends StatefulEntityType {
-
- private String type;
- private LinkedHashMap<String,Object> customDef;
- private LinkedHashMap<String,Object> properties;
- private LinkedHashMap<String,Object> parentArtifacts;
-
-
-
- public ArtifactTypeDef(String atype,LinkedHashMap<String,Object> _customDef) {
- super(atype,ARTIFACT_PREFIX,_customDef);
-
- type = atype;
- customDef = _customDef;
- properties = null;
- if(defs != null) {
- properties = (LinkedHashMap<String,Object>)defs.get(PROPERTIES);
- }
- parentArtifacts = _getParentArtifacts();
- }
-
- private LinkedHashMap<String,Object> _getParentArtifacts() {
- LinkedHashMap<String,Object> artifacts = new LinkedHashMap<>();
- String parentArtif = null;
- if(getParentType() != null) {
- parentArtif = getParentType().getType();
- }
- if(parentArtif != null && !parentArtif.isEmpty()) {
- while(!parentArtif.equals("tosca.artifacts.Root")) {
- Object ob = TOSCA_DEF.get(parentArtif);
- artifacts.put(parentArtif,ob);
- parentArtif =
- (String)((LinkedHashMap<String,Object>)ob).get("derived_from");
- }
- }
- return artifacts;
- }
-
- public ArtifactTypeDef getParentType() {
- // Return an artifact entity from which this entity is derived
- if(defs == null) {
- return null;
- }
- String partifactEntity = derivedFrom(defs);
- if(partifactEntity != null) {
- return new ArtifactTypeDef(partifactEntity,customDef);
- }
- return null;
- }
-
- public Object getArtifact(String name) {
- // Return the definition of an artifact field by name
- if(defs != null) {
- return defs.get(name);
- }
- return null;
- }
-
- public String getType() {
- return type;
- }
-
-}
-
-/*python
-class ArtifactTypeDef(StatefulEntityType):
- '''TOSCA built-in artifacts type.'''
-
- def __init__(self, atype, custom_def=None):
- super(ArtifactTypeDef, self).__init__(atype, self.ARTIFACT_PREFIX,
- custom_def)
- self.type = atype
- self.custom_def = custom_def
- self.properties = None
- if self.PROPERTIES in self.defs:
- self.properties = self.defs[self.PROPERTIES]
- self.parent_artifacts = self._get_parent_artifacts()
-
- def _get_parent_artifacts(self):
- artifacts = {}
- parent_artif = self.parent_type.type if self.parent_type else None
- if parent_artif:
- while parent_artif != 'tosca.artifacts.Root':
- artifacts[parent_artif] = self.TOSCA_DEF[parent_artif]
- parent_artif = artifacts[parent_artif]['derived_from']
- return artifacts
-
- @property
- def parent_type(self):
- '''Return a artifact entity from which this entity is derived.'''
- if not hasattr(self, 'defs'):
- return None
- partifact_entity = self.derived_from(self.defs)
- if partifact_entity:
- return ArtifactTypeDef(partifact_entity, self.custom_def)
-
- def get_artifact(self, name):
- '''Return the definition of an artifact field by name.'''
- if name in self.defs:
- return self.defs[name]
-*/ \ No newline at end of file
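Note on the removed ArtifactTypeDef: _getParentArtifacts walks the derived_from chain of the TOSCA type map, collecting each ancestor definition until tosca.artifacts.Root is reached. The following standalone sketch (not part of the deleted sources; the class name and the null guard are illustrative assumptions) shows the same walk in isolation. Unlike the deleted loop, it stops when a type name has no definition instead of dereferencing a missing entry.

    import java.util.LinkedHashMap;
    import java.util.Map;

    public class DerivedFromWalkSketch {
        // Collect every ancestor definition of 'start' by following "derived_from"
        // links in 'typeDefs' until 'rootType' (e.g. "tosca.artifacts.Root").
        @SuppressWarnings("unchecked")
        static LinkedHashMap<String, Object> ancestors(String start, String rootType,
                                                       Map<String, Object> typeDefs) {
            LinkedHashMap<String, Object> result = new LinkedHashMap<>();
            String current = start;
            while (current != null && !current.equals(rootType)) {
                Object def = typeDefs.get(current);
                if (!(def instanceof Map)) {
                    break; // unknown type: stop rather than fail on a null entry
                }
                result.put(current, def);
                current = (String) ((Map<String, Object>) def).get("derived_from");
            }
            return result;
        }
    }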
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/AttributeDef.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/AttributeDef.java
deleted file mode 100644
index 5551908..0000000
--- a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/AttributeDef.java
+++ /dev/null
@@ -1,40 +0,0 @@
-package org.openecomp.sdc.toscaparser.api.elements;
-
-import java.util.LinkedHashMap;
-
-public class AttributeDef {
- // TOSCA built-in Attribute type
-
- private String name;
- private Object value;
- private LinkedHashMap<String,Object> schema;
-
- public AttributeDef(String adName, Object adValue, LinkedHashMap<String,Object> adSchema) {
- name = adName;
- value = adValue;
- schema = adSchema;
- }
-
- public String getName() {
- return name;
- }
-
- public Object getValue() {
- return value;
- }
-
- public LinkedHashMap<String,Object> getSchema() {
- return schema;
- }
-}
-
-/*python
-
-class AttributeDef(object):
- '''TOSCA built-in Attribute type.'''
-
- def __init__(self, name, value=None, schema=None):
- self.name = name
- self.value = value
- self.schema = schema
-*/ \ No newline at end of file
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/CapabilityTypeDef.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/CapabilityTypeDef.java
deleted file mode 100644
index 03e2c45..0000000
--- a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/CapabilityTypeDef.java
+++ /dev/null
@@ -1,222 +0,0 @@
-package org.openecomp.sdc.toscaparser.api.elements;
-
-import java.util.ArrayList;
-import java.util.LinkedHashMap;
-import java.util.Map;
-
-import org.openecomp.sdc.toscaparser.api.elements.PropertyDef;
-
-public class CapabilityTypeDef extends StatefulEntityType {
- // TOSCA built-in capabilities type
-
- private static final String TOSCA_TYPEURI_CAPABILITY_ROOT = "tosca.capabilities.Root";
-
- private String name;
- private String nodetype;
- private LinkedHashMap<String,Object> customDef;
- private LinkedHashMap<String,Object> properties;
- private LinkedHashMap<String,Object> parentCapabilities;
-
- @SuppressWarnings("unchecked")
- public CapabilityTypeDef(String cname,String ctype,String ntype,LinkedHashMap<String,Object> ccustomDef) {
- super(ctype,CAPABILITY_PREFIX,ccustomDef);
-
- name = cname;
- nodetype = ntype;
- properties = null;
- customDef = ccustomDef;
- if(defs != null) {
- properties = (LinkedHashMap<String,Object>)defs.get(PROPERTIES);
- }
- parentCapabilities = _getParentCapabilities(customDef);
- }
-
- @SuppressWarnings("unchecked")
- public ArrayList<PropertyDef> getPropertiesDefObjects () {
- // Return a list of property definition objects
- ArrayList<PropertyDef> propsdefs = new ArrayList<>();
- LinkedHashMap<String,Object> parentProperties = new LinkedHashMap<>();
- if(parentCapabilities != null) {
- for(Map.Entry<String,Object> me: parentCapabilities.entrySet()) {
- parentProperties.put(me.getKey(),((LinkedHashMap<String,Object>)me.getValue()).get("properties"));
- }
- }
- if(properties != null) {
- for(Map.Entry<String,Object> me: properties.entrySet()) {
- propsdefs.add(new PropertyDef(me.getKey(),null,(LinkedHashMap<String,Object>)me.getValue()));
- }
- }
- if(parentProperties != null) {
- for(Map.Entry<String,Object> me: parentProperties.entrySet()) {
- LinkedHashMap<String,Object> props = (LinkedHashMap<String,Object>)me.getValue();
- for(Map.Entry<String,Object> pe: props.entrySet()) {
- String prop = pe.getKey();
- LinkedHashMap<String,Object> schema = (LinkedHashMap<String,Object>)pe.getValue();
- // add parent property if not overridden by children type
- if(properties == null || properties.get(prop) == null) {
- propsdefs.add(new PropertyDef(prop, null, schema));
- }
- }
- }
- }
- return propsdefs;
- }
-
- public LinkedHashMap<String,PropertyDef> getPropertiesDef() {
- LinkedHashMap<String,PropertyDef> pds = new LinkedHashMap<>();
- for(PropertyDef pd: getPropertiesDefObjects()) {
- pds.put(pd.getName(),pd);
- }
- return pds;
- }
-
- public PropertyDef getPropertyDefValue(String pdname) {
- // Return the definition of a given property name
- LinkedHashMap<String,PropertyDef> propsDef = getPropertiesDef();
- if(propsDef != null && propsDef.get(pdname) != null) {
- return (PropertyDef)propsDef.get(pdname).getPDValue();
- }
- return null;
- }
-
- @SuppressWarnings("unchecked")
- private LinkedHashMap<String,Object> _getParentCapabilities(LinkedHashMap<String,Object> customDef) {
- LinkedHashMap<String,Object> capabilities = new LinkedHashMap<>();
- CapabilityTypeDef parentCap = getParentType();
- if(parentCap != null) {
- String sParentCap = parentCap.getType();
- while(!sParentCap.equals(TOSCA_TYPEURI_CAPABILITY_ROOT)) {
- if(TOSCA_DEF.get(sParentCap) != null) {
- capabilities.put(sParentCap,TOSCA_DEF.get(sParentCap));
- }
- else if(customDef != null && customDef.get(sParentCap) != null) {
- capabilities.put(sParentCap,customDef.get(sParentCap));
- }
- sParentCap = (String)((LinkedHashMap<String,Object>)capabilities.get(sParentCap)).get("derived_from");
- }
- }
- return capabilities;
- }
-
- public CapabilityTypeDef getParentType() {
- // Return a capability this capability is derived from
- if(defs == null) {
- return null;
- }
- String pnode = derivedFrom(defs);
- if(pnode != null && !pnode.isEmpty()) {
- return new CapabilityTypeDef(name, pnode, nodetype, customDef);
- }
- return null;
- }
-
- public boolean inheritsFrom(ArrayList<String> typeNames) {
- // Check this capability is in type_names
-
- // Check if this capability or some of its parent types
- // are in the list of types: type_names
- if(typeNames.contains(getType())) {
- return true;
- }
- else if(getParentType() != null) {
- return getParentType().inheritsFrom(typeNames);
- }
- return false;
- }
-
- // getters/setters
-
- public LinkedHashMap<String,Object> getProperties() {
- return properties;
- }
-
- public String getName() {
- return name;
- }
-}
-
-/*python
-from toscaparser.elements.property_definition import PropertyDef
-from toscaparser.elements.statefulentitytype import StatefulEntityType
-
-
-class CapabilityTypeDef(StatefulEntityType):
- '''TOSCA built-in capabilities type.'''
- TOSCA_TYPEURI_CAPABILITY_ROOT = 'tosca.capabilities.Root'
-
- def __init__(self, name, ctype, ntype, custom_def=None):
- self.name = name
- super(CapabilityTypeDef, self).__init__(ctype, self.CAPABILITY_PREFIX,
- custom_def)
- self.nodetype = ntype
- self.properties = None
- self.custom_def = custom_def
- if self.PROPERTIES in self.defs:
- self.properties = self.defs[self.PROPERTIES]
- self.parent_capabilities = self._get_parent_capabilities(custom_def)
-
- def get_properties_def_objects(self):
- '''Return a list of property definition objects.'''
- properties = []
- parent_properties = {}
- if self.parent_capabilities:
- for type, value in self.parent_capabilities.items():
- parent_properties[type] = value.get('properties')
- if self.properties:
- for prop, schema in self.properties.items():
- properties.append(PropertyDef(prop, None, schema))
- if parent_properties:
- for parent, props in parent_properties.items():
- for prop, schema in props.items():
- # add parent property if not overridden by children type
- if not self.properties or \
- prop not in self.properties.keys():
- properties.append(PropertyDef(prop, None, schema))
- return properties
-
- def get_properties_def(self):
- '''Return a dictionary of property definition name-object pairs.'''
- return {prop.name: prop
- for prop in self.get_properties_def_objects()}
-
- def get_property_def_value(self, name):
- '''Return the definition of a given property name.'''
- props_def = self.get_properties_def()
- if props_def and name in props_def:
- return props_def[name].value
-
- def _get_parent_capabilities(self, custom_def=None):
- capabilities = {}
- parent_cap = self.parent_type
- if parent_cap:
- parent_cap = parent_cap.type
- while parent_cap != self.TOSCA_TYPEURI_CAPABILITY_ROOT:
- if parent_cap in self.TOSCA_DEF.keys():
- capabilities[parent_cap] = self.TOSCA_DEF[parent_cap]
- elif custom_def and parent_cap in custom_def.keys():
- capabilities[parent_cap] = custom_def[parent_cap]
- parent_cap = capabilities[parent_cap]['derived_from']
- return capabilities
-
- @property
- def parent_type(self):
- '''Return a capability this capability is derived from.'''
- if not hasattr(self, 'defs'):
- return None
- pnode = self.derived_from(self.defs)
- if pnode:
- return CapabilityTypeDef(self.name, pnode,
- self.nodetype, self.custom_def)
-
- def inherits_from(self, type_names):
- '''Check this capability is in type_names
-
- Check if this capability or some of its parent types
- are in the list of types: type_names
- '''
- if self.type in type_names:
- return True
- elif self.parent_type:
- return self.parent_type.inherits_from(type_names)
- else:
- return False*/
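The removed getPropertiesDefObjects applies a simple override rule: a property schema inherited from a parent capability type is only added when the child type does not already define a property with that name. A minimal hedged sketch of that merge, with hypothetical map inputs:

    import java.util.LinkedHashMap;
    import java.util.Map;

    public class PropertyOverrideSketch {
        // Child schemas win; a parent entry is only taken when the child
        // does not define a property of the same name.
        static LinkedHashMap<String, Object> merge(Map<String, Object> childProps,
                                                   Map<String, Object> parentProps) {
            LinkedHashMap<String, Object> merged = new LinkedHashMap<>();
            if (childProps != null) {
                merged.putAll(childProps);
            }
            if (parentProps != null) {
                for (Map.Entry<String, Object> e : parentProps.entrySet()) {
                    merged.putIfAbsent(e.getKey(), e.getValue());
                }
            }
            return merged;
        }
    }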
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/DataType.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/DataType.java
deleted file mode 100644
index d5d770b..0000000
--- a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/DataType.java
+++ /dev/null
@@ -1,116 +0,0 @@
-package org.openecomp.sdc.toscaparser.api.elements;
-
-import java.util.ArrayList;
-import java.util.LinkedHashMap;
-
-public class DataType extends StatefulEntityType {
-
- LinkedHashMap<String,Object> customDef;
-
- public DataType(String _dataTypeName,LinkedHashMap<String,Object> _customDef) {
- super(_dataTypeName,DATATYPE_NETWORK_PREFIX,_customDef);
-
- customDef = _customDef;
- }
-
- public DataType getParentType() {
- // Return a datatype this datatype is derived from
- if(defs != null) {
- String ptype = derivedFrom(defs);
- if(ptype != null) {
- return new DataType(ptype,customDef);
- }
- }
- return null;
- }
-
- public String getValueType() {
- // Return 'type' section in the datatype schema
- if(defs != null) {
- return (String)entityValue(defs,"type");
- }
- return null;
- }
-
- public ArrayList<PropertyDef> getAllPropertiesObjects() {
- //Return all properties objects defined in type and parent type
- ArrayList<PropertyDef> propsDef = getPropertiesDefObjects();
- DataType ptype = getParentType();
- while(ptype != null) {
- propsDef.addAll(ptype.getPropertiesDefObjects());
- ptype = ptype.getParentType();
- }
- return propsDef;
- }
-
- public LinkedHashMap<String,PropertyDef> getAllProperties() {
- // Return a dictionary of all property definition name-object pairs
- LinkedHashMap<String,PropertyDef> pno = new LinkedHashMap<>();
- for(PropertyDef pd: getAllPropertiesObjects()) {
- pno.put(pd.getName(),pd);
- }
- return pno;
- }
-
- public Object getAllPropertyValue(String name) {
- // Return the value of a given property name
- LinkedHashMap<String,PropertyDef> propsDef = getAllProperties();
- if(propsDef != null && propsDef.get(name) != null) {
- return propsDef.get(name).getPDValue();
- }
- return null;
- }
-
- public LinkedHashMap<String,Object> getDefs() {
- return defs;
- }
-
-}
-
-/*python
-
-from toscaparser.elements.statefulentitytype import StatefulEntityType
-
-
-class DataType(StatefulEntityType):
- '''TOSCA built-in and user defined complex data type.'''
-
- def __init__(self, datatypename, custom_def=None):
- super(DataType, self).__init__(datatypename,
- self.DATATYPE_NETWORK_PREFIX,
- custom_def)
- self.custom_def = custom_def
-
- @property
- def parent_type(self):
- '''Return a datatype this datatype is derived from.'''
- ptype = self.derived_from(self.defs)
- if ptype:
- return DataType(ptype, self.custom_def)
- return None
-
- @property
- def value_type(self):
- '''Return 'type' section in the datatype schema.'''
- return self.entity_value(self.defs, 'type')
-
- def get_all_properties_objects(self):
- '''Return all properties objects defined in type and parent type.'''
- props_def = self.get_properties_def_objects()
- ptype = self.parent_type
- while ptype:
- props_def.extend(ptype.get_properties_def_objects())
- ptype = ptype.parent_type
- return props_def
-
- def get_all_properties(self):
- '''Return a dictionary of all property definition name-object pairs.'''
- return {prop.name: prop
- for prop in self.get_all_properties_objects()}
-
- def get_all_property_value(self, name):
- '''Return the value of a given property name.'''
- props_def = self.get_all_properties()
- if props_def and name in props_def.key():
- return props_def[name].value
-*/ \ No newline at end of file
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/EntityType.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/EntityType.java
deleted file mode 100644
index 650166d..0000000
--- a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/EntityType.java
+++ /dev/null
@@ -1,418 +0,0 @@
-package org.openecomp.sdc.toscaparser.api.elements;
-
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-import java.io.InputStream;
-import java.util.ArrayList;
-import java.util.LinkedHashMap;
-import java.util.Map;
-
-import org.openecomp.sdc.toscaparser.api.extensions.ExtTools;
-import org.openecomp.sdc.toscaparser.api.utils.CopyUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.yaml.snakeyaml.Yaml;
-
-public class EntityType {
-
- private static Logger log = LoggerFactory.getLogger(EntityType.class.getName());
-
- private static final String TOSCA_DEFINITION_1_0_YAML = "TOSCA_definition_1_0.yaml";
- protected static final String DERIVED_FROM = "derived_from";
- protected static final String PROPERTIES = "properties";
- protected static final String ATTRIBUTES = "attributes";
- protected static final String REQUIREMENTS = "requirements";
- protected static final String INTERFACES = "interfaces";
- protected static final String CAPABILITIES = "capabilities";
- protected static final String TYPE = "type";
- protected static final String ARTIFACTS = "artifacts";
-
- @SuppressWarnings("unused")
- private static final String SECTIONS[] = {
- DERIVED_FROM, PROPERTIES, ATTRIBUTES, REQUIREMENTS,
- INTERFACES, CAPABILITIES, TYPE, ARTIFACTS
- };
-
- public static final String TOSCA_DEF_SECTIONS[] = {
- "node_types", "data_types", "artifact_types",
- "group_types", "relationship_types",
- "capability_types", "interface_types",
- "policy_types"};
-
-
- // TOSCA definition file
- //private final static String path = EntityType.class.getProtectionDomain().getCodeSource().getLocation().getPath();
-
- //private final static String path = EntityType.class.getClassLoader().getResource("TOSCA_definition_1_0.yaml").getFile();
- //private final static String TOSCA_DEF_FILE = EntityType.class.getClassLoader().getResourceAsStream("TOSCA_definition_1_0.yaml");
-
- private static LinkedHashMap<String,Object> TOSCA_DEF_LOAD_AS_IS = loadTdf();
-
- //EntityType.class.getClassLoader().getResourceAsStream("TOSCA_definition_1_0.yaml");
-
- @SuppressWarnings("unchecked")
- private static LinkedHashMap<String,Object> loadTdf() {
- String toscaDefLocation = EntityType.class.getClassLoader().getResource(TOSCA_DEFINITION_1_0_YAML).getFile();
- InputStream input = EntityType.class.getClassLoader().getResourceAsStream(TOSCA_DEFINITION_1_0_YAML);
- if (input == null){
- log.error("EntityType - loadTdf - Couldn't load TOSCA_DEF_FILE {}", toscaDefLocation);
- }
- Yaml yaml = new Yaml();
- Object loaded = yaml.load(input);
- //@SuppressWarnings("unchecked")
- return (LinkedHashMap<String,Object>) loaded;
- }
-
- // Map of definition with pre-loaded values of TOSCA_DEF_FILE_SECTIONS
- public static LinkedHashMap<String,Object> TOSCA_DEF;
- static {
- TOSCA_DEF = new LinkedHashMap<String,Object>();
- for(String section: TOSCA_DEF_SECTIONS) {
- @SuppressWarnings("unchecked")
- LinkedHashMap<String,Object> value = (LinkedHashMap<String,Object>)TOSCA_DEF_LOAD_AS_IS.get(section);
- if(value != null) {
- for(String key: value.keySet()) {
- TOSCA_DEF.put(key, value.get(key));
- }
- }
- }
- }
-
- public static final String DEPENDSON = "tosca.relationships.DependsOn";
- public static final String HOSTEDON = "tosca.relationships.HostedOn";
- public static final String CONNECTSTO = "tosca.relationships.ConnectsTo";
- public static final String ATTACHESTO = "tosca.relationships.AttachesTo";
- public static final String LINKSTO = "tosca.relationships.network.LinksTo";
- public static final String BINDSTO = "tosca.relationships.network.BindsTo";
-
- public static final String RELATIONSHIP_TYPE[] = {
- "tosca.relationships.DependsOn",
- "tosca.relationships.HostedOn",
- "tosca.relationships.ConnectsTo",
- "tosca.relationships.AttachesTo",
- "tosca.relationships.network.LinksTo",
- "tosca.relationships.network.BindsTo"};
-
- public static final String NODE_PREFIX = "tosca.nodes.";
- public static final String RELATIONSHIP_PREFIX = "tosca.relationships.";
- public static final String CAPABILITY_PREFIX = "tosca.capabilities.";
- public static final String INTERFACE_PREFIX = "tosca.interfaces.";
- public static final String ARTIFACT_PREFIX = "tosca.artifacts.";
- public static final String POLICY_PREFIX = "tosca.policies.";
- public static final String GROUP_PREFIX = "tosca.groups.";
- //currently the data types are defined only for network
- // but may change in the future.
- public static final String DATATYPE_PREFIX = "tosca.datatypes.";
- public static final String DATATYPE_NETWORK_PREFIX = DATATYPE_PREFIX + "network.";
- public static final String TOSCA = "tosca";
-
- protected String type;
- protected LinkedHashMap<String,Object> defs = null;
- public Object getParentType() { return null; }
-
- public String derivedFrom(LinkedHashMap<String,Object> defs) {
- // Return a type this type is derived from
- return (String)entityValue(defs, "derived_from");
- }
-
- public boolean isDerivedFrom(String type_str) {
- // Check if object inherits from the given type
- // Returns true if this object is derived from 'type_str'
- // False otherwise.
- if(type == null || this.type.isEmpty()) {
- return false;
- }
- else if(type.equals(type_str)) {
- return true;
- }
- else if(getParentType() != null) {
- return ((EntityType)getParentType()).isDerivedFrom(type_str);
- }
- else {
- return false;
- }
- }
-
- public Object entityValue(LinkedHashMap<String,Object> defs, String key) {
- if(defs != null) {
- return defs.get(key);
- }
- return null;
- }
-
- @SuppressWarnings("unchecked")
- public Object getValue(String ndtype, LinkedHashMap<String,Object> _defs, boolean parent) {
- Object value = null;
- if(_defs == null) {
- if(defs == null) {
- return null;
- }
- _defs = this.defs;
- }
- Object defndt = _defs.get(ndtype);
- if(defndt != null) {
- // copy the value to avoid that next operations add items in the
- // item definitions
- //value = copy.copy(defs[ndtype])
- value = CopyUtils.copyLhmOrAl(defndt);
- }
-
- if(parent) {
- EntityType p = this;
- if(p != null) {
- while(p != null) {
- if(p.defs != null && p.defs.get(ndtype) != null) {
- // get the parent value
- Object parentValue = p.defs.get(ndtype);
- if(value != null) {
- if(value instanceof LinkedHashMap) {
- for(Map.Entry<String,Object> me: ((LinkedHashMap<String,Object>)parentValue).entrySet()) {
- String k = me.getKey();
- if(((LinkedHashMap<String,Object>)value).get(k) == null) {
- ((LinkedHashMap<String,Object>)value).put(k,me.getValue());
- }
- }
- }
- if(value instanceof ArrayList) {
- for(Object pValue: (ArrayList<Object>)parentValue) {
- if(!((ArrayList<Object>)value).contains(pValue)) {
- ((ArrayList<Object>)value).add(pValue);
- }
- }
- }
- }
- else {
- // value = copy.copy(parent_value)
- value = CopyUtils.copyLhmOrAl(parentValue);
- }
- }
- p = (EntityType)p.getParentType();
- }
- }
- }
-
- return value;
- }
-
- @SuppressWarnings("unchecked")
- public Object getDefinition(String ndtype) {
- Object value = null;
- LinkedHashMap<String,Object> _defs;
- // no point in hasattr, because we have it, and it
- // doesn't do anything except emit an exception anyway
- //if not hasattr(self, 'defs'):
- // defs = None
- // ExceptionCollector.appendException(
- // ValidationError(message="defs is " + str(defs)))
- //else:
- // defs = self.defs
- _defs = this.defs;
-
-
- if(_defs != null && _defs.get(ndtype) != null) {
- value = _defs.get(ndtype);
- }
-
- Object p = getParentType();
- if(p != null) {
- Object inherited = ((EntityType)p).getDefinition(ndtype);
- if(inherited != null) {
- // inherited = dict(inherited) WTF?!?
- if(value == null) {
- value = inherited;
- }
- else {
- //?????
- //inherited.update(value)
- //value.update(inherited)
- for(Map.Entry<String,Object> me: ((LinkedHashMap<String,Object>)inherited).entrySet()) {
- ((LinkedHashMap<String,Object>)value).put(me.getKey(),me.getValue());
- }
- }
- }
- }
- return value;
- }
-
- public static void updateDefinitions(String version) {
- ExtTools exttools = new ExtTools();
- String extensionDefsFile = exttools.getDefsFile(version);
-
- InputStream input = null;
- try {
- input = new FileInputStream(new File(extensionDefsFile));
- }
- catch (FileNotFoundException e) {
- log.error("EntityType - updateDefinitions - Failed to open extension defs file ", extensionDefsFile);
- return;
- }
- Yaml yaml = new Yaml();
- LinkedHashMap<String,Object> nfvDefFile = (LinkedHashMap<String,Object>)yaml.load(input);
- LinkedHashMap<String,Object> nfvDef = new LinkedHashMap<>();
- for(String section: TOSCA_DEF_SECTIONS) {
- if(nfvDefFile.get(section) != null) {
- LinkedHashMap<String,Object> value =
- (LinkedHashMap<String,Object>)nfvDefFile.get(section);
- for(String key: value.keySet()) {
- nfvDef.put(key, value.get(key));
- }
- }
- }
- TOSCA_DEF.putAll(nfvDef);
- }
-
-}
-
-/*python
-
-from toscaparser.common.exception import ExceptionCollector
-from toscaparser.common.exception import ValidationError
-from toscaparser.extensions.exttools import ExtTools
-import org.openecomp.sdc.toscaparser.api.utils.yamlparser
-
-log = logging.getLogger('tosca')
-
-
-class EntityType(object):
- '''Base class for TOSCA elements.'''
-
- SECTIONS = (DERIVED_FROM, PROPERTIES, ATTRIBUTES, REQUIREMENTS,
- INTERFACES, CAPABILITIES, TYPE, ARTIFACTS) = \
- ('derived_from', 'properties', 'attributes', 'requirements',
- 'interfaces', 'capabilities', 'type', 'artifacts')
-
- TOSCA_DEF_SECTIONS = ['node_types', 'data_types', 'artifact_types',
- 'group_types', 'relationship_types',
- 'capability_types', 'interface_types',
- 'policy_types']
-
- '''TOSCA definition file.'''
- TOSCA_DEF_FILE = os.path.join(
- os.path.dirname(os.path.abspath(__file__)),
- "TOSCA_definition_1_0.yaml")
-
- loader = toscaparser.utils.yamlparser.load_yaml
-
- TOSCA_DEF_LOAD_AS_IS = loader(TOSCA_DEF_FILE)
-
- # Map of definition with pre-loaded values of TOSCA_DEF_FILE_SECTIONS
- TOSCA_DEF = {}
- for section in TOSCA_DEF_SECTIONS:
- if section in TOSCA_DEF_LOAD_AS_IS.keys():
- value = TOSCA_DEF_LOAD_AS_IS[section]
- for key in value.keys():
- TOSCA_DEF[key] = value[key]
-
- RELATIONSHIP_TYPE = (DEPENDSON, HOSTEDON, CONNECTSTO, ATTACHESTO,
- LINKSTO, BINDSTO) = \
- ('tosca.relationships.DependsOn',
- 'tosca.relationships.HostedOn',
- 'tosca.relationships.ConnectsTo',
- 'tosca.relationships.AttachesTo',
- 'tosca.relationships.network.LinksTo',
- 'tosca.relationships.network.BindsTo')
-
- NODE_PREFIX = 'tosca.nodes.'
- RELATIONSHIP_PREFIX = 'tosca.relationships.'
- CAPABILITY_PREFIX = 'tosca.capabilities.'
- INTERFACE_PREFIX = 'tosca.interfaces.'
- ARTIFACT_PREFIX = 'tosca.artifacts.'
- POLICY_PREFIX = 'tosca.policies.'
- GROUP_PREFIX = 'tosca.groups.'
- # currently the data types are defined only for network
- # but may have changes in the future.
- DATATYPE_PREFIX = 'tosca.datatypes.'
- DATATYPE_NETWORK_PREFIX = DATATYPE_PREFIX + 'network.'
- TOSCA = 'tosca'
-
- def derived_from(self, defs):
- '''Return a type this type is derived from.'''
- return self.entity_value(defs, 'derived_from')
-
- def is_derived_from(self, type_str):
- '''Check if object inherits from the given type.
-
- Returns true if this object is derived from 'type_str'.
- False otherwise.
- '''
- if not self.type:
- return False
- elif self.type == type_str:
- return True
- elif self.parent_type:
- return self.parent_type.is_derived_from(type_str)
- else:
- return False
-
- def entity_value(self, defs, key):
- if key in defs:
- return defs[key]
-
- def get_value(self, ndtype, defs=None, parent=None):
- value = None
- if defs is None:
- if not hasattr(self, 'defs'):
- return None
- defs = self.defs
- if ndtype in defs:
- # copy the value to avoid that next operations add items in the
- # item definitions
- value = copy.copy(defs[ndtype])
- if parent:
- p = self
- if p:
- while p:
- if ndtype in p.defs:
- # get the parent value
- parent_value = p.defs[ndtype]
- if value:
- if isinstance(value, dict):
- for k, v in parent_value.items():
- if k not in value.keys():
- value[k] = v
- if isinstance(value, list):
- for p_value in parent_value:
- if p_value not in value:
- value.append(p_value)
- else:
- value = copy.copy(parent_value)
- p = p.parent_type
- return value
-
- def get_definition(self, ndtype):
- value = None
- if not hasattr(self, 'defs'):
- defs = None
- ExceptionCollector.appendException(
- ValidationError(message="defs is " + str(defs)))
- else:
- defs = self.defs
- if defs is not None and ndtype in defs:
- value = defs[ndtype]
- p = self.parent_type
- if p:
- inherited = p.get_definition(ndtype)
- if inherited:
- inherited = dict(inherited)
- if not value:
- value = inherited
- else:
- inherited.update(value)
- value.update(inherited)
- return value
-
-
-def update_definitions(version):
- exttools = ExtTools()
- extension_defs_file = exttools.get_defs_file(version)
- loader = toscaparser.utils.yamlparser.load_yaml
- nfv_def_file = loader(extension_defs_file)
- nfv_def = {}
- for section in EntityType.TOSCA_DEF_SECTIONS:
- if section in nfv_def_file.keys():
- value = nfv_def_file[section]
- for key in value.keys():
- nfv_def[key] = value[key]
- EntityType.TOSCA_DEF.update(nfv_def)
-*/
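In the removed EntityType, loadTdf and the static initializer together amount to: load TOSCA_definition_1_0.yaml from the classpath with SnakeYAML, then flatten the per-section type maps into one lookup table keyed by type name. A self-contained sketch under those assumptions (throwing on a missing resource is an illustrative simplification of the original log-and-continue behaviour):

    import java.io.InputStream;
    import java.util.LinkedHashMap;
    import java.util.Map;
    import org.yaml.snakeyaml.Yaml;

    public class ToscaDefLoaderSketch {
        static final String[] SECTIONS = {
            "node_types", "data_types", "artifact_types", "group_types",
            "relationship_types", "capability_types", "interface_types", "policy_types"
        };

        @SuppressWarnings("unchecked")
        static Map<String, Object> loadFlattened(String resourceName) {
            InputStream in = ToscaDefLoaderSketch.class.getClassLoader()
                    .getResourceAsStream(resourceName);
            if (in == null) {
                throw new IllegalStateException("Resource not found: " + resourceName);
            }
            Map<String, Object> raw = (Map<String, Object>) new Yaml().load(in);
            Map<String, Object> flat = new LinkedHashMap<>();
            for (String section : SECTIONS) {
                Object value = raw.get(section);
                if (value instanceof Map) {
                    // each type name becomes a top-level key, as in TOSCA_DEF
                    flat.putAll((Map<String, Object>) value);
                }
            }
            return flat;
        }
    }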
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/GroupType.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/GroupType.java
deleted file mode 100644
index d226b78..0000000
--- a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/GroupType.java
+++ /dev/null
@@ -1,215 +0,0 @@
-package org.openecomp.sdc.toscaparser.api.elements;
-
-import java.util.LinkedHashMap;
-
-import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector;
-import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder;
-
-public class GroupType extends StatefulEntityType {
-
- private static final String DERIVED_FROM = "derived_from";
- private static final String VERSION = "version";
- private static final String METADATA = "metadata";
- private static final String DESCRIPTION = "description";
- private static final String PROPERTIES = "properties";
- private static final String MEMBERS = "members";
- private static final String INTERFACES = "interfaces";
-
- private static final String SECTIONS[] = {
- DERIVED_FROM, VERSION, METADATA, DESCRIPTION, PROPERTIES, MEMBERS, INTERFACES};
-
- private String groupType;
- private LinkedHashMap<String,Object> customDef;
- private String groupDescription;
- private String groupVersion;
- //private LinkedHashMap<String,Object> groupProperties;
- //private ArrayList<String> groupMembers;
- private LinkedHashMap<String,Object> metaData;
-
- @SuppressWarnings("unchecked")
- public GroupType(String _grouptype,LinkedHashMap<String,Object> _customDef) {
- super(_grouptype,GROUP_PREFIX,_customDef);
-
- groupType = _grouptype;
- customDef = _customDef;
- _validateFields();
- if(defs != null) {
- groupDescription = (String)defs.get(DESCRIPTION);
- groupVersion = (String)defs.get(VERSION);
- //groupProperties = (LinkedHashMap<String,Object>)defs.get(PROPERTIES);
- //groupMembers = (ArrayList<String>)defs.get(MEMBERS);
- Object mdo = defs.get(METADATA);
- if(mdo instanceof LinkedHashMap) {
- metaData = (LinkedHashMap<String,Object>)mdo;
- }
- else {
- metaData = null;
- }
-
- if(metaData != null) {
- _validateMetadata(metaData);
- }
- }
- }
-
- public GroupType getParentType() {
- // Return the group StatefulEntityType from which this entity is derived.
- if(defs == null) {
- return null;
- }
- String pgroupEntity = derivedFrom(defs);
- if(pgroupEntity != null) {
- return new GroupType(pgroupEntity,customDef);
- }
- return null;
- }
-
- public String getDescription() {
- return groupDescription;
- }
-
- public String getVersion() {
- return groupVersion;
- }
-
- @SuppressWarnings("unchecked")
- public LinkedHashMap<String,Object> getInterfaces() {
- Object ifo = getValue(INTERFACES,null,false);
- if(ifo instanceof LinkedHashMap) {
- return (LinkedHashMap<String, Object>)ifo;
- }
- return new LinkedHashMap<String,Object>();
- }
-
- private void _validateFields() {
- if(defs != null) {
- for(String name: defs.keySet()) {
- boolean bFound = false;
- for(String sect: SECTIONS) {
- if(name.equals(sect)) {
- bFound = true;
- break;
- }
- }
- if(!bFound) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "UnknownFieldError: Group Type \"%s\" contains unknown field \"%s\"",
- groupType,name));
- }
- }
- }
- }
-
- @SuppressWarnings("unchecked")
- private void _validateMetadata(LinkedHashMap<String,Object> metadata) {
- String mtt = (String) metadata.get("type");
- if(mtt != null && !mtt.equals("map") && !mtt.equals("tosca:map")) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "InvalidTypeError: \"%s\" defined in group for metadata is invalid",
- mtt));
- }
- for(String entrySchema: metadata.keySet()) {
- Object estob = metadata.get(entrySchema);
- if(estob instanceof LinkedHashMap) {
- String est = (String)((LinkedHashMap<String,Object>)estob).get("type");
- if(!est.equals("string")) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "InvalidTypeError: \"%s\" defined in group for metadata \"%s\" is invalid",
- est,entrySchema));
- }
- }
- }
- }
-
- public String getType() {
- return groupType;
- }
-
-
-}
-
-/*python
-
-from toscaparser.common.exception import ExceptionCollector
-from toscaparser.common.exception import InvalidTypeError
-from toscaparser.common.exception import UnknownFieldError
-from toscaparser.elements.statefulentitytype import StatefulEntityType
-
-
-class GroupType(StatefulEntityType):
- '''TOSCA built-in group type.'''
-
- SECTIONS = (DERIVED_FROM, VERSION, METADATA, DESCRIPTION, PROPERTIES,
- MEMBERS, INTERFACES) = \
- ("derived_from", "version", "metadata", "description",
- "properties", "members", "interfaces")
-
- def __init__(self, grouptype, custom_def=None):
- super(GroupType, self).__init__(grouptype, self.GROUP_PREFIX,
- custom_def)
- self.custom_def = custom_def
- self.grouptype = grouptype
- self._validate_fields()
- self.group_description = None
- if self.DESCRIPTION in self.defs:
- self.group_description = self.defs[self.DESCRIPTION]
-
- self.group_version = None
- if self.VERSION in self.defs:
- self.group_version = self.defs[self.VERSION]
-
- self.group_properties = None
- if self.PROPERTIES in self.defs:
- self.group_properties = self.defs[self.PROPERTIES]
-
- self.group_members = None
- if self.MEMBERS in self.defs:
- self.group_members = self.defs[self.MEMBERS]
-
- if self.METADATA in self.defs:
- self.meta_data = self.defs[self.METADATA]
- self._validate_metadata(self.meta_data)
-
- @property
- def parent_type(self):
- '''Return a group statefulentity of this entity is derived from.'''
- if not hasattr(self, 'defs'):
- return None
- pgroup_entity = self.derived_from(self.defs)
- if pgroup_entity:
- return GroupType(pgroup_entity, self.custom_def)
-
- @property
- def description(self):
- return self.group_description
-
- @property
- def version(self):
- return self.group_version
-
- @property
- def interfaces(self):
- return self.get_value(self.INTERFACES)
-
- def _validate_fields(self):
- if self.defs:
- for name in self.defs.keys():
- if name not in self.SECTIONS:
- ExceptionCollector.appendException(
- UnknownFieldError(what='Group Type %s'
- % self.grouptype, field=name))
-
- def _validate_metadata(self, meta_data):
- if not meta_data.get('type') in ['map', 'tosca:map']:
- ExceptionCollector.appendException(
- InvalidTypeError(what='"%s" defined in group for '
- 'metadata' % (meta_data.get('type'))))
- for entry_schema, entry_schema_type in meta_data.items():
- if isinstance(entry_schema_type, dict) and not \
- entry_schema_type.get('type') == 'string':
- ExceptionCollector.appendException(
- InvalidTypeError(what='"%s" defined in group for '
- 'metadata "%s"'
- % (entry_schema_type.get('type'),
- entry_schema)))
-*/ \ No newline at end of file
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/InterfacesDef.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/InterfacesDef.java
deleted file mode 100644
index 8a2b4dd..0000000
--- a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/InterfacesDef.java
+++ /dev/null
@@ -1,228 +0,0 @@
-package org.openecomp.sdc.toscaparser.api.elements;
-
-import java.util.ArrayList;
-import java.util.LinkedHashMap;
-import java.util.Map;
-
-import org.openecomp.sdc.toscaparser.api.EntityTemplate;
-import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector;
-import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder;
-
-public class InterfacesDef extends StatefulEntityType {
-
- public static final String LIFECYCLE = "tosca.interfaces.node.lifecycle.Standard";
- public static final String CONFIGURE = "tosca.interfaces.relationship.Configure";
- public static final String LIFECYCLE_SHORTNAME = "Standard";
- public static final String CONFIGURE_SHORTNAME = "Configure";
-
- public static final String SECTIONS[] = {
- LIFECYCLE, CONFIGURE, LIFECYCLE_SHORTNAME,CONFIGURE_SHORTNAME
- };
-
- public static final String IMPLEMENTATION = "implementation";
- public static final String INPUTS = "inputs";
-
- public static final String INTERFACEVALUE[] = {IMPLEMENTATION, INPUTS};
-
- public static final String INTERFACE_DEF_RESERVED_WORDS[] = {
- "type", "inputs", "derived_from", "version", "description"};
-
- private EntityType ntype;
- private EntityTemplate nodeTemplate;
- private String name;
- private Object value;
- private String implementation;
- private LinkedHashMap<String,Object> inputs;
-
-
- @SuppressWarnings("unchecked")
- public InterfacesDef(EntityType inodeType,
- String interfaceType,
- EntityTemplate inodeTemplate,
- String iname,
- Object ivalue) {
- // void
- super();
-
- ntype = inodeType;
- nodeTemplate = inodeTemplate;
- type = interfaceType;
- name = iname;
- value = ivalue;
- implementation = null;
- inputs = null;
- defs = new LinkedHashMap<String,Object>();
-
- if(interfaceType.equals(LIFECYCLE_SHORTNAME)) {
- interfaceType = LIFECYCLE;
- }
- if(interfaceType.equals(CONFIGURE_SHORTNAME)) {
- interfaceType = CONFIGURE;
- }
-
- // only NodeType has getInterfaces "hasattr(ntype,interfaces)"
- // while RelationshipType does not
- if(ntype instanceof NodeType) {
- if(((NodeType)ntype).getInterfaces() != null &&
- ((NodeType)ntype).getInterfaces().values().contains(interfaceType)) {
- LinkedHashMap<String,Object> nii = (LinkedHashMap<String,Object>)
- ((NodeType)ntype).getInterfaces().get(interfaceType);
- interfaceType = (String)nii.get("type");
- }
- }
- if(inodeType != null) {
- if(nodeTemplate != null && nodeTemplate.getCustomDef() != null &&
- nodeTemplate.getCustomDef().values().contains(interfaceType)) {
- defs = (LinkedHashMap<String,Object>)
- nodeTemplate.getCustomDef().get(interfaceType);
- }
- else {
- defs = (LinkedHashMap<String,Object>)TOSCA_DEF.get(interfaceType);
- }
- }
-
- if(ivalue != null) {
- if(ivalue instanceof LinkedHashMap) {
- for(Map.Entry<String,Object> me: ((LinkedHashMap<String,Object>)ivalue).entrySet()) {
- if(me.getKey().equals("implementation")) {
- implementation = (String)me.getValue();
- }
- else if(me.getKey().equals("inputs")) {
- inputs = (LinkedHashMap<String,Object>)me.getValue();
- }
- else {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "UnknownFieldError: \"interfaces\" of template \"%s\" contain unknown field \"%s\"",
- nodeTemplate.getName(),me.getKey()));
- }
- }
- }
- else {
- implementation = (String)ivalue;
- }
- }
- }
-
- public ArrayList<String> getLifecycleOps() {
- if(defs != null) {
- if(type.equals(LIFECYCLE)) {
- return _ops();
- }
- }
- return null;
- }
-
- public ArrayList<String> getConfigureOps() {
- if(defs != null) {
- if(type.equals(CONFIGURE)) {
- return _ops();
- }
- }
- return null;
- }
-
- private ArrayList<String> _ops() {
- return new ArrayList<String>(defs.keySet());
- }
-
- // getters/setters
-
- public LinkedHashMap<String,Object> getInputs() {
- return inputs;
- }
-
- public void setInput(String name,Object value) {
- inputs.put(name, value);
- }
-}
-
-/*python
-
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-from toscaparser.common.exception import ExceptionCollector
-from toscaparser.common.exception import UnknownFieldError
-from toscaparser.elements.statefulentitytype import StatefulEntityType
-
-SECTIONS = (LIFECYCLE, CONFIGURE, LIFECYCLE_SHORTNAME,
- CONFIGURE_SHORTNAME) = \
- ('tosca.interfaces.node.lifecycle.Standard',
- 'tosca.interfaces.relationship.Configure',
- 'Standard', 'Configure')
-
-INTERFACEVALUE = (IMPLEMENTATION, INPUTS) = ('implementation', 'inputs')
-
-INTERFACE_DEF_RESERVED_WORDS = ['type', 'inputs', 'derived_from', 'version',
- 'description']
-
-
-class InterfacesDef(StatefulEntityType):
- '''TOSCA built-in interfaces type.'''
-
- def __init__(self, node_type, interfacetype,
- node_template=None, name=None, value=None):
- self.ntype = node_type
- self.node_template = node_template
- self.type = interfacetype
- self.name = name
- self.value = value
- self.implementation = None
- self.inputs = None
- self.defs = {}
- if interfacetype == LIFECYCLE_SHORTNAME:
- interfacetype = LIFECYCLE
- if interfacetype == CONFIGURE_SHORTNAME:
- interfacetype = CONFIGURE
- if hasattr(self.ntype, 'interfaces') \
- and self.ntype.interfaces \
- and interfacetype in self.ntype.interfaces:
- interfacetype = self.ntype.interfaces[interfacetype]['type']
- if node_type:
- if self.node_template and self.node_template.custom_def \
- and interfacetype in self.node_template.custom_def:
- self.defs = self.node_template.custom_def[interfacetype]
- else:
- self.defs = self.TOSCA_DEF[interfacetype]
- if value:
- if isinstance(self.value, dict):
- for i, j in self.value.items():
- if i == IMPLEMENTATION:
- self.implementation = j
- elif i == INPUTS:
- self.inputs = j
- else:
- what = ('"interfaces" of template "%s"' %
- self.node_template.name)
- ExceptionCollector.appendException(
- UnknownFieldError(what=what, field=i))
- else:
- self.implementation = value
-
- @property
- def lifecycle_ops(self):
- if self.defs:
- if self.type == LIFECYCLE:
- return self._ops()
-
- @property
- def configure_ops(self):
- if self.defs:
- if self.type == CONFIGURE:
- return self._ops()
-
- def _ops(self):
- ops = []
- for name in list(self.defs.keys()):
- ops.append(name)
- return ops
-*/ \ No newline at end of file
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/Metadata.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/Metadata.java
deleted file mode 100644
index 4f7bdd0..0000000
--- a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/Metadata.java
+++ /dev/null
@@ -1,35 +0,0 @@
-package org.openecomp.sdc.toscaparser.api.elements;
-
-import java.util.Map;
-
-public class Metadata {
-
- private final Map<String, Object> metadataMap;
-
- public Metadata(Map<String, Object> metadataMap) {
- this.metadataMap = metadataMap;
- }
-
- public String getValue(String key) {
- return !isEmpty() ? String.valueOf(this.metadataMap.get(key)) : null;
- }
-
- public void setValue(String key, Object value) {
- if (!isEmpty()) {
- this.metadataMap.put(key, value);
- }
- }
-
-
- private boolean isEmpty() {
- return this.metadataMap == null || this.metadataMap.size() == 0;
- }
-
- @Override
- public String toString() {
- return "Metadata{" +
- "metadataMap=" + metadataMap +
- '}';
- }
-
-}
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/NodeType.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/NodeType.java
deleted file mode 100644
index d5f1a18..0000000
--- a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/NodeType.java
+++ /dev/null
@@ -1,523 +0,0 @@
-package org.openecomp.sdc.toscaparser.api.elements;
-
-import java.util.ArrayList;
-import java.util.LinkedHashMap;
-import java.util.Map;
-
-import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector;
-import org.openecomp.sdc.toscaparser.api.elements.InterfacesDef;
-import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder;
-
-public class NodeType extends StatefulEntityType {
- // TOSCA built-in node type
-
- private static final String DERIVED_FROM = "derived_from";
- private static final String METADATA = "metadata";
- private static final String PROPERTIES = "properties";
- private static final String VERSION = "version";
- private static final String DESCRIPTION = "description";
- private static final String ATTRIBUTES = "attributes";
- private static final String REQUIREMENTS = "requirements";
- private static final String CAPABILITIES = "capabilities";
- private static final String INTERFACES = "interfaces";
- private static final String ARTIFACTS = "artifacts";
-
- private static final String SECTIONS[] = {
- DERIVED_FROM, METADATA, PROPERTIES, VERSION, DESCRIPTION, ATTRIBUTES, REQUIREMENTS, CAPABILITIES, INTERFACES, ARTIFACTS
- };
-
- private String ntype;
- public LinkedHashMap<String,Object> customDef;
-
- public NodeType(String nttype,LinkedHashMap<String,Object> ntcustomDef) {
- super(nttype,NODE_PREFIX, ntcustomDef);
- ntype = nttype;
- customDef = ntcustomDef;
- _validateKeys();
- }
-
- public Object getParentType() {
- // Return a node this node is derived from
- if(defs == null) {
- return null;
- }
- String pnode = derivedFrom(defs);
- if(pnode != null && !pnode.isEmpty()) {
- return new NodeType(pnode,customDef);
- }
- return null;
- }
-
- @SuppressWarnings("unchecked")
- public LinkedHashMap<RelationshipType,NodeType> getRelationship() {
- // Return a dictionary of relationships to other node types
-
- // This method returns a dictionary of named relationships that nodes
- // of the current node type (self) can have to other nodes (of specific
- // types) in a TOSCA template.
-
- LinkedHashMap<RelationshipType,NodeType> relationship = new LinkedHashMap<>();
- ArrayList<LinkedHashMap<String,Object>> requires;
- Object treq = getAllRequirements();
- if(treq != null) {
- // NOTE(sdmonov): Check if requires is a dict.
- // If it is a dict convert it to a list of dicts.
- // This is needed because currently the code below supports only
- // lists as requirements definition. The following check will
- // make sure if a map (dict) was provided it will be converted to
- // a list before proceeding to the parsing.
- if(treq instanceof LinkedHashMap) {
- requires = new ArrayList<>();
- for(Map.Entry<String,Object> me: ((LinkedHashMap<String,Object>)treq).entrySet()) {
- LinkedHashMap<String,Object> tl = new LinkedHashMap<>();
- tl.put(me.getKey(),me.getValue());
- requires.add(tl);
- }
- }
- else {
- requires = (ArrayList<LinkedHashMap<String,Object>>)treq;
- }
-
- String keyword = null;
- String nodeType = null;
- for(LinkedHashMap<String,Object> require: requires) {
- String relation = null;
- for(Map.Entry<String,Object> re: require.entrySet()) {
- String key = re.getKey();
- LinkedHashMap<String,Object> req = (LinkedHashMap<String,Object>)re.getValue();
- if(req.get("relationship") != null) {
- Object trelation = req.get("relationship");
- // trelation is a string or a dict with "type" mapped to the string we want
- if(trelation instanceof String) {
- relation = (String)trelation;
- }
- else {
- if(((LinkedHashMap<String,Object>)trelation).get("type") != null) {
- relation = (String)((LinkedHashMap<String,Object>)trelation).get("type");
- }
- }
- nodeType = (String)req.get("node");
- //BUG meaningless?? LinkedHashMap<String,Object> value = req;
- if(nodeType != null) {
- keyword = "node";
- }
- else {
- // If value is a dict and has a type key
- // we need to lookup the node type using
- // the capability type
- String captype = (String)req.get("capability");
- String value = _getNodeTypeByCap(captype);
- String getRelation = _getRelation(key,value);
- if (getRelation != null) {
- relation = getRelation;
- }
- keyword = key;
- nodeType = value;
- }
- }
-
- }
- RelationshipType rtype = new RelationshipType(relation, keyword, customDef);
- NodeType relatednode = new NodeType(nodeType, customDef);
- relationship.put(rtype, relatednode);
- }
- }
- return relationship;
-
- }
-
- @SuppressWarnings("unchecked")
- private String _getNodeTypeByCap(String cap) {
- // Find the node type that has the provided capability
-
- // This method will lookup all node types if they have the
- // provided capability.
-
- // Filter the node types
- ArrayList<String> nodeTypes = new ArrayList<>();
- for(String nt: TOSCA_DEF.keySet()) {
- if(nt.startsWith(NODE_PREFIX) && !nt.equals("tosca.nodes.Root")) {
- nodeTypes.add(nt);
- }
- }
- for(String nt: nodeTypes) {
- LinkedHashMap<String,Object> nodeDef = (LinkedHashMap<String,Object>)TOSCA_DEF.get(nt);
- if(nodeDef instanceof LinkedHashMap && nodeDef.get("capabilities") != null) {
- LinkedHashMap<String,Object> nodeCaps = (LinkedHashMap<String,Object>)nodeDef.get("capabilities");
- if(nodeCaps != null) {
- for(Object val: nodeCaps.values()) {
- if(val instanceof LinkedHashMap) {
- String tp = (String)((LinkedHashMap<String,Object>)val).get("type");
- if(tp != null && tp.equals(cap)) {
- return nt;
- }
- }
- }
- }
- }
- }
- return null;
- }
-
- @SuppressWarnings("unchecked")
- private String _getRelation(String key,String ndtype) {
- String relation = null;
- NodeType ntype = new NodeType(ndtype,null);
- LinkedHashMap<String,CapabilityTypeDef> caps = ntype.getCapabilities();
- if(caps != null && caps.get(key) != null) {
- CapabilityTypeDef c = caps.get(key);
- for(int i=0; i< RELATIONSHIP_TYPE.length; i++) {
- String r = RELATIONSHIP_TYPE[i];
- LinkedHashMap<String,Object> rtypedef = (LinkedHashMap<String,Object>)TOSCA_DEF.get(r);
- for(Object o: rtypedef.values()) {
- LinkedHashMap<String,Object> properties = (LinkedHashMap<String,Object>)o;
- if(properties.get(c.getType()) != null) {
- relation = r;
- break;
- }
- }
- if(relation != null) {
- break;
- }
- else {
- for(Object o: rtypedef.values()) {
- LinkedHashMap<String,Object> properties = (LinkedHashMap<String,Object>)o;
- if(properties.get(c.getParentType()) != null) {
- relation = r;
- break;
- }
- }
- }
- }
- }
- return relation;
- }
-
- @SuppressWarnings("unchecked")
- public ArrayList<CapabilityTypeDef> getCapabilitiesObjects() {
- // Return a list of capability objects
- ArrayList<CapabilityTypeDef> typecapabilities = new ArrayList<>();
- LinkedHashMap<String,Object> caps = (LinkedHashMap<String,Object>)getValue(CAPABILITIES, null, true);
- if(caps != null) {
- // 'cname' is symbolic name of the capability
- // 'cvalue' is a dict { 'type': <capability type name> }
- for(Map.Entry<String,Object> me: caps.entrySet()) {
- String cname = me.getKey();
- LinkedHashMap<String,String> cvalue = (LinkedHashMap<String,String>)me.getValue();
- String ctype = cvalue.get("type");
- CapabilityTypeDef cap = new CapabilityTypeDef(cname,ctype,type,customDef);
- typecapabilities.add(cap);
- }
- }
- return typecapabilities;
- }
-
- public LinkedHashMap<String,CapabilityTypeDef> getCapabilities() {
- // Return a dictionary of capability name-object pairs
- LinkedHashMap<String,CapabilityTypeDef> caps = new LinkedHashMap<>();
- for(CapabilityTypeDef ctd: getCapabilitiesObjects()) {
- caps.put(ctd.getName(),ctd);
- }
- return caps;
- }
-
- @SuppressWarnings("unchecked")
- public ArrayList<Object> getRequirements() {
- return (ArrayList<Object>)getValue(REQUIREMENTS,null,true);
- }
-
- public ArrayList<Object> getAllRequirements() {
- return getRequirements();
- }
-
- @SuppressWarnings("unchecked")
- public LinkedHashMap<String,Object> getInterfaces() {
- return (LinkedHashMap<String,Object>)getValue(INTERFACES,null,false);
- }
-
-
- @SuppressWarnings("unchecked")
- public ArrayList<String> getLifecycleInputs()
- {
- // Return inputs to life cycle operations if found
- ArrayList<String> inputs = new ArrayList<>();
- LinkedHashMap<String,Object> interfaces = getInterfaces();
- if(interfaces != null) {
- for(Map.Entry<String,Object> me: interfaces.entrySet()) {
- String iname = me.getKey();
- LinkedHashMap<String,Object> ivalue = (LinkedHashMap<String,Object>)me.getValue();
- if(iname.equals(InterfacesDef.LIFECYCLE)) {
- for(Map.Entry<String,Object> ie: ivalue.entrySet()) {
- if(ie.getKey().equals("input")) {
- LinkedHashMap<String,Object> y = (LinkedHashMap<String,Object>)ie.getValue();
- for(String i: y.keySet()) {
- inputs.add(i);
- }
- }
- }
- }
- }
- }
- return inputs;
- }
-
- public ArrayList<String> getLifecycleOperations() {
- // Return available life cycle operations if found
- ArrayList<String> ops = null;
- LinkedHashMap<String,Object> interfaces = getInterfaces();
- if(interfaces != null) {
- InterfacesDef i = new InterfacesDef(this,InterfacesDef.LIFECYCLE,null,null,null);
- ops = i.getLifecycleOps();
- }
- return ops;
- }
-
- public CapabilityTypeDef getCapability(String name) {
- //BUG?? the python code has to be wrong
- // it refers to a bad attribute 'value'...
- LinkedHashMap<String,CapabilityTypeDef> caps = getCapabilities();
- if(caps != null) {
- return caps.get(name);
- }
- return null;
- /*
- def get_capability(self, name):
- caps = self.get_capabilities()
- if caps and name in caps.keys():
- return caps[name].value
- */
- }
-
- public String getCapabilityType(String name) {
- //BUG?? the python code has to be wrong
- // it refers to a bad attribute 'value'...
- CapabilityTypeDef captype = getCapability(name);
- if(captype != null) {
- return captype.getType();
- }
- return null;
- /*
- def get_capability_type(self, name):
- captype = self.get_capability(name)
- if captype and name in captype.keys():
- return captype[name].value
- */
- }
-
- private void _validateKeys() {
- if(defs != null) {
- for(String key: defs.keySet()) {
- boolean bFound = false;
- for(int i=0; i< SECTIONS.length; i++) {
- if(key.equals(SECTIONS[i])) {
- bFound = true;
- break;
- }
- }
- if(!bFound) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "UnknownFieldError: Nodetype \"%s\" has unknown field \"%s\"",ntype,key));
- }
- }
- }
- }
-
-}
-
-/*python
-
-from toscaparser.common.exception import ExceptionCollector
-from toscaparser.common.exception import UnknownFieldError
-from toscaparser.elements.capabilitytype import CapabilityTypeDef
-import org.openecomp.sdc.toscaparser.api.elements.interfaces as ifaces
-from toscaparser.elements.interfaces import InterfacesDef
-from toscaparser.elements.relationshiptype import RelationshipType
-from toscaparser.elements.statefulentitytype import StatefulEntityType
-
-
-class NodeType(StatefulEntityType):
- '''TOSCA built-in node type.'''
- SECTIONS = (DERIVED_FROM, METADATA, PROPERTIES, VERSION, DESCRIPTION, ATTRIBUTES, REQUIREMENTS, CAPABILITIES, INTERFACES, ARTIFACTS) = \
- ('derived_from', 'metadata', 'properties', 'version',
- 'description', 'attributes', 'requirements', 'capabilities',
- 'interfaces', 'artifacts')
-
- def __init__(self, ntype, custom_def=None):
- super(NodeType, self).__init__(ntype, self.NODE_PREFIX, custom_def)
- self.ntype = ntype
- self.custom_def = custom_def
- self._validate_keys()
-
- @property
- def parent_type(self):
- '''Return a node this node is derived from.'''
- if not hasattr(self, 'defs'):
- return None
- pnode = self.derived_from(self.defs)
- if pnode:
- return NodeType(pnode, self.custom_def)
-
- @property
- def relationship(self):
- '''Return a dictionary of relationships to other node types.
-
- This method returns a dictionary of named relationships that nodes
- of the current node type (self) can have to other nodes (of specific
- types) in a TOSCA template.
-
- '''
- relationship = {}
- requires = self.get_all_requirements()
- if requires:
- # NOTE(sdmonov): Check if requires is a dict.
- # If it is a dict convert it to a list of dicts.
- # This is needed because currently the code below supports only
- # lists as requirements definition. The following check will
- # make sure if a map (dict) was provided it will be converted to
- # a list before proceeding to the parsing.
- if isinstance(requires, dict):
- requires = [{key: value} for key, value in requires.items()]
-
- keyword = None
- node_type = None
- for require in requires:
- for key, req in require.items():
- if 'relationship' in req:
- relation = req.get('relationship')
- if 'type' in relation:
- relation = relation.get('type')
- node_type = req.get('node')
- value = req
- if node_type:
- keyword = 'node'
- else:
- # If value is a dict and has a type key
- # we need to lookup the node type using
- # the capability type
- value = req
- if isinstance(value, dict):
- captype = value['capability']
- value = (self.
- _get_node_type_by_cap(key, captype))
- relation = self._get_relation(key, value)
- keyword = key
- node_type = value
- rtype = RelationshipType(relation, keyword, self.custom_def)
- relatednode = NodeType(node_type, self.custom_def)
- relationship[rtype] = relatednode
- return relationship
-
- def _get_node_type_by_cap(self, key, cap):
- '''Find the node type that has the provided capability
-
- This method will lookup all node types if they have the
- provided capability.
- '''
-
- # Filter the node types
- node_types = [node_type for node_type in self.TOSCA_DEF.keys()
- if node_type.startswith(self.NODE_PREFIX) and
- node_type != 'tosca.nodes.Root']
-
- for node_type in node_types:
- node_def = self.TOSCA_DEF[node_type]
- if isinstance(node_def, dict) and 'capabilities' in node_def:
- node_caps = node_def['capabilities']
- for value in node_caps.values():
- if isinstance(value, dict) and \
- 'type' in value and value['type'] == cap:
- return node_type
-
- def _get_relation(self, key, ndtype):
- relation = None
- ntype = NodeType(ndtype)
- caps = ntype.get_capabilities()
- if caps and key in caps.keys():
- c = caps[key]
- for r in self.RELATIONSHIP_TYPE:
- rtypedef = ntype.TOSCA_DEF[r]
- for properties in rtypedef.values():
- if c.type in properties:
- relation = r
- break
- if relation:
- break
- else:
- for properties in rtypedef.values():
- if c.parent_type in properties:
- relation = r
- break
- return relation
-
- def get_capabilities_objects(self):
- '''Return a list of capability objects.'''
- typecapabilities = []
- caps = self.get_value(self.CAPABILITIES, None, True)
- if caps:
- # 'name' is symbolic name of the capability
- # 'value' is a dict { 'type': <capability type name> }
- for name, value in caps.items():
- ctype = value.get('type')
- cap = CapabilityTypeDef(name, ctype, self.type,
- self.custom_def)
- typecapabilities.append(cap)
- return typecapabilities
-
- def get_capabilities(self):
- '''Return a dictionary of capability name-objects pairs.'''
- return {cap.name: cap
- for cap in self.get_capabilities_objects()}
-
- @property
- def requirements(self):
- return self.get_value(self.REQUIREMENTS, None, True)
-
- def get_all_requirements(self):
- return self.requirements
-
- @property
- def interfaces(self):
- return self.get_value(self.INTERFACES)
-
- @property
- def lifecycle_inputs(self):
- '''Return inputs to life cycle operations if found.'''
- inputs = []
- interfaces = self.interfaces
- if interfaces:
- for name, value in interfaces.items():
- if name == ifaces.LIFECYCLE:
- for x, y in value.items():
- if x == 'inputs':
- for i in y.iterkeys():
- inputs.append(i)
- return inputs
-
- @property
- def lifecycle_operations(self):
- '''Return available life cycle operations if found.'''
- ops = None
- interfaces = self.interfaces
- if interfaces:
- i = InterfacesDef(self.type, ifaces.LIFECYCLE)
- ops = i.lifecycle_ops
- return ops
-
- def get_capability(self, name):
- caps = self.get_capabilities()
- if caps and name in caps.keys():
- return caps[name].value
-
- def get_capability_type(self, name):
- captype = self.get_capability(name)
- if captype and name in captype.keys():
- return captype[name].value
-
- def _validate_keys(self):
- if self.defs:
- for key in self.defs.keys():
- if key not in self.SECTIONS:
- ExceptionCollector.appendException(
- UnknownFieldError(what='Nodetype"%s"' % self.ntype,
- field=key))
-*/ \ No newline at end of file
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/PolicyType.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/PolicyType.java
deleted file mode 100644
index c60bed1..0000000
--- a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/PolicyType.java
+++ /dev/null
@@ -1,290 +0,0 @@
-package org.openecomp.sdc.toscaparser.api.elements;
-
-import java.util.ArrayList;
-import java.util.LinkedHashMap;
-
-import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector;
-import org.openecomp.sdc.toscaparser.api.utils.TOSCAVersionProperty;
-import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder;
-
-public class PolicyType extends StatefulEntityType {
-
- private static final String DERIVED_FROM = "derived_from";
- private static final String METADATA = "metadata";
- private static final String PROPERTIES = "properties";
- private static final String VERSION = "version";
- private static final String DESCRIPTION = "description";
- private static final String TARGETS = "targets";
- private static final String TRIGGERS = "triggers";
- private static final String TYPE = "type";
-
- private static final String SECTIONS[] = {
- DERIVED_FROM, METADATA, PROPERTIES, VERSION, DESCRIPTION, TARGETS, TRIGGERS, TYPE
- };
-
- private LinkedHashMap<String,Object> customDef;
- private String policyDescription;
- private Object policyVersion;
- private LinkedHashMap<String,Object> properties;
- private LinkedHashMap<String,Object> parentPolicies;
- private LinkedHashMap<String,Object> metaData;
- private ArrayList<String> targetsList;
-
-
- public PolicyType(String _type, LinkedHashMap<String,Object> _customDef) {
- super(_type,POLICY_PREFIX,_customDef);
-
- type = _type;
- customDef = _customDef;
- _validateKeys();
-
- metaData = null;
- if(defs != null && defs.get(METADATA) != null) {
- metaData = (LinkedHashMap<String,Object>)defs.get(METADATA);
- _validateMetadata(metaData);
- }
-
- properties = null;
- if(defs != null && defs.get(PROPERTIES) != null) {
- properties = (LinkedHashMap<String,Object>)defs.get(PROPERTIES);
- }
- parentPolicies = _getParentPolicies();
-
- policyVersion = null;
- if(defs != null && defs.get(VERSION) != null) {
- policyVersion = (new TOSCAVersionProperty(
- defs.get(VERSION))).getVersion();
- }
-
- policyDescription = null;
- if(defs != null && defs.get(DESCRIPTION) != null) {
- policyDescription = (String)defs.get(DESCRIPTION);
- }
-
- targetsList = null;
- if(defs != null && defs.get(TARGETS) != null) {
- targetsList = (ArrayList<String>)defs.get(TARGETS);
- _validateTargets(targetsList,customDef);
- }
-
- }
-
- private LinkedHashMap<String,Object> _getParentPolicies() {
- LinkedHashMap<String,Object> policies = new LinkedHashMap<>();
- String parentPolicy;
- if(getParentType() != null) {
- parentPolicy = getParentType().getType();
- }
- else {
- parentPolicy = null;
- }
- if(parentPolicy != null) {
- while(parentPolicy != null && !parentPolicy.equals("tosca.policies.Root")) {
- policies.put(parentPolicy, TOSCA_DEF.get(parentPolicy));
- parentPolicy = (String)
- ((LinkedHashMap<String,Object>)policies.get(parentPolicy)).get("derived_from");
- }
- }
- return policies;
- }
-
- public String getType() {
- return type;
- }
-
- public PolicyType getParentType() {
- // Return the parent policy type this policy type is derived from
- if(defs == null) {
- return null;
- }
- String ppolicyEntity = derivedFrom(defs);
- if(ppolicyEntity != null) {
- return new PolicyType(ppolicyEntity,customDef);
- }
- return null;
- }
-
- public Object getPolicy(String name) {
- // Return the definition of a policy field by name
- if(defs != null && defs.get(name) != null) {
- return defs.get(name);
- }
- return null;
- }
-
- public ArrayList<String> getTargets() {
- // Return targets
- return targetsList;
- }
-
- public String getDescription() {
- return policyDescription;
- }
-
- public Object getVersion() {
- return policyVersion;
- }
-
- private void _validateKeys() {
- for(String key: defs.keySet()) {
- boolean bFound = false;
- for(String sect: SECTIONS) {
- if(key.equals(sect)) {
- bFound = true;
- break;
- }
- }
- if(!bFound) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "UnknownFieldError: Policy \"%s\" contains unknown field \"%s\"",
- type,key));
- }
- }
- }
-
- private void _validateTargets(ArrayList<String> _targetsList,
- LinkedHashMap<String,Object> _customDef) {
- for(String nodetype: _targetsList) {
- if(_customDef.get(nodetype) == null) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "InvalidTypeError: \"%s\" defined in targets for policy \"%s\"",
- nodetype,type));
-
- }
- }
- }
-
- private void _validateMetadata(LinkedHashMap<String,Object> _metaData) {
- String mtype = (String)_metaData.get("type");
- if(mtype != null && !mtype.equals("map") && !mtype.equals("tosca:map")) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "InvalidTypeError: \"%s\" defined in policy for metadata",
- mtype));
- }
- for(String entrySchema: _metaData.keySet()) {
- Object estob = _metaData.get(entrySchema);
- if(estob instanceof LinkedHashMap) {
- String est = (String)
- ((LinkedHashMap<String,Object>)estob).get("type");
- if(!est.equals("string")) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "InvalidTypeError: \"%s\" defined in policy for metadata \"%s\"",
- est,entrySchema));
- }
- }
- }
- }
-
-}
-
-/*python
-
-from toscaparser.common.exception import ExceptionCollector
-from toscaparser.common.exception import InvalidTypeError
-from toscaparser.common.exception import UnknownFieldError
-from toscaparser.elements.statefulentitytype import StatefulEntityType
-from toscaparser.utils.validateutils import TOSCAVersionProperty
-
-
-class PolicyType(StatefulEntityType):
-
- '''TOSCA built-in policies type.'''
- SECTIONS = (DERIVED_FROM, METADATA, PROPERTIES, VERSION, DESCRIPTION, TARGETS) = \
- ('derived_from', 'metadata', 'properties', 'version',
- 'description', 'targets')
-
- def __init__(self, ptype, custom_def=None):
- super(PolicyType, self).__init__(ptype, self.POLICY_PREFIX,
- custom_def)
- self.type = ptype
- self.custom_def = custom_def
- self._validate_keys()
-
- self.meta_data = None
- if self.METADATA in self.defs:
- self.meta_data = self.defs[self.METADATA]
- self._validate_metadata(self.meta_data)
-
- self.properties = None
- if self.PROPERTIES in self.defs:
- self.properties = self.defs[self.PROPERTIES]
- self.parent_policies = self._get_parent_policies()
-
- self.policy_version = None
- if self.VERSION in self.defs:
- self.policy_version = TOSCAVersionProperty(
- self.defs[self.VERSION]).get_version()
-
- self.policy_description = self.defs[self.DESCRIPTION] \
- if self.DESCRIPTION in self.defs else None
-
- self.targets_list = None
- if self.TARGETS in self.defs:
- self.targets_list = self.defs[self.TARGETS]
- self._validate_targets(self.targets_list, custom_def)
-
- def _get_parent_policies(self):
- policies = {}
- parent_policy = self.parent_type.type if self.parent_type else None
- if parent_policy:
- while parent_policy != 'tosca.policies.Root':
- policies[parent_policy] = self.TOSCA_DEF[parent_policy]
- parent_policy = policies[parent_policy]['derived_from']
- return policies
-
- @property
- def parent_type(self):
- '''Return a policy statefulentity of this node is derived from.'''
- if not hasattr(self, 'defs'):
- return None
- ppolicy_entity = self.derived_from(self.defs)
- if ppolicy_entity:
- return PolicyType(ppolicy_entity, self.custom_def)
-
- def get_policy(self, name):
- '''Return the definition of a policy field by name.'''
- if name in self.defs:
- return self.defs[name]
-
- @property
- def targets(self):
- '''Return targets.'''
- return self.targets_list
-
- @property
- def description(self):
- return self.policy_description
-
- @property
- def version(self):
- return self.policy_version
-
- def _validate_keys(self):
- for key in self.defs.keys():
- if key not in self.SECTIONS:
- ExceptionCollector.appendException(
- UnknownFieldError(what='Policy "%s"' % self.type,
- field=key))
-
- def _validate_targets(self, targets_list, custom_def):
- for nodetype in targets_list:
- if nodetype not in custom_def:
- ExceptionCollector.appendException(
- InvalidTypeError(what='"%s" defined in targets for '
- 'policy "%s"' % (nodetype, self.type)))
-
- def _validate_metadata(self, meta_data):
- if not meta_data.get('type') in ['map', 'tosca:map']:
- ExceptionCollector.appendException(
- InvalidTypeError(what='"%s" defined in policy for '
- 'metadata' % (meta_data.get('type'))))
-
- for entry_schema, entry_schema_type in meta_data.items():
- if isinstance(entry_schema_type, dict) and not \
- entry_schema_type.get('type') == 'string':
- ExceptionCollector.appendException(
- InvalidTypeError(what='"%s" defined in policy for '
- 'metadata "%s"'
- % (entry_schema_type.get('type'),
- entry_schema)))
-*/ \ No newline at end of file
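For orientation, a minimal sketch of how the PolicyType API removed above could be exercised; "tosca.policies.Placement" is a standard TOSCA policy type used purely for illustration, and the loop assumes getParentType() returns null once the derivation chain is exhausted.

import java.util.LinkedHashMap;
import org.openecomp.sdc.toscaparser.api.elements.PolicyType;

public class PolicyTypeSketch {
    public static void main(String[] args) {
        // Resolve a built-in policy type against the bundled TOSCA definitions.
        PolicyType placement = new PolicyType("tosca.policies.Placement", new LinkedHashMap<>());
        // Walk the derived_from chain that _getParentPolicies() also traverses.
        for (PolicyType p = placement; p != null; p = p.getParentType()) {
            System.out.println(p.getType());
        }
    }
}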
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/PortSpec.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/PortSpec.java
deleted file mode 100644
index 8d490ee..0000000
--- a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/PortSpec.java
+++ /dev/null
@@ -1,160 +0,0 @@
-package org.openecomp.sdc.toscaparser.api.elements;
-
-import java.util.LinkedHashMap;
-
-import org.openecomp.sdc.toscaparser.api.DataEntity;
-import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector;
-import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder;
-import org.openecomp.sdc.toscaparser.api.utils.ValidateUtils;
-
-public class PortSpec {
- // Parent class for tosca.datatypes.network.PortSpec type
-
- private static final String SHORTNAME = "PortSpec";
- private static final String TYPE_URI = "tosca.datatypes.network." + SHORTNAME;
-
- private static final String PROTOCOL = "protocol";
- private static final String SOURCE = "source";
- private static final String SOURCE_RANGE = "source_range";
- private static final String TARGET = "target";
- private static final String TARGET_RANGE = "target_range";
-
- private static final String PROPERTY_NAMES[] = {
- PROTOCOL, SOURCE, SOURCE_RANGE,
- TARGET, TARGET_RANGE
- };
-
- // todo(TBD) May want to make this a subclass of DataType
- // and change init method to set PortSpec's properties
- public PortSpec() {
-
- }
-
- // The following additional requirements MUST be tested:
- // 1) A valid PortSpec MUST have at least one of the following properties:
- // target, target_range, source or source_range.
- // 2) A valid PortSpec MUST have a value for the source property that
- // is within the numeric range specified by the property source_range
- // when source_range is specified.
- // 3) A valid PortSpec MUST have a value for the target property that is
- // within the numeric range specified by the property target_range
- // when target_range is specified.
- public static void validateAdditionalReq(Object _properties,
- String propName,
- LinkedHashMap<String,Object> custom_def) {
-
- try {
- LinkedHashMap<String,Object> properties = (LinkedHashMap<String,Object>)_properties;
- Object source = properties.get(PortSpec.SOURCE);
- Object sourceRange = properties.get(PortSpec.SOURCE_RANGE);
- Object target = properties.get(PortSpec.TARGET);
- Object targetRange = properties.get(PortSpec.TARGET_RANGE);
-
- // verify one of the specified values is set
- if(source == null && sourceRange == null &&
- target == null && targetRange == null) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "InvalidTypeAdditionalRequirementsError: Additional requirements for type \"%s\" not met",
- TYPE_URI));
- }
- // Validate source value is in specified range
- if(source != null && sourceRange != null) {
- ValidateUtils.validateValueInRange(source,sourceRange,SOURCE);
- }
- else {
- DataEntity portdef = new DataEntity("PortDef", source, null, SOURCE);
- portdef.validate();
- }
- // Validate target value is in specified range
- if(target != null && targetRange != null) {
- ValidateUtils.validateValueInRange(target,targetRange,TARGET);
- }
- else {
- DataEntity portdef = new DataEntity("PortDef", source, null, TARGET);
- portdef.validate();
- }
- }
- catch(Exception e) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "ValueError: \"%s\" do not meet requirements for type \"%s\"",
- _properties.toString(),SHORTNAME));
- }
- }
-
-}
-
-/*python
-
-from toscaparser.common.exception import ExceptionCollector
-from toscaparser.common.exception import InvalidTypeAdditionalRequirementsError
-from toscaparser.utils.gettextutils import _
-import org.openecomp.sdc.toscaparser.api.utils.validateutils as validateutils
-
-log = logging.getLogger('tosca')
-
-
-class PortSpec(object):
- '''Parent class for tosca.datatypes.network.PortSpec type.'''
-
- SHORTNAME = 'PortSpec'
- TYPE_URI = 'tosca.datatypes.network.' + SHORTNAME
-
- PROPERTY_NAMES = (
- PROTOCOL, SOURCE, SOURCE_RANGE,
- TARGET, TARGET_RANGE
- ) = (
- 'protocol', 'source', 'source_range',
- 'target', 'target_range'
- )
-
- # TODO(TBD) May want to make this a subclass of DataType
- # and change init method to set PortSpec's properties
- def __init__(self):
- pass
-
- # The following additional requirements MUST be tested:
- # 1) A valid PortSpec MUST have at least one of the following properties:
- # target, target_range, source or source_range.
- # 2) A valid PortSpec MUST have a value for the source property that
- # is within the numeric range specified by the property source_range
- # when source_range is specified.
- # 3) A valid PortSpec MUST have a value for the target property that is
- # within the numeric range specified by the property target_range
- # when target_range is specified.
- @staticmethod
- def validate_additional_req(properties, prop_name, custom_def=None, ):
- try:
- source = properties.get(PortSpec.SOURCE)
- source_range = properties.get(PortSpec.SOURCE_RANGE)
- target = properties.get(PortSpec.TARGET)
- target_range = properties.get(PortSpec.TARGET_RANGE)
-
- # verify one of the specified values is set
- if source is None and source_range is None and \
- target is None and target_range is None:
- ExceptionCollector.appendException(
- InvalidTypeAdditionalRequirementsError(
- type=PortSpec.TYPE_URI))
- # Validate source value is in specified range
- if source and source_range:
- validateutils.validate_value_in_range(source, source_range,
- PortSpec.SOURCE)
- else:
- from toscaparser.dataentity import DataEntity
- portdef = DataEntity('PortDef', source, None, PortSpec.SOURCE)
- portdef.validate()
- # Validate target value is in specified range
- if target and target_range:
- validateutils.validate_value_in_range(target, target_range,
- PortSpec.TARGET)
- else:
- from toscaparser.dataentity import DataEntity
- portdef = DataEntity('PortDef', source, None, PortSpec.TARGET)
- portdef.validate()
- except Exception:
- msg = _('"%(value)s" do not meet requirements '
- 'for type "%(type)s".') \
- % {'value': properties, 'type': PortSpec.SHORTNAME}
- ExceptionCollector.appendException(
- ValueError(msg))
-*/ \ No newline at end of file
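A brief sketch of the kind of property map PortSpec.validateAdditionalReq() checks; the port numbers and range are illustrative, and the thread-local ExceptionCollector is assumed to have been initialized by the parser before the call.

import java.util.Arrays;
import java.util.LinkedHashMap;
import org.openecomp.sdc.toscaparser.api.elements.PortSpec;

public class PortSpecSketch {
    public static void main(String[] args) {
        LinkedHashMap<String, Object> props = new LinkedHashMap<>();
        props.put("protocol", "tcp");
        props.put("source", 8080);                          // checked against source_range below
        props.put("source_range", Arrays.asList(8000, 9000));
        // Violations are appended to the collector rather than thrown,
        // so template parsing can report all problems at once.
        PortSpec.validateAdditionalReq(props, "port", new LinkedHashMap<>());
    }
}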
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/PropertyDef.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/PropertyDef.java
deleted file mode 100644
index c139eb6..0000000
--- a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/PropertyDef.java
+++ /dev/null
@@ -1,231 +0,0 @@
-package org.openecomp.sdc.toscaparser.api.elements;
-
-import java.util.LinkedHashMap;
-import java.util.Map;
-
-import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector;
-import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder;
-
-public class PropertyDef {
-
- private static final String PROPERTY_KEYNAME_DEFAULT = "default";
- private static final String PROPERTY_KEYNAME_REQUIRED = "required";
- private static final String PROPERTY_KEYNAME_STATUS = "status";
- private static final String VALID_PROPERTY_KEYNAMES[] = {
- PROPERTY_KEYNAME_DEFAULT,
- PROPERTY_KEYNAME_REQUIRED,
- PROPERTY_KEYNAME_STATUS};
-
- private static final boolean PROPERTY_REQUIRED_DEFAULT = true;
-
- private static final String VALID_REQUIRED_VALUES[] = {"true", "false"};
-
- private static final String PROPERTY_STATUS_SUPPORTED = "supported";
- private static final String PROPERTY_STATUS_EXPERIMENTAL = "experimental";
- private static final String VALID_STATUS_VALUES[] = {
- PROPERTY_STATUS_SUPPORTED, PROPERTY_STATUS_EXPERIMENTAL};
-
- private static final String PROPERTY_STATUS_DEFAULT = PROPERTY_STATUS_SUPPORTED;
-
- private String name;
- private Object value;
- private LinkedHashMap<String,Object> schema;
- private String _status;
- private boolean _required;
-
- public PropertyDef(String pdName, Object pdValue,
- LinkedHashMap<String,Object> pdSchema) {
- name = pdName;
- value = pdValue;
- schema = pdSchema;
- _status = PROPERTY_STATUS_DEFAULT;
- _required = PROPERTY_REQUIRED_DEFAULT;
-
- if(schema != null) {
- // Validate required 'type' property exists
- if(schema.get("type") == null) {
- //msg = (_('Schema definition of "%(pname)s" must have a "type" '
- // 'attribute.') % dict(pname=self.name))
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "InvalidSchemaError: Schema definition of \"%s\" must have a \"type\" attribute",name));
- }
- _loadRequiredAttrFromSchema();
- _loadStatusAttrFromSchema();
- }
- }
-
- public Object getDefault() {
- if(schema != null) {
- for(Map.Entry<String,Object> me: schema.entrySet()) {
- if(me.getKey().equals(PROPERTY_KEYNAME_DEFAULT)) {
- return me.getValue();
- }
- }
- }
- return null;
- }
-
- public boolean isRequired() {
- return _required;
- }
-
- private void _loadRequiredAttrFromSchema() {
- // IF 'required' keyname exists verify it's a boolean,
- // if so override default
- Object val = schema.get(PROPERTY_KEYNAME_REQUIRED);
- if(val != null) {
- if(val instanceof Boolean) {
- _required = (boolean)val;
- }
- else {
- //valid_values = ', '.join(self.VALID_REQUIRED_VALUES)
- //attr = self.PROPERTY_KEYNAME_REQUIRED
- //TOSCAException.generate_inv_schema_property_error(self,
- // attr,
- // value,
- // valid_values)
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "Schema definition of \"%s\" has \"required\" attribute with an invalid value",
- name));
- }
- }
- }
-
- public String getStatus() {
- return _status;
- }
-
- private void _loadStatusAttrFromSchema() {
- // IF 'status' keyname exists verify it's a valid value,
- // if so override default
- String sts = (String)schema.get(PROPERTY_KEYNAME_STATUS);
- if(sts != null) {
- boolean bFound = false;
- for(String vsv: VALID_STATUS_VALUES) {
- if(vsv.equals(sts)) {
- bFound = true;
- break;
- }
- }
- if(bFound) {
- _status = sts;
- }
- else {
- //valid_values = ', '.join(self.VALID_STATUS_VALUES)
- //attr = self.PROPERTY_KEYNAME_STATUS
- //TOSCAException.generate_inv_schema_property_error(self,
- // attr,
- // value,
- // valid_values)
- ThreadLocalsHolder.getCollector().appendWarning(String.format(
- "Schema definition of \"%s\" has \"status\" attribute with an invalid value",
- name));
- }
- }
- }
-
- public String getName() {
- return name;
- }
-
- public LinkedHashMap<String,Object> getSchema() {
- return schema;
- }
-
- public Object getPDValue() {
- // there's getValue in EntityType...
- return value;
- }
-
-}
-/*python
-
-from toscaparser.common.exception import ExceptionCollector
-from toscaparser.common.exception import InvalidSchemaError
-from toscaparser.common.exception import TOSCAException
-from toscaparser.utils.gettextutils import _
-
-
-class PropertyDef(object):
- '''TOSCA built-in Property type.'''
-
- VALID_PROPERTY_KEYNAMES = (PROPERTY_KEYNAME_DEFAULT,
- PROPERTY_KEYNAME_REQUIRED,
- PROPERTY_KEYNAME_STATUS) = \
- ('default', 'required', 'status')
-
- PROPERTY_REQUIRED_DEFAULT = True
-
- VALID_REQUIRED_VALUES = ['true', 'false']
- VALID_STATUS_VALUES = (PROPERTY_STATUS_SUPPORTED,
- PROPERTY_STATUS_EXPERIMENTAL) = \
- ('supported', 'experimental')
-
- PROPERTY_STATUS_DEFAULT = PROPERTY_STATUS_SUPPORTED
-
- def __init__(self, name, value=None, schema=None):
- self.name = name
- self.value = value
- self.schema = schema
- self._status = self.PROPERTY_STATUS_DEFAULT
- self._required = self.PROPERTY_REQUIRED_DEFAULT
-
- # Validate required 'type' property exists
- try:
- self.schema['type']
- except KeyError:
- msg = (_('Schema definition of "%(pname)s" must have a "type" '
- 'attribute.') % dict(pname=self.name))
- ExceptionCollector.appendException(
- InvalidSchemaError(message=msg))
-
- if self.schema:
- self._load_required_attr_from_schema()
- self._load_status_attr_from_schema()
-
- @property
- def default(self):
- if self.schema:
- for prop_key, prop_value in self.schema.items():
- if prop_key == self.PROPERTY_KEYNAME_DEFAULT:
- return prop_value
- return None
-
- @property
- def required(self):
- return self._required
-
- def _load_required_attr_from_schema(self):
- # IF 'required' keyname exists verify it's a boolean,
- # if so override default
- if self.PROPERTY_KEYNAME_REQUIRED in self.schema:
- value = self.schema[self.PROPERTY_KEYNAME_REQUIRED]
- if isinstance(value, bool):
- self._required = value
- else:
- valid_values = ', '.join(self.VALID_REQUIRED_VALUES)
- attr = self.PROPERTY_KEYNAME_REQUIRED
- TOSCAException.generate_inv_schema_property_error(self,
- attr,
- value,
- valid_values)
-
- @property
- def status(self):
- return self._status
-
- def _load_status_attr_from_schema(self):
- # IF 'status' keyname exists verify it's a valid value,
- # if so override default
- if self.PROPERTY_KEYNAME_STATUS in self.schema:
- value = self.schema[self.PROPERTY_KEYNAME_STATUS]
- if value in self.VALID_STATUS_VALUES:
- self._status = value
- else:
- valid_values = ', '.join(self.VALID_STATUS_VALUES)
- attr = self.PROPERTY_KEYNAME_STATUS
- TOSCAException.generate_inv_schema_property_error(self,
- attr,
- value,
- valid_values)
-*/
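A small sketch showing how a property schema maps onto the PropertyDef accessors above; the schema contents are illustrative rather than taken from a real template.

import java.util.LinkedHashMap;
import org.openecomp.sdc.toscaparser.api.elements.PropertyDef;

public class PropertyDefSketch {
    public static void main(String[] args) {
        LinkedHashMap<String, Object> schema = new LinkedHashMap<>();
        schema.put("type", "integer");          // mandatory; a missing type is reported as InvalidSchemaError
        schema.put("required", Boolean.FALSE);  // overrides the default of true
        schema.put("status", "experimental");   // must be one of VALID_STATUS_VALUES
        schema.put("default", 1);

        PropertyDef pd = new PropertyDef("instance_count", null, schema);
        System.out.println(pd.isRequired());    // false
        System.out.println(pd.getStatus());     // experimental
        System.out.println(pd.getDefault());    // 1
    }
}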
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/RelationshipType.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/RelationshipType.java
deleted file mode 100644
index 3903941..0000000
--- a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/RelationshipType.java
+++ /dev/null
@@ -1,103 +0,0 @@
-package org.openecomp.sdc.toscaparser.api.elements;
-
-import java.util.LinkedHashMap;
-
-import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector;
-import org.openecomp.sdc.toscaparser.api.elements.EntityType;
-import org.openecomp.sdc.toscaparser.api.elements.StatefulEntityType;
-import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder;
-
-public class RelationshipType extends StatefulEntityType {
-
- private static final String DERIVED_FROM = "derived_from";
- private static final String VALID_TARGET_TYPES = "valid_target_types";
- private static final String INTERFACES = "interfaces";
- private static final String ATTRIBUTES = "attributes";
- private static final String PROPERTIES = "properties";
- private static final String DESCRIPTION = "description";
- private static final String VERSION = "version";
- private static final String CREDENTIAL = "credential";
-
- private static final String SECTIONS[] = {
- DERIVED_FROM, VALID_TARGET_TYPES, INTERFACES,
- ATTRIBUTES, PROPERTIES, DESCRIPTION, VERSION, CREDENTIAL};
-
- private String capabilityName;
- private LinkedHashMap<String,Object> customDef;
-
- public RelationshipType(String _type, String _capabilityName, LinkedHashMap<String,Object> _customDef) {
- super(_type,RELATIONSHIP_PREFIX,_customDef);
- capabilityName = _capabilityName;
- customDef = _customDef;
- }
-
- public RelationshipType getParentType() {
- // Return the relationship type this relationship type is derived from
- String prel = derivedFrom(defs);
- if(prel != null) {
- return new RelationshipType(prel,null,customDef);
- }
- return null;
- }
-
- public Object getValidTargetTypes() {
- return entityValue(defs,"valid_target_types");
- }
-
- private void _validateKeys() {
- for(String key: defs.keySet()) {
- boolean bFound = false;
- for(int i=0; i< SECTIONS.length; i++) {
- if(key.equals(SECTIONS[i])) {
- bFound = true;
- break;
- }
- }
- if(!bFound) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "UnknownFieldError: Relationshiptype \"%s\" has unknown field \"%s\"",type,key));
- }
- }
- }
-}
-
-/*python
-
-from toscaparser.common.exception import ExceptionCollector
-from toscaparser.common.exception import UnknownFieldError
-from toscaparser.elements.statefulentitytype import StatefulEntityType
-
-
-class RelationshipType(StatefulEntityType):
- '''TOSCA built-in relationship type.'''
- SECTIONS = (DERIVED_FROM, VALID_TARGET_TYPES, INTERFACES,
- ATTRIBUTES, PROPERTIES, DESCRIPTION, VERSION,
- CREDENTIAL) = ('derived_from', 'valid_target_types',
- 'interfaces', 'attributes', 'properties',
- 'description', 'version', 'credential')
-
- def __init__(self, type, capability_name=None, custom_def=None):
- super(RelationshipType, self).__init__(type, self.RELATIONSHIP_PREFIX,
- custom_def)
- self.capability_name = capability_name
- self.custom_def = custom_def
- self._validate_keys()
-
- @property
- def parent_type(self):
- '''Return a relationship this reletionship is derived from.'''
- prel = self.derived_from(self.defs)
- if prel:
- return RelationshipType(prel, self.custom_def)
-
- @property
- def valid_target_types(self):
- return self.entity_value(self.defs, 'valid_target_types')
-
- def _validate_keys(self):
- for key in self.defs.keys():
- if key not in self.SECTIONS:
- ExceptionCollector.appendException(
- UnknownFieldError(what='Relationshiptype "%s"' % self.type,
- field=key))
-*/ \ No newline at end of file
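A minimal sketch of the RelationshipType accessors above; "tosca.relationships.HostedOn" is a standard TOSCA relationship type used only as an example, and the printed valid_target_types value depends on the bundled definitions.

import org.openecomp.sdc.toscaparser.api.elements.RelationshipType;

public class RelationshipTypeSketch {
    public static void main(String[] args) {
        RelationshipType hostedOn = new RelationshipType("tosca.relationships.HostedOn", null, null);
        System.out.println(hostedOn.getValidTargetTypes());   // e.g. [tosca.capabilities.Container]
        RelationshipType parent = hostedOn.getParentType();   // follows derived_from, e.g. tosca.relationships.Root
        System.out.println(parent != null ? parent.getType() : "(no parent)");
    }
}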
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/ScalarUnit.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/ScalarUnit.java
deleted file mode 100644
index de18cd6..0000000
--- a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/ScalarUnit.java
+++ /dev/null
@@ -1,262 +0,0 @@
-package org.openecomp.sdc.toscaparser.api.elements;
-
-import java.util.HashMap;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
-import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector;
-import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder;
-import org.openecomp.sdc.toscaparser.api.utils.ValidateUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public abstract class ScalarUnit {
-
- private static Logger log = LoggerFactory.getLogger(ScalarUnit.class.getName());
-
- private static final String SCALAR_UNIT_SIZE = "scalar-unit.size";
- private static final String SCALAR_UNIT_FREQUENCY = "scalar-unit.frequency";
- private static final String SCALAR_UNIT_TIME = "scalar-unit.time";
-
- public static final String SCALAR_UNIT_TYPES[] = {
- SCALAR_UNIT_SIZE, SCALAR_UNIT_FREQUENCY, SCALAR_UNIT_TIME
- };
-
- private Object value;
- protected HashMap<String,Object> SCALAR_UNIT_DICT;
- protected String SCALAR_UNIT_DEFAULT;
-
- public ScalarUnit(Object _value) {
- value = _value;
- SCALAR_UNIT_DICT = new HashMap<>();
- SCALAR_UNIT_DEFAULT = "";
- }
-
-
- private String _checkUnitInScalarStandardUnits(String inputUnit) {
- // Check whether the input unit is following specified standard
-
- // If unit is not following specified standard, convert it to standard
- // unit after displaying a warning message.
-
- if(SCALAR_UNIT_DICT.get(inputUnit) != null) {
- return inputUnit;
- }
- else {
- for(String key: SCALAR_UNIT_DICT.keySet()) {
- if(key.toUpperCase().equals(inputUnit.toUpperCase())) {
- log.debug("ScalarUnit - _checkUnitInScalarStandardUnits - \n" +
- "The unit {} does not follow scalar unit standards\n" +
- "using {} instead",
- inputUnit, key);
- return key;
- }
- }
- ThreadLocalsHolder.getCollector().appendWarning(String.format(
- "'The unit \"%s\" is not valid. Valid units are \n%s",
- inputUnit,SCALAR_UNIT_DICT.keySet().toString()));
- return inputUnit;
- }
- }
-
- public Object validateScalarUnit() {
- Pattern pattern = Pattern.compile("([0-9.]+)\\s*(\\w+)");
- Matcher matcher = pattern.matcher(value.toString());
- if(matcher.find()) {
- ValidateUtils.strToNum(matcher.group(1));
- String scalarUnit = _checkUnitInScalarStandardUnits(matcher.group(2));
- value = matcher.group(1) + " " + scalarUnit;
- }
- else {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "ValueError: \"%s\" is not a valid scalar-unit",value.toString()));
- }
- return value;
- }
-
- public double getNumFromScalarUnit(String unit) {
- if(unit != null) {
- unit = _checkUnitInScalarStandardUnits(unit);
- }
- else {
- unit = SCALAR_UNIT_DEFAULT;
- }
- Pattern pattern = Pattern.compile("([0-9.]+)\\s*(\\w+)");
- Matcher matcher = pattern.matcher(value.toString());
- if(matcher.find()) {
- ValidateUtils.strToNum(matcher.group(1));
- String scalarUnit = _checkUnitInScalarStandardUnits(matcher.group(2));
- value = matcher.group(1) + " " + scalarUnit;
- Object on1 = ValidateUtils.strToNum(matcher.group(1)) != null ? ValidateUtils.strToNum(matcher.group(1)) : 0;
- Object on2 = SCALAR_UNIT_DICT.get(matcher.group(2)) != null ? SCALAR_UNIT_DICT.get(matcher.group(2)) : 0;
- Object on3 = SCALAR_UNIT_DICT.get(unit) != null ? SCALAR_UNIT_DICT.get(unit) : 0;
-
- Double n1 = new Double(on1.toString());
- Double n2 = new Double(on2.toString());
- Double n3 = new Double(on3.toString());
- double converted = n1 * n2 / n3;
- if(Math.abs(converted - Math.round(converted)) < 0.0000000000001 ) {
- converted = Math.round(converted);
- }
- return converted;
- }
- return 0.0; // value did not match the "<number> <unit>" pattern
- }
-
- protected static HashMap<String,String> scalarunitMapping = _getScalarunitMappings();
-
- private static HashMap<String,String> _getScalarunitMappings() {
- HashMap<String,String> map = new HashMap<>();
- map.put(SCALAR_UNIT_FREQUENCY,"ScalarUnitFrequency");
- map.put(SCALAR_UNIT_SIZE, "ScalarUnitSize");
- map.put(SCALAR_UNIT_TIME, "ScalarUnit_Time");
- return map;
- }
-
- public static ScalarUnit getScalarunitClass(String type,Object val) {
- if(type.equals(SCALAR_UNIT_SIZE)) {
- return new ScalarUnitSize(val);
- }
- else if(type.equals(SCALAR_UNIT_TIME)) {
- return new ScalarUnitTime(val);
- }
- else if(type.equals(SCALAR_UNIT_FREQUENCY)) {
- return new ScalarUnitFrequency(val);
- }
- return null;
- }
-
- public static double getScalarunitValue(String type, Object value, String unit) {
- if(type.equals(SCALAR_UNIT_SIZE)) {
- return (new ScalarUnitSize(value)).getNumFromScalarUnit(unit);
- }
- if(type.equals(SCALAR_UNIT_TIME)) {
- return (new ScalarUnitTime(value)).getNumFromScalarUnit(unit);
- }
- if(type.equals(SCALAR_UNIT_FREQUENCY)) {
- return (new ScalarUnitFrequency(value)).getNumFromScalarUnit(unit);
- }
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "TypeError: \"%s\" is not a valid scalar-unit type",type));
- return 0.0;
- }
-
-}
-
-/*python
-
-from toscaparser.common.exception import ExceptionCollector
-from toscaparser.utils.gettextutils import _
-from toscaparser.utils import validateutils
-
-log = logging.getLogger('tosca')
-
-
-class ScalarUnit(object):
- '''Parent class for scalar-unit type.'''
-
- SCALAR_UNIT_TYPES = (
- SCALAR_UNIT_SIZE, SCALAR_UNIT_FREQUENCY, SCALAR_UNIT_TIME
- ) = (
- 'scalar-unit.size', 'scalar-unit.frequency', 'scalar-unit.time'
- )
-
- def __init__(self, value):
- self.value = value
-
- def _check_unit_in_scalar_standard_units(self, input_unit):
- """Check whether the input unit is following specified standard
-
- If unit is not following specified standard, convert it to standard
- unit after displaying a warning message.
- """
- if input_unit in self.SCALAR_UNIT_DICT.keys():
- return input_unit
- else:
- for key in self.SCALAR_UNIT_DICT.keys():
- if key.upper() == input_unit.upper():
- log.warning(_('The unit "%(unit)s" does not follow '
- 'scalar unit standards; using "%(key)s" '
- 'instead.') % {'unit': input_unit,
- 'key': key})
- return key
- msg = (_('The unit "%(unit)s" is not valid. Valid units are '
- '"%(valid_units)s".') %
- {'unit': input_unit,
- 'valid_units': sorted(self.SCALAR_UNIT_DICT.keys())})
- ExceptionCollector.appendException(ValueError(msg))
-
- def validate_scalar_unit(self):
- regex = re.compile('([0-9.]+)\s*(\w+)')
- try:
- result = regex.match(str(self.value)).groups()
- validateutils.str_to_num(result[0])
- scalar_unit = self._check_unit_in_scalar_standard_units(result[1])
- self.value = ' '.join([result[0], scalar_unit])
- return self.value
-
- except Exception:
- ExceptionCollector.appendException(
- ValueError(_('"%s" is not a valid scalar-unit.')
- % self.value))
-
- def get_num_from_scalar_unit(self, unit=None):
- if unit:
- unit = self._check_unit_in_scalar_standard_units(unit)
- else:
- unit = self.SCALAR_UNIT_DEFAULT
- self.validate_scalar_unit()
-
- regex = re.compile('([0-9.]+)\s*(\w+)')
- result = regex.match(str(self.value)).groups()
- converted = (float(validateutils.str_to_num(result[0]))
- * self.SCALAR_UNIT_DICT[result[1]]
- / self.SCALAR_UNIT_DICT[unit])
- if converted - int(converted) < 0.0000000000001:
- converted = int(converted)
- return converted
-
-
-class ScalarUnit_Size(ScalarUnit):
-
- SCALAR_UNIT_DEFAULT = 'B'
- SCALAR_UNIT_DICT = {'B': 1, 'kB': 1000, 'KiB': 1024, 'MB': 1000000,
- 'MiB': 1048576, 'GB': 1000000000,
- 'GiB': 1073741824, 'TB': 1000000000000,
- 'TiB': 1099511627776}
-
-
-class ScalarUnit_Time(ScalarUnit):
-
- SCALAR_UNIT_DEFAULT = 'ms'
- SCALAR_UNIT_DICT = {'d': 86400, 'h': 3600, 'm': 60, 's': 1,
- 'ms': 0.001, 'us': 0.000001, 'ns': 0.000000001}
-
-
-class ScalarUnit_Frequency(ScalarUnit):
-
- SCALAR_UNIT_DEFAULT = 'GHz'
- SCALAR_UNIT_DICT = {'Hz': 1, 'kHz': 1000,
- 'MHz': 1000000, 'GHz': 1000000000}
-
-
-scalarunit_mapping = {
- ScalarUnit.SCALAR_UNIT_FREQUENCY: ScalarUnit_Frequency,
- ScalarUnit.SCALAR_UNIT_SIZE: ScalarUnit_Size,
- ScalarUnit.SCALAR_UNIT_TIME: ScalarUnit_Time,
- }
-
-
-def get_scalarunit_class(type):
- return scalarunit_mapping.get(type)
-
-
-def get_scalarunit_value(type, value, unit=None):
- if type in ScalarUnit.SCALAR_UNIT_TYPES:
- ScalarUnit_Class = get_scalarunit_class(type)
- return (ScalarUnit_Class(value).
- get_num_from_scalar_unit(unit))
- else:
- ExceptionCollector.appendException(
- TypeError(_('"%s" is not a valid scalar-unit type.') % type))
-*/ \ No newline at end of file
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/ScalarUnitFrequency.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/ScalarUnitFrequency.java
deleted file mode 100644
index 57a111e..0000000
--- a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/ScalarUnitFrequency.java
+++ /dev/null
@@ -1,14 +0,0 @@
-package org.openecomp.sdc.toscaparser.api.elements;
-
-public class ScalarUnitFrequency extends ScalarUnit {
-
- public ScalarUnitFrequency(Object value) {
- super(value);
- SCALAR_UNIT_DEFAULT = "GHz";
- SCALAR_UNIT_DICT.put("Hz",1L);
- SCALAR_UNIT_DICT.put("kHz",1000L);
- SCALAR_UNIT_DICT.put("MHz",1000000L);
- SCALAR_UNIT_DICT.put("GHz",1000000000L);
- }
-
-}
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/ScalarUnitSize.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/ScalarUnitSize.java
deleted file mode 100644
index 72e7c33..0000000
--- a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/ScalarUnitSize.java
+++ /dev/null
@@ -1,19 +0,0 @@
-package org.openecomp.sdc.toscaparser.api.elements;
-
-public class ScalarUnitSize extends ScalarUnit {
-
- public ScalarUnitSize(Object value) {
- super(value);
-
- SCALAR_UNIT_DEFAULT = "B";
- SCALAR_UNIT_DICT.put("B",1L);
- SCALAR_UNIT_DICT.put("kB",1000L);
- SCALAR_UNIT_DICT.put("kiB",1024L);
- SCALAR_UNIT_DICT.put("MB",1000000L);
- SCALAR_UNIT_DICT.put("MiB",1048576L);
- SCALAR_UNIT_DICT.put("GB",1000000000L);
- SCALAR_UNIT_DICT.put("GiB",1073741824L);
- SCALAR_UNIT_DICT.put("TB",1000000000000L);
- SCALAR_UNIT_DICT.put("TiB",1099511627776L);
- }
-}
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/ScalarUnitTime.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/ScalarUnitTime.java
deleted file mode 100644
index 5cde10a..0000000
--- a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/ScalarUnitTime.java
+++ /dev/null
@@ -1,17 +0,0 @@
-package org.openecomp.sdc.toscaparser.api.elements;
-
-public class ScalarUnitTime extends ScalarUnit {
-
- public ScalarUnitTime(Object value) {
- super(value);
- SCALAR_UNIT_DEFAULT = "ms";
- SCALAR_UNIT_DICT.put("d",86400L);
- SCALAR_UNIT_DICT.put("h",3600L);
- SCALAR_UNIT_DICT.put("m",60L);
- SCALAR_UNIT_DICT.put("s",1L);
- SCALAR_UNIT_DICT.put("ms",0.001);
- SCALAR_UNIT_DICT.put("us",0.000001);
- SCALAR_UNIT_DICT.put("ns",0.000000001);
- }
-
-}
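A worked sketch of the conversion performed by getNumFromScalarUnit(): the stored string is split into a number and a unit, then converted as number * SCALAR_UNIT_DICT[unit] / SCALAR_UNIT_DICT[targetUnit]. For example, "2 GiB" converted to MB is 2 * 1073741824 / 1000000 = 2147.483648.

import org.openecomp.sdc.toscaparser.api.elements.ScalarUnitSize;

public class ScalarUnitSketch {
    public static void main(String[] args) {
        ScalarUnitSize size = new ScalarUnitSize("2 GiB");
        System.out.println(size.getNumFromScalarUnit("MB"));   // 2147.483648
        // A null unit falls back to SCALAR_UNIT_DEFAULT, which is "B" for sizes.
        System.out.println(size.getNumFromScalarUnit(null));   // 2.147483648E9
    }
}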
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/StatefulEntityType.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/StatefulEntityType.java
deleted file mode 100644
index 5ab816f..0000000
--- a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/StatefulEntityType.java
+++ /dev/null
@@ -1,220 +0,0 @@
-package org.openecomp.sdc.toscaparser.api.elements;
-
-import java.util.ArrayList;
-import java.util.LinkedHashMap;
-import java.util.Map;
-
-import org.openecomp.sdc.toscaparser.api.UnsupportedType;
-import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector;
-import org.openecomp.sdc.toscaparser.api.elements.AttributeDef;
-import org.openecomp.sdc.toscaparser.api.elements.EntityType;
-import org.openecomp.sdc.toscaparser.api.elements.PropertyDef;
-import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder;
-
-
-public class StatefulEntityType extends EntityType {
- // Class representing TOSCA states
-
- public static final String interfacesNodeLifecycleOperations[] = {
- "create", "configure", "start", "stop", "delete"};
-
- public static final String interfacesRelationshipConfigureOperations[] = {
- "post_configure_source", "post_configure_target", "add_target", "remove_target"};
-
- public StatefulEntityType() {
- // no-arg constructor for subclasses that do not need the full initialization
- }
-
- @SuppressWarnings("unchecked")
- public StatefulEntityType(String entityType, String prefix, LinkedHashMap<String,Object> customDef) {
-
- String entireEntityType = entityType;
- if(UnsupportedType.validateType(entireEntityType)) {
- defs = null;
- }
- else {
- if(entityType.startsWith(TOSCA + ":")) {
- entityType = entityType.substring(TOSCA.length()+1);
- entireEntityType = prefix + entityType;
- }
- if(!entityType.startsWith(TOSCA)) {
- entireEntityType = prefix + entityType;
- }
- if(TOSCA_DEF.get(entireEntityType) != null) {
- defs = (LinkedHashMap<String,Object> )TOSCA_DEF.get(entireEntityType);
- entityType = entireEntityType;
- }
- else if(customDef != null && customDef.get(entityType) != null) {
- defs = (LinkedHashMap<String,Object> )customDef.get(entityType);
- }
- else{
- defs = null;
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "InvalidTypeError: \"%s\" is not a valid type",entityType));
- }
- }
- type = entityType;
- }
-
- @SuppressWarnings("unchecked")
- public ArrayList<PropertyDef> getPropertiesDefObjects() {
- // Return a list of property definition objects
- ArrayList<PropertyDef> properties = new ArrayList<PropertyDef>();
- LinkedHashMap<String,Object> props = (LinkedHashMap<String,Object>)getDefinition(PROPERTIES);
- if(props != null) {
- for(Map.Entry<String,Object> me: props.entrySet()) {
- String pdname = me.getKey();
- Object to = me.getValue();
- if(to == null || !(to instanceof LinkedHashMap)) {
- String s = to == null ? "null" : to.getClass().getSimpleName();
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "Unexpected type error: property \"%s\" has type \"%s\" (expected dict)",pdname,s));
- continue;
- }
- LinkedHashMap<String,Object> pdschema = (LinkedHashMap<String,Object>)to;
- properties.add(new PropertyDef(pdname,null,pdschema));
- }
- }
- return properties;
- }
-
- public LinkedHashMap<String,PropertyDef> getPropertiesDef() {
- LinkedHashMap<String,PropertyDef> pds = new LinkedHashMap<String,PropertyDef>();
- for(PropertyDef pd: getPropertiesDefObjects()) {
- pds.put(pd.getName(),pd);
- }
- return pds;
- }
-
- public PropertyDef getPropertyDefValue(String name) {
- // Return the property definition associated with a given name
- PropertyDef pd = null;
- LinkedHashMap<String,PropertyDef> propsDef = getPropertiesDef();
- if(propsDef != null) {
- pd = propsDef.get(name);
- }
- return pd;
- }
-
- public ArrayList<AttributeDef> getAttributesDefObjects() {
- // Return a list of attribute definition objects
- @SuppressWarnings("unchecked")
- LinkedHashMap<String,Object> attrs = (LinkedHashMap<String,Object>)getValue(ATTRIBUTES,null,true);
- ArrayList<AttributeDef> ads = new ArrayList<>();
- if(attrs != null) {
- for(Map.Entry<String,Object> me: attrs.entrySet()) {
- String attr = me.getKey();
- @SuppressWarnings("unchecked")
- LinkedHashMap<String,Object> adschema = (LinkedHashMap<String,Object>)me.getValue();
- ads.add(new AttributeDef(attr,null,adschema));
- }
- }
- return ads;
- }
-
- public LinkedHashMap<String,AttributeDef> getAttributesDef() {
- // Return a dictionary of attribute definition name-object pairs
-
- LinkedHashMap<String,AttributeDef> ads = new LinkedHashMap<>();
- for(AttributeDef ado: getAttributesDefObjects()) {
- ads.put(((AttributeDef)ado).getName(),ado);
- }
- return ads;
- }
-
- public AttributeDef getAttributeDefValue(String name) {
- // Return the attribute definition associated with a given name
- AttributeDef ad = null;
- LinkedHashMap<String,AttributeDef> attrsDef = getAttributesDef();
- if(attrsDef != null) {
- ad = attrsDef.get(name);
- }
- return ad;
- }
-
- public String getType() {
- return type;
- }
- }
-
-/*python
-
-from toscaparser.common.exception import InvalidTypeError
-from toscaparser.elements.attribute_definition import AttributeDef
-from toscaparser.elements.entity_type import EntityType
-from toscaparser.elements.property_definition import PropertyDef
-from toscaparser.unsupportedtype import UnsupportedType
-
-
-class StatefulEntityType(EntityType):
- '''Class representing TOSCA states.'''
-
- interfaces_node_lifecycle_operations = ['create',
- 'configure', 'start',
- 'stop', 'delete']
-
- interfaces_relationship_configure_operations = ['post_configure_source',
- 'post_configure_target',
- 'add_target',
- 'remove_target']
-
- def __init__(self, entitytype, prefix, custom_def=None):
- entire_entitytype = entitytype
- if UnsupportedType.validate_type(entire_entitytype):
- self.defs = None
- else:
- if entitytype.startswith(self.TOSCA + ":"):
- entitytype = entitytype[(len(self.TOSCA) + 1):]
- entire_entitytype = prefix + entitytype
- if not entitytype.startswith(self.TOSCA):
- entire_entitytype = prefix + entitytype
- if entire_entitytype in list(self.TOSCA_DEF.keys()):
- self.defs = self.TOSCA_DEF[entire_entitytype]
- entitytype = entire_entitytype
- elif custom_def and entitytype in list(custom_def.keys()):
- self.defs = custom_def[entitytype]
- else:
- self.defs = None
- ExceptionCollector.appendException(
- InvalidTypeError(what=entitytype))
- self.type = entitytype
-
- def get_properties_def_objects(self):
- '''Return a list of property definition objects.'''
- properties = []
- props = self.get_definition(self.PROPERTIES)
- if props:
- for prop, schema in props.items():
- properties.append(PropertyDef(prop, None, schema))
- return properties
-
- def get_properties_def(self):
- '''Return a dictionary of property definition name-object pairs.'''
- return {prop.name: prop
- for prop in self.get_properties_def_objects()}
-
- def get_property_def_value(self, name):
- '''Return the property definition associated with a given name.'''
- props_def = self.get_properties_def()
- if props_def and name in props_def.keys():
- return props_def[name].value
-
- def get_attributes_def_objects(self):
- '''Return a list of attribute definition objects.'''
- attrs = self.get_value(self.ATTRIBUTES, parent=True)
- if attrs:
- return [AttributeDef(attr, None, schema)
- for attr, schema in attrs.items()]
- return []
-
- def get_attributes_def(self):
- '''Return a dictionary of attribute definition name-object pairs.'''
- return {attr.name: attr
- for attr in self.get_attributes_def_objects()}
-
- def get_attribute_def_value(self, name):
- '''Return the attribute definition associated with a given name.'''
- attrs_def = self.get_attributes_def()
- if attrs_def and name in attrs_def.keys():
- return attrs_def[name].value
-*/ \ No newline at end of file
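The constructor above normalizes three spellings of a type name before looking it up: the "tosca:" shorthand, a bare short name, and an already fully qualified name. A self-contained paraphrase of that normalization (not the library class itself), for illustration:

public class TypeNameResolution {
    static String expand(String entityType, String prefix) {
        final String TOSCA = "tosca";
        String entire = entityType;
        if (entityType.startsWith(TOSCA + ":")) {
            // "tosca:Compute" -> "Compute", then prefixed below
            entityType = entityType.substring(TOSCA.length() + 1);
            entire = prefix + entityType;
        }
        if (!entityType.startsWith(TOSCA)) {
            // bare short names get the prefix; fully qualified names pass through
            entire = prefix + entityType;
        }
        return entire;
    }

    public static void main(String[] args) {
        System.out.println(expand("tosca:Compute", "tosca.nodes."));        // tosca.nodes.Compute
        System.out.println(expand("Compute", "tosca.nodes."));              // tosca.nodes.Compute
        System.out.println(expand("tosca.nodes.Compute", "tosca.nodes."));  // tosca.nodes.Compute
    }
}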
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/TypeValidation.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/TypeValidation.java
deleted file mode 100644
index 2caf5c4..0000000
--- a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/TypeValidation.java
+++ /dev/null
@@ -1,151 +0,0 @@
-package org.openecomp.sdc.toscaparser.api.elements;
-
-import java.util.ArrayList;
-import java.util.LinkedHashMap;
-
-import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector;
-import org.openecomp.sdc.toscaparser.api.extensions.ExtTools;
-import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder;
-
-public class TypeValidation {
-
- private static final String DEFINITION_VERSION = "tosca_definitions_version";
- private static final String DESCRIPTION = "description";
- private static final String IMPORTS = "imports";
- private static final String DSL_DEFINITIONS = "dsl_definitions";
- private static final String NODE_TYPES = "node_types";
- private static final String REPOSITORIES = "repositories";
- private static final String DATA_TYPES = "data_types";
- private static final String ARTIFACT_TYPES = "artifact_types";
- private static final String GROUP_TYPES = "group_types";
- private static final String RELATIONSHIP_TYPES = "relationship_types";
- private static final String CAPABILITY_TYPES = "capability_types";
- private static final String INTERFACE_TYPES = "interface_types";
- private static final String POLICY_TYPES = "policy_types";
- private static final String TOPOLOGY_TEMPLATE = "topology_template";
- // "metadata" is accepted in addition to the sections allowed by the upstream parser
- private static final String METADATA = "metadata";
-
- private String ALLOWED_TYPE_SECTIONS[] = {
- DEFINITION_VERSION, DESCRIPTION, IMPORTS,
- DSL_DEFINITIONS, NODE_TYPES, REPOSITORIES,
- DATA_TYPES, ARTIFACT_TYPES, GROUP_TYPES,
- RELATIONSHIP_TYPES, CAPABILITY_TYPES,
- INTERFACE_TYPES, POLICY_TYPES,
- TOPOLOGY_TEMPLATE, METADATA
- };
-
- private static ArrayList<String> VALID_TEMPLATE_VERSIONS = _getVTV();
-
- private static ArrayList<String> _getVTV() {
- ArrayList<String> vtv = new ArrayList<>();
- vtv.add("tosca_simple_yaml_1_0");
- ExtTools exttools = new ExtTools();
- vtv.addAll(exttools.getVersions());
- return vtv;
- }
-
- //private LinkedHashMap<String,Object> customTypes;
- private Object importDef;
- //private String version;
-
- public TypeValidation(LinkedHashMap<String,Object> _customTypes,
- Object _importDef) {
- importDef = _importDef;
- _validateTypeKeys(_customTypes);
- }
-
- private void _validateTypeKeys(LinkedHashMap<String,Object> customTypes) {
-
- String sVersion = (String)customTypes.get(DEFINITION_VERSION);
- if(sVersion != null) {
- _validateTypeVersion(sVersion);
- //version = sVersion;
- }
- for(String name: customTypes.keySet()) {
- boolean bFound = false;
- for(String ats: ALLOWED_TYPE_SECTIONS) {
- if(name.equals(ats)) {
- bFound = true;
- break;
- }
- }
- if(!bFound) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "UnknownFieldError: Template \"%s\" contains unknown field \"%s\"",
- importDef.toString(),name));
- }
- }
- }
-
- private void _validateTypeVersion(String sVersion) {
- boolean bFound = false;
- String allowed = "";
- for(String atv: VALID_TEMPLATE_VERSIONS) {
- allowed += "\"" + atv + "\" ";
- if(sVersion.equals(atv)) {
- bFound = true;
- break;
- }
- }
- if(!bFound) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "InvalidTemplateVersion: version \"%s\" in \"%s\" is not supported\n" +
- "Allowed versions: [%s]",
- sVersion,importDef.toString(),allowed));
- }
- }
-}
-
-/*python
-
-from toscaparser.common.exception import ExceptionCollector
-from toscaparser.common.exception import InvalidTemplateVersion
-from toscaparser.common.exception import UnknownFieldError
-from toscaparser.extensions.exttools import ExtTools
-
-
-class TypeValidation(object):
-
- ALLOWED_TYPE_SECTIONS = (DEFINITION_VERSION, DESCRIPTION, IMPORTS,
- DSL_DEFINITIONS, NODE_TYPES, REPOSITORIES,
- DATA_TYPES, ARTIFACT_TYPES, GROUP_TYPES,
- RELATIONSHIP_TYPES, CAPABILITY_TYPES,
- INTERFACE_TYPES, POLICY_TYPES,
- TOPOLOGY_TEMPLATE) = \
- ('tosca_definitions_version', 'description', 'imports',
- 'dsl_definitions', 'node_types', 'repositories',
- 'data_types', 'artifact_types', 'group_types',
- 'relationship_types', 'capability_types',
- 'interface_types', 'policy_types', 'topology_template')
- VALID_TEMPLATE_VERSIONS = ['tosca_simple_yaml_1_0']
- exttools = ExtTools()
- VALID_TEMPLATE_VERSIONS.extend(exttools.get_versions())
-
- def __init__(self, custom_types, import_def):
- self.import_def = import_def
- self._validate_type_keys(custom_types)
-
- def _validate_type_keys(self, custom_type):
- version = custom_type[self.DEFINITION_VERSION] \
- if self.DEFINITION_VERSION in custom_type \
- else None
- if version:
- self._validate_type_version(version)
- self.version = version
-
- for name in custom_type:
- if name not in self.ALLOWED_TYPE_SECTIONS:
- ExceptionCollector.appendException(
-# UnknownFieldError(what='Template ' + (self.import_def),
- UnknownFieldError(what= (self.import_def),
- field=name))
-
- def _validate_type_version(self, version):
- if version not in self.VALID_TEMPLATE_VERSIONS:
- ExceptionCollector.appendException(
- InvalidTemplateVersion(
-# what=version + ' in ' + self.import_def,
- what=self.import_def,
- valid_versions=', '. join(self.VALID_TEMPLATE_VERSIONS)))
-*/
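A minimal sketch of what TypeValidation above checks on an imported definitions document: only the allowed top-level sections and a supported tosca_definitions_version. The map content and file name are illustrative, and the thread-local collector and extension tooling (ExtTools) are assumed to be initialized.

import java.util.LinkedHashMap;
import org.openecomp.sdc.toscaparser.api.elements.TypeValidation;

public class TypeValidationSketch {
    public static void main(String[] args) {
        LinkedHashMap<String, Object> imported = new LinkedHashMap<>();
        imported.put("tosca_definitions_version", "tosca_simple_yaml_1_0");
        imported.put("node_types", new LinkedHashMap<>());
        // Unknown sections or an unsupported version are appended to the
        // collector as UnknownFieldError / InvalidTemplateVersion messages.
        new TypeValidation(imported, "custom_types.yaml");
    }
}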
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Constraint.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Constraint.java
deleted file mode 100644
index 3c60a66..0000000
--- a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Constraint.java
+++ /dev/null
@@ -1,237 +0,0 @@
-package org.openecomp.sdc.toscaparser.api.elements.constraints;
-
-import java.util.ArrayList;
-import java.util.LinkedHashMap;
-
-import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector;
-import org.openecomp.sdc.toscaparser.api.elements.ScalarUnit;
-import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder;
-
-public abstract class Constraint {
-
- // Parent class for constraints for a Property or Input
-
- protected static final String EQUAL = "equal";
- protected static final String GREATER_THAN = "greater_than";
- protected static final String GREATER_OR_EQUAL = "greater_or_equal";
- protected static final String LESS_THAN = "less_than";
- protected static final String LESS_OR_EQUAL = "less_or_equal";
- protected static final String IN_RANGE = "in_range";
- protected static final String VALID_VALUES = "valid_values";
- protected static final String LENGTH = "length";
- protected static final String MIN_LENGTH = "min_length";
- protected static final String MAX_LENGTH = "max_length";
- protected static final String PATTERN = "pattern";
-
- protected static final String CONSTRAINTS[] = {
- EQUAL, GREATER_THAN,GREATER_OR_EQUAL, LESS_THAN, LESS_OR_EQUAL,
- IN_RANGE, VALID_VALUES, LENGTH, MIN_LENGTH, MAX_LENGTH, PATTERN};
-
- @SuppressWarnings("unchecked")
- public static Constraint factory(String constraintClass,String propname,String proptype,Object constraint) {
-
- // a factory for the different Constraint classes
- // replaces Python's __new__() usage
-
- if(!(constraint instanceof LinkedHashMap) ||
- ((LinkedHashMap<String,Object>)constraint).size() != 1) {
- ThreadLocalsHolder.getCollector().appendException(
- "InvalidSchemaError: Invalid constraint schema " + constraint.toString());
- }
-
- if(constraintClass.equals(EQUAL)) {
- return new Equal(propname,proptype,constraint);
- }
- else if(constraintClass.equals(GREATER_THAN)) {
- return new GreaterThan(propname,proptype,constraint);
- }
- else if(constraintClass.equals(GREATER_OR_EQUAL)) {
- return new GreaterOrEqual(propname,proptype,constraint);
- }
- else if(constraintClass.equals(LESS_THAN)) {
- return new LessThan(propname,proptype,constraint);
- }
- else if(constraintClass.equals(LESS_OR_EQUAL)) {
- return new LessOrEqual(propname,proptype,constraint);
- }
- else if(constraintClass.equals(IN_RANGE)) {
- return new InRange(propname,proptype,constraint);
- }
- else if(constraintClass.equals(VALID_VALUES)) {
- return new ValidValues(propname,proptype,constraint);
- }
- else if(constraintClass.equals(LENGTH)) {
- return new Length(propname,proptype,constraint);
- }
- else if(constraintClass.equals(MIN_LENGTH)) {
- return new MinLength(propname,proptype,constraint);
- }
- else if(constraintClass.equals(MAX_LENGTH)) {
- return new MaxLength(propname,proptype,constraint);
- }
- else if(constraintClass.equals(PATTERN)) {
- return new Pattern(propname,proptype,constraint);
- }
- else {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "InvalidSchemaError: Invalid property \"%s\"",constraintClass));
- return null;
- }
- }
-
- protected String constraintKey = "TBD";
- protected ArrayList<String> validTypes = new ArrayList<>();
- protected ArrayList<String> validPropTypes = new ArrayList<>();
-
- protected String propertyName;
- protected String propertyType;
- protected Object constraintValue;
- protected Object constraintValueMsg;
- protected Object valueMsg;
-
- @SuppressWarnings("unchecked")
- public Constraint(String propname,String proptype,Object constraint) {
-
- _setValues();
-
- propertyName = propname;
- propertyType = proptype;
- constraintValue = ((LinkedHashMap<String,Object>)constraint).get(constraintKey);
- constraintValueMsg = constraintValue;
- boolean bFound = false;
- for(String s: ScalarUnit.SCALAR_UNIT_TYPES) {
- if(s.equals(propertyType)) {
- bFound = true;
- break;
- }
- }
- if(bFound) {
- constraintValue = _getScalarUnitConstraintValue();
- }
- // check if constraint is valid for property type
- bFound = false;
- for(String s: validPropTypes) {
- if(s.equals(propertyType)) {
- bFound = true;
- break;
- }
- }
- if(!bFound) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "InvalidSchemaError: Property \"%s\" is not valid for data type \"%s\"",
- constraintKey,propertyType));
- }
- }
-
- @SuppressWarnings("unchecked")
- private Object _getScalarUnitConstraintValue() {
- // code differs from Python because of class creation
- if(constraintValue instanceof ArrayList) {
- ArrayList<Object> ret = new ArrayList<>();
- for(Object v: (ArrayList<Object>)constraintValue) {
- ScalarUnit su = ScalarUnit.getScalarunitClass(propertyType,v);
- ret.add(su.getNumFromScalarUnit(null));
- }
- return ret;
- }
- else {
- ScalarUnit su = ScalarUnit.getScalarunitClass(propertyType,constraintValue);
- return su.getNumFromScalarUnit(null);
- }
- }
-
- public void validate(Object value) {
- valueMsg = value;
- boolean bFound = false;
- for(String s: ScalarUnit.SCALAR_UNIT_TYPES) {
- if(s.equals(propertyType)) {
- bFound = true;
- break;
- }
- }
- if(bFound) {
- value = ScalarUnit.getScalarunitValue(propertyType,value,null);
- }
- if(!_isValid(value)) {
- ThreadLocalsHolder.getCollector().appendWarning("ValidationError: " + _errMsg(value));
- }
- }
-
- protected abstract boolean _isValid(Object value);
-
- protected abstract void _setValues();
-
- protected abstract String _errMsg(Object value);
-
-}
-
-/*python
-
-class Constraint(object):
- '''Parent class for constraints for a Property or Input.'''
-
- CONSTRAINTS = (EQUAL, GREATER_THAN,
- GREATER_OR_EQUAL, LESS_THAN, LESS_OR_EQUAL, IN_RANGE,
- VALID_VALUES, LENGTH, MIN_LENGTH, MAX_LENGTH, PATTERN) = \
- ('equal', 'greater_than', 'greater_or_equal', 'less_than',
- 'less_or_equal', 'in_range', 'valid_values', 'length',
- 'min_length', 'max_length', 'pattern')
-
- def __new__(cls, property_name, property_type, constraint):
- if cls is not Constraint:
- return super(Constraint, cls).__new__(cls)
-
- if(not isinstance(constraint, collections.Mapping) or
- len(constraint) != 1):
- ExceptionCollector.appendException(
- InvalidSchemaError(message=_('Invalid constraint schema.')))
-
- for type in constraint.keys():
- ConstraintClass = get_constraint_class(type)
- if not ConstraintClass:
- msg = _('Invalid property "%s".') % type
- ExceptionCollector.appendException(
- InvalidSchemaError(message=msg))
-
- return ConstraintClass(property_name, property_type, constraint)
-
- def __init__(self, property_name, property_type, constraint):
- self.property_name = property_name
- self.property_type = property_type
- self.constraint_value = constraint[self.constraint_key]
- self.constraint_value_msg = self.constraint_value
- if self.property_type in scalarunit.ScalarUnit.SCALAR_UNIT_TYPES:
- self.constraint_value = self._get_scalarunit_constraint_value()
- # check if constraint is valid for property type
- if property_type not in self.valid_prop_types:
- msg = _('Property "%(ctype)s" is not valid for data type '
- '"%(dtype)s".') % dict(
- ctype=self.constraint_key,
- dtype=property_type)
- ExceptionCollector.appendException(InvalidSchemaError(message=msg))
-
- def _get_scalarunit_constraint_value(self):
- if self.property_type in scalarunit.ScalarUnit.SCALAR_UNIT_TYPES:
- ScalarUnit_Class = (scalarunit.
- get_scalarunit_class(self.property_type))
- if isinstance(self.constraint_value, list):
- return [ScalarUnit_Class(v).get_num_from_scalar_unit()
- for v in self.constraint_value]
- else:
- return (ScalarUnit_Class(self.constraint_value).
- get_num_from_scalar_unit())
-
- def _err_msg(self, value):
- return _('Property "%s" could not be validated.') % self.property_name
-
- def validate(self, value):
- self.value_msg = value
- if self.property_type in scalarunit.ScalarUnit.SCALAR_UNIT_TYPES:
- value = scalarunit.get_scalarunit_value(self.property_type, value)
- if not self._is_valid(value):
- err_msg = self._err_msg(value)
- ExceptionCollector.appendException(
- ValidationError(message=err_msg))
-
-
-*/
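
For orientation, here is a minimal sketch of how the factory and validate() above fit together. It assumes the constraint classes in this diff are still compiled and on the classpath, and that an ExceptionCollector has been registered in ThreadLocalsHolder before a failing validate() call; the demo class name and property values are hypothetical.

package org.openecomp.sdc.toscaparser.api.elements.constraints;

import java.util.LinkedHashMap;

public class ConstraintFactoryDemo {
    public static void main(String[] args) {
        // a TOSCA clause such as "greater_or_equal: 2" arrives as a single-entry map
        LinkedHashMap<String, Object> clause = new LinkedHashMap<>();
        clause.put("greater_or_equal", 2);

        // the factory dispatches on the key and returns a GreaterOrEqual instance
        Constraint c = Constraint.factory("greater_or_equal", "num_cpus", Schema.INTEGER, clause);
        if (c != null) {
            c.validate(4); // 4 >= 2, nothing is reported
            c.validate(1); // 1 < 2, a "ValidationError" warning goes to the collector
        }
    }
}
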
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Equal.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Equal.java
deleted file mode 100644
index e16cac3..0000000
--- a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Equal.java
+++ /dev/null
@@ -1,61 +0,0 @@
-package org.openecomp.sdc.toscaparser.api.elements.constraints;
-
-public class Equal extends Constraint {
-
- protected void _setValues() {
-
- constraintKey = EQUAL;
-
- for(String s: Schema.PROPERTY_TYPES) {
- validPropTypes.add(s);
- }
-
- }
-
- public Equal(String name,String type,Object c) {
- super(name,type,c);
-
- }
-
- protected boolean _isValid(Object val) {
- // equality of objects is tricky so we're comparing
- // the toString() representation
- if(val.toString().equals(constraintValue.toString())) {
- return true;
- }
- return false;
- }
-
- protected String _errMsg(Object value) {
- return String.format("The value \"%s\" of property \"%s\" is not equal to \"%s\"",
- valueMsg,propertyName,constraintValueMsg);
- }
-
-}
-
-/*python
-
-class Equal(Constraint):
-"""Constraint class for "equal"
-
-Constrains a property or parameter to a value equal to ('=')
-the value declared.
-"""
-
-constraint_key = Constraint.EQUAL
-
-valid_prop_types = Schema.PROPERTY_TYPES
-
-def _is_valid(self, value):
- if value == self.constraint_value:
- return True
-
- return False
-
-def _err_msg(self, value):
- return (_('The value "%(pvalue)s" of property "%(pname)s" is not '
- 'equal to "%(cvalue)s".') %
- dict(pname=self.property_name,
- pvalue=self.value_msg,
- cvalue=self.constraint_value_msg))
-*/ \ No newline at end of file
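
Note that, unlike the Python `==` in the reference above, the Java implementation compares toString() renderings, so the numeric and string forms of the same literal are accepted interchangeably. A small sketch under the same assumptions as before (hypothetical demo class, same package, collector registered for the failing call):

package org.openecomp.sdc.toscaparser.api.elements.constraints;

import java.util.LinkedHashMap;

public class EqualDemo {
    public static void main(String[] args) {
        LinkedHashMap<String, Object> clause = new LinkedHashMap<>();
        clause.put("equal", 2);

        Equal eq = new Equal("num_cpus", Schema.INTEGER, clause);
        eq.validate(2);   // "2".equals("2") -> valid
        eq.validate("2"); // also valid: comparison is on toString(), not on type
        eq.validate(3);   // invalid; reported through the registered collector
    }
}
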
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/GreaterOrEqual.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/GreaterOrEqual.java
deleted file mode 100644
index 021bed3..0000000
--- a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/GreaterOrEqual.java
+++ /dev/null
@@ -1,113 +0,0 @@
-package org.openecomp.sdc.toscaparser.api.elements.constraints;
-
-import java.util.Date;
-
-import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector;
-import org.openecomp.sdc.toscaparser.api.functions.Function;
-import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder;
-
-public class GreaterOrEqual extends Constraint {
- // Constraint class for "greater_or_equal"
-
- // Constrains a property or parameter to a value greater than or equal
- // to ('>=') the value declared.
-
- protected void _setValues() {
-
- constraintKey = GREATER_OR_EQUAL;
-
- validTypes.add("Integer");
- validTypes.add("Double");
- validTypes.add("Float");
- // timestamps are loaded as Date objects
- validTypes.add("Date");
- //validTypes.add("datetime.date");
- //validTypes.add("datetime.time");
- //validTypes.add("datetime.datetime");
-
- validPropTypes.add(Schema.INTEGER);
- validPropTypes.add(Schema.FLOAT);
- validPropTypes.add(Schema.TIMESTAMP);
- validPropTypes.add(Schema.SCALAR_UNIT_SIZE);
- validPropTypes.add(Schema.SCALAR_UNIT_FREQUENCY);
- validPropTypes.add(Schema.SCALAR_UNIT_TIME);
-
- }
-
- public GreaterOrEqual(String name,String type,Object c) {
- super(name,type,c);
-
- if(!validTypes.contains(constraintValue.getClass().getSimpleName())) {
- ThreadLocalsHolder.getCollector().appendException("InvalidSchemaError: The property \"greater_or_equal\" expects comparable values");
- }
- }
-
-
-
- @Override
- protected boolean _isValid(Object value) {
- if(Function.isFunction(value)) {
- return true;
- }
-
- // timestamps
- if(value instanceof Date) {
- if(constraintValue instanceof Date) {
- return !((Date)value).before((Date)constraintValue);
- }
- return false;
- }
- // all others
- Double n1 = new Double(value.toString());
- Double n2 = new Double(constraintValue.toString());
- return n1 >= n2;
- }
-
- protected String _errMsg(Object value) {
- return String.format("The value \"%s\" of property \"%s\" must be greater or equal to \"%s\"",
- valueMsg,propertyName,constraintValueMsg);
- }
-}
-
-/*python
-
-class GreaterOrEqual(Constraint):
-"""Constraint class for "greater_or_equal"
-
-Constrains a property or parameter to a value greater than or equal
-to ('>=') the value declared.
-"""
-
-constraint_key = Constraint.GREATER_OR_EQUAL
-
-valid_types = (int, float, datetime.date,
- datetime.time, datetime.datetime)
-
-valid_prop_types = (Schema.INTEGER, Schema.FLOAT, Schema.TIMESTAMP,
- Schema.SCALAR_UNIT_SIZE, Schema.SCALAR_UNIT_FREQUENCY,
- Schema.SCALAR_UNIT_TIME)
-
-def __init__(self, property_name, property_type, constraint):
- super(GreaterOrEqual, self).__init__(property_name, property_type,
- constraint)
- if not isinstance(self.constraint_value, self.valid_types):
- ThreadLocalsHolder.getCollector().appendException(
- InvalidSchemaError(message=_('The property '
- '"greater_or_equal" expects '
- 'comparable values.')))
-
-def _is_valid(self, value):
- if toscaparser.functions.is_function(value) or \
- value >= self.constraint_value:
- return True
- return False
-
-def _err_msg(self, value):
- return (_('The value "%(pvalue)s" of property "%(pname)s" must be '
- 'greater than or equal to "%(cvalue)s".') %
- dict(pname=self.property_name,
- pvalue=self.value_msg,
- cvalue=self.constraint_value_msg))
-
-
-*/ \ No newline at end of file
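
Because YAML timestamps are loaded as java.util.Date, both the comparable-value check in the constructor and _isValid() handle Date directly. A minimal sketch with hypothetical dates and demo class name, assuming the classes above are compiled in the same package:

package org.openecomp.sdc.toscaparser.api.elements.constraints;

import java.util.Date;
import java.util.GregorianCalendar;
import java.util.LinkedHashMap;

public class TimestampConstraintDemo {
    public static void main(String[] args) {
        Date cutoff = new GregorianCalendar(2017, 0, 1).getTime();  // 2017-01-01
        Date later  = new GregorianCalendar(2017, 5, 15).getTime(); // 2017-06-15

        LinkedHashMap<String, Object> clause = new LinkedHashMap<>();
        clause.put("greater_or_equal", cutoff);

        GreaterOrEqual ge = new GreaterOrEqual("start_date", Schema.TIMESTAMP, clause);
        ge.validate(later);  // !later.before(cutoff) -> valid, nothing appended
        ge.validate(cutoff); // the boundary value is accepted as well
    }
}
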
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/GreaterThan.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/GreaterThan.java
deleted file mode 100644
index d23d7ce..0000000
--- a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/GreaterThan.java
+++ /dev/null
@@ -1,102 +0,0 @@
-package org.openecomp.sdc.toscaparser.api.elements.constraints;
-
-import java.util.Date;
-
-import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector;
-import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder;
-
-public class GreaterThan extends Constraint {
-
- @Override
- protected void _setValues() {
-
- constraintKey = GREATER_THAN;
-
- validTypes.add("Integer");
- validTypes.add("Double");
- validTypes.add("Float");
- // timestamps are loaded as Date objects
- validTypes.add("Date");
- //validTypes.add("datetime.date");
- //validTypes.add("datetime.time");
- //validTypes.add("datetime.datetime");
-
-
- validPropTypes.add(Schema.INTEGER);
- validPropTypes.add(Schema.FLOAT);
- validPropTypes.add(Schema.TIMESTAMP);
- validPropTypes.add(Schema.SCALAR_UNIT_SIZE);
- validPropTypes.add(Schema.SCALAR_UNIT_FREQUENCY);
- validPropTypes.add(Schema.SCALAR_UNIT_TIME);
-
- }
-
- public GreaterThan(String name,String type,Object c) {
- super(name,type,c);
-
- if(!validTypes.contains(constraintValue.getClass().getSimpleName())) {
- ThreadLocalsHolder.getCollector().appendException("InvalidSchemaError: The property \"greater_than\" expects comparable values");
- }
- }
-
- @Override
- protected boolean _isValid(Object value) {
-
- // timestamps
- if(value instanceof Date) {
- if(constraintValue instanceof Date) {
- return ((Date)value).after((Date)constraintValue);
- }
- return false;
- }
-
- Double n1 = new Double(value.toString());
- Double n2 = new Double(constraintValue.toString());
- return n1 > n2;
- }
-
- protected String _errMsg(Object value) {
- return String.format("The value \"%s\" of property \"%s\" must be greater than \"%s\"",
- valueMsg,propertyName,constraintValueMsg);
- }
-
-}
-
-/*
-class GreaterThan(Constraint):
- """Constraint class for "greater_than"
-
- Constrains a property or parameter to a value greater than ('>')
- the value declared.
- """
-
- constraint_key = Constraint.GREATER_THAN
-
- valid_types = (int, float, datetime.date,
- datetime.time, datetime.datetime)
-
- valid_prop_types = (Schema.INTEGER, Schema.FLOAT, Schema.TIMESTAMP,
- Schema.SCALAR_UNIT_SIZE, Schema.SCALAR_UNIT_FREQUENCY,
- Schema.SCALAR_UNIT_TIME)
-
- def __init__(self, property_name, property_type, constraint):
- super(GreaterThan, self).__init__(property_name, property_type,
- constraint)
- if not isinstance(constraint[self.GREATER_THAN], self.valid_types):
- ExceptionCollector.appendException(
- InvalidSchemaError(message=_('The property "greater_than" '
- 'expects comparable values.')))
-
- def _is_valid(self, value):
- if value > self.constraint_value:
- return True
-
- return False
-
- def _err_msg(self, value):
- return (_('The value "%(pvalue)s" of property "%(pname)s" must be '
- 'greater than "%(cvalue)s".') %
- dict(pname=self.property_name,
- pvalue=self.value_msg,
- cvalue=self.constraint_value_msg))
-*/
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/InRange.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/InRange.java
deleted file mode 100644
index 282267d..0000000
--- a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/InRange.java
+++ /dev/null
@@ -1,171 +0,0 @@
-package org.openecomp.sdc.toscaparser.api.elements.constraints;
-
-import java.util.Date;
-
-import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector;
-import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder;
-
-import java.util.ArrayList;
-
-public class InRange extends Constraint {
- // Constraint class for "in_range"
-
- //Constrains a property or parameter to a value in range of (inclusive)
- //the two values declared.
-
- private static final String UNBOUNDED = "UNBOUNDED";
-
- private Object min,max;
-
- protected void _setValues() {
-
- constraintKey = IN_RANGE;
-
- validTypes.add("Integer");
- validTypes.add("Double");
- validTypes.add("Float");
- validTypes.add("String");
- // timestamps are loaded as Date objects
- validTypes.add("Date");
- //validTypes.add("datetime.date");
- //validTypes.add("datetime.time");
- //validTypes.add("datetime.datetime");
-
- validPropTypes.add(Schema.INTEGER);
- validPropTypes.add(Schema.FLOAT);
- validPropTypes.add(Schema.TIMESTAMP);
- validPropTypes.add(Schema.SCALAR_UNIT_SIZE);
- validPropTypes.add(Schema.SCALAR_UNIT_FREQUENCY);
- validPropTypes.add(Schema.SCALAR_UNIT_TIME);
- validPropTypes.add(Schema.RANGE);
-
- }
-
- @SuppressWarnings("unchecked")
- public InRange(String name,String type,Object c) {
- super(name,type,c);
-
- if(!(constraintValue instanceof ArrayList) || ((ArrayList<Object>)constraintValue).size() != 2) {
- ThreadLocalsHolder.getCollector().appendException("InvalidSchemaError: The property \"in_range\" expects a list");
-
- }
-
- ArrayList<Object> alcv = (ArrayList<Object>)constraintValue;
- String msg = "The property \"in_range\" expects comparable values";
- for(Object vo: alcv) {
- if(!validTypes.contains(vo.getClass().getSimpleName())) {
- ThreadLocalsHolder.getCollector().appendException("InvalidSchemaError: " + msg);
- }
- // The only string we allow for range is the special value 'UNBOUNDED'
- if((vo instanceof String) && !((String)vo).equals(UNBOUNDED)) {
- ThreadLocalsHolder.getCollector().appendException("InvalidSchemaError: " + msg);
- }
- }
- min = alcv.get(0);
- max = alcv.get(1);
-
- }
-
- @Override
- protected boolean _isValid(Object value) {
-
- // timestamps
- if(value instanceof Date) {
- if(min instanceof Date && max instanceof Date) {
- return !((Date)value).before((Date)min) &&
- !((Date)value).after((Date)max);
- }
- return false;
- }
-
- Double dvalue = new Double(value.toString());
- if(!(min instanceof String)) {
- if(dvalue < new Double(min.toString())) {
- return false;
- }
- }
- else if(!((String)min).equals(UNBOUNDED)) {
- return false;
- }
- if(!(max instanceof String)) {
- if(dvalue > new Double(max.toString())) {
- return false;
- }
- }
- else if(!((String)max).equals(UNBOUNDED)) {
- return false;
- }
- return true;
- }
-
- @Override
- protected String _errMsg(Object value) {
- return String.format("The value \"%s\" of property \"%s\" is out of range \"(min:%s, max:%s)\"",
- valueMsg,propertyName,min.toString(),max.toString());
- }
-
-}
-
-/*python
-
-class InRange(Constraint):
- """Constraint class for "in_range"
-
- Constrains a property or parameter to a value in range of (inclusive)
- the two values declared.
- """
- UNBOUNDED = 'UNBOUNDED'
-
- constraint_key = Constraint.IN_RANGE
-
- valid_types = (int, float, datetime.date,
- datetime.time, datetime.datetime, str)
-
- valid_prop_types = (Schema.INTEGER, Schema.FLOAT, Schema.TIMESTAMP,
- Schema.SCALAR_UNIT_SIZE, Schema.SCALAR_UNIT_FREQUENCY,
- Schema.SCALAR_UNIT_TIME, Schema.RANGE)
-
- def __init__(self, property_name, property_type, constraint):
- super(InRange, self).__init__(property_name, property_type, constraint)
- if(not isinstance(self.constraint_value, collections.Sequence) or
- (len(constraint[self.IN_RANGE]) != 2)):
- ExceptionCollector.appendException(
- InvalidSchemaError(message=_('The property "in_range" '
- 'expects a list.')))
-
- msg = _('The property "in_range" expects comparable values.')
- for value in self.constraint_value:
- if not isinstance(value, self.valid_types):
- ExceptionCollector.appendException(
- InvalidSchemaError(message=msg))
- # The only string we allow for range is the special value
- # 'UNBOUNDED'
- if(isinstance(value, str) and value != self.UNBOUNDED):
- ExceptionCollector.appendException(
- InvalidSchemaError(message=msg))
-
- self.min = self.constraint_value[0]
- self.max = self.constraint_value[1]
-
- def _is_valid(self, value):
- if not isinstance(self.min, str):
- if value < self.min:
- return False
- elif self.min != self.UNBOUNDED:
- return False
- if not isinstance(self.max, str):
- if value > self.max:
- return False
- elif self.max != self.UNBOUNDED:
- return False
- return True
-
- def _err_msg(self, value):
- return (_('The value "%(pvalue)s" of property "%(pname)s" is out of '
- 'range "(min:%(vmin)s, max:%(vmax)s)".') %
- dict(pname=self.property_name,
- pvalue=self.value_msg,
- vmin=self.constraint_value_msg[0],
- vmax=self.constraint_value_msg[1]))
-
-*/
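
The special string UNBOUNDED switches off one end of the range, which is why String is among the valid types above. A small sketch under the same assumptions as the earlier demos (hypothetical names, collector registered for the failing case):

package org.openecomp.sdc.toscaparser.api.elements.constraints;

import java.util.ArrayList;
import java.util.LinkedHashMap;

public class InRangeDemo {
    public static void main(String[] args) {
        // in_range: [ 1, UNBOUNDED ]  -- only the lower bound is enforced
        ArrayList<Object> bounds = new ArrayList<>();
        bounds.add(1);
        bounds.add("UNBOUNDED");

        LinkedHashMap<String, Object> clause = new LinkedHashMap<>();
        clause.put("in_range", bounds);

        InRange ir = new InRange("num_instances", Schema.INTEGER, clause);
        ir.validate(1000); // valid: the upper bound is UNBOUNDED
        ir.validate(0);    // invalid: below the lower bound, warning appended
    }
}
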
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Length.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Length.java
deleted file mode 100644
index 4cfd1c0..0000000
--- a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Length.java
+++ /dev/null
@@ -1,79 +0,0 @@
-package org.openecomp.sdc.toscaparser.api.elements.constraints;
-
-import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector;
-import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder;
-
-public class Length extends Constraint {
- // Constraint class for "length"
-
- // Constrains the property or parameter to a value of a given length.
-
- @Override
- protected void _setValues() {
-
- constraintKey = LENGTH;
-
- validTypes.add("Integer");
-
- validPropTypes.add(Schema.STRING);
-
- }
-
- public Length(String name,String type,Object c) {
- super(name,type,c);
-
- if(!validTypes.contains(constraintValue.getClass().getSimpleName())) {
- ThreadLocalsHolder.getCollector().appendException("InvalidSchemaError: The property \"length\" expects an integer");
- }
- }
-
- @Override
- protected boolean _isValid(Object value) {
- if(value instanceof String && constraintValue instanceof Integer &&
- ((String)value).length() == (Integer)constraintValue) {
- return true;
- }
- return false;
- }
-
- @Override
- protected String _errMsg(Object value) {
- return String.format("Length of value \"%s\" of property \"%s\" must be equal to \"%s\"",
- value.toString(),propertyName,constraintValue.toString());
- }
-
-}
-
-/*python
- class Length(Constraint):
- """Constraint class for "length"
-
- Constrains the property or parameter to a value of a given length.
- """
-
- constraint_key = Constraint.LENGTH
-
- valid_types = (int, )
-
- valid_prop_types = (Schema.STRING, )
-
- def __init__(self, property_name, property_type, constraint):
- super(Length, self).__init__(property_name, property_type, constraint)
- if not isinstance(self.constraint_value, self.valid_types):
- ExceptionCollector.appendException(
- InvalidSchemaError(message=_('The property "length" expects '
- 'an integer.')))
-
- def _is_valid(self, value):
- if isinstance(value, str) and len(value) == self.constraint_value:
- return True
-
- return False
-
- def _err_msg(self, value):
- return (_('Length of value "%(pvalue)s" of property "%(pname)s" '
- 'must be equal to "%(cvalue)s".') %
- dict(pname=self.property_name,
- pvalue=value,
- cvalue=self.constraint_value))
-*/ \ No newline at end of file
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/LessOrEqual.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/LessOrEqual.java
deleted file mode 100644
index 00cba36..0000000
--- a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/LessOrEqual.java
+++ /dev/null
@@ -1,106 +0,0 @@
-package org.openecomp.sdc.toscaparser.api.elements.constraints;
-
-import java.util.Date;
-
-import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector;
-import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder;
-
-public class LessOrEqual extends Constraint {
- // Constraint class for "less_or_equal"
-
- // Constrains a property or parameter to a value less than or equal
- // to ('<=') the value declared.
-
- protected void _setValues() {
-
- constraintKey = LESS_OR_EQUAL;
-
- validTypes.add("Integer");
- validTypes.add("Double");
- validTypes.add("Float");
- // timestamps are loaded as Date objects
- validTypes.add("Date");
- //validTypes.add("datetime.date");
- //validTypes.add("datetime.time");
- //validTypes.add("datetime.datetime");
-
- validPropTypes.add(Schema.INTEGER);
- validPropTypes.add(Schema.FLOAT);
- validPropTypes.add(Schema.TIMESTAMP);
- validPropTypes.add(Schema.SCALAR_UNIT_SIZE);
- validPropTypes.add(Schema.SCALAR_UNIT_FREQUENCY);
- validPropTypes.add(Schema.SCALAR_UNIT_TIME);
-
- }
-
- public LessOrEqual(String name,String type,Object c) {
- super(name,type,c);
-
- if(!validTypes.contains(constraintValue.getClass().getSimpleName())) {
- ThreadLocalsHolder.getCollector().appendException("InvalidSchemaError: The property \"less_or_equal\" expects comparable values");
- }
- }
-
- @Override
- protected boolean _isValid(Object value) {
-
- // timestamps
- if(value instanceof Date) {
- if(constraintValue instanceof Date) {
- return !((Date)value).after((Date)constraintValue);
- }
- return false;
- }
-
- Double n1 = new Double(value.toString());
- Double n2 = new Double(constraintValue.toString());
- return n1 <= n2;
- }
-
- @Override
- protected String _errMsg(Object value) {
- return String.format("The value \"%s\" of property \"%s\" must be less or equal to \"%s\"",
- valueMsg,propertyName,constraintValueMsg);
- }
-
-}
-
-/*python
-
-class LessOrEqual(Constraint):
- """Constraint class for "less_or_equal"
-
- Constrains a property or parameter to a value less than or equal
- to ('<=') the value declared.
- """
-
- constraint_key = Constraint.LESS_OR_EQUAL
-
- valid_types = (int, float, datetime.date,
- datetime.time, datetime.datetime)
-
- valid_prop_types = (Schema.INTEGER, Schema.FLOAT, Schema.TIMESTAMP,
- Schema.SCALAR_UNIT_SIZE, Schema.SCALAR_UNIT_FREQUENCY,
- Schema.SCALAR_UNIT_TIME)
-
- def __init__(self, property_name, property_type, constraint):
- super(LessOrEqual, self).__init__(property_name, property_type,
- constraint)
- if not isinstance(self.constraint_value, self.valid_types):
- ExceptionCollector.appendException(
- InvalidSchemaError(message=_('The property "less_or_equal" '
- 'expects comparable values.')))
-
- def _is_valid(self, value):
- if value <= self.constraint_value:
- return True
-
- return False
-
- def _err_msg(self, value):
- return (_('The value "%(pvalue)s" of property "%(pname)s" must be '
- 'less than or equal to "%(cvalue)s".') %
- dict(pname=self.property_name,
- pvalue=self.value_msg,
- cvalue=self.constraint_value_msg))
-*/ \ No newline at end of file
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/LessThan.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/LessThan.java
deleted file mode 100644
index eb5a41d..0000000
--- a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/LessThan.java
+++ /dev/null
@@ -1,104 +0,0 @@
-package org.openecomp.sdc.toscaparser.api.elements.constraints;
-
-import java.util.Date;
-
-import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector;
-import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder;
-
-public class LessThan extends Constraint {
-
- @Override
- protected void _setValues() {
-
- constraintKey = LESS_THAN;
-
- validTypes.add("Integer");
- validTypes.add("Double");
- validTypes.add("Float");
- // timestamps are loaded as Date objects
- validTypes.add("Date");
- //validTypes.add("datetime.date");
- //validTypes.add("datetime.time");
- //validTypes.add("datetime.datetime");
-
-
- validPropTypes.add(Schema.INTEGER);
- validPropTypes.add(Schema.FLOAT);
- validPropTypes.add(Schema.TIMESTAMP);
- validPropTypes.add(Schema.SCALAR_UNIT_SIZE);
- validPropTypes.add(Schema.SCALAR_UNIT_FREQUENCY);
- validPropTypes.add(Schema.SCALAR_UNIT_TIME);
-
- }
-
- public LessThan(String name,String type,Object c) {
- super(name,type,c);
-
- if(!validTypes.contains(constraintValue.getClass().getSimpleName())) {
- ThreadLocalsHolder.getCollector().appendException("InvalidSchemaError: The property \"less_than\" expects comparable values");
- }
- }
-
- @Override
- protected boolean _isValid(Object value) {
-
- // timestamps
- if(value instanceof Date) {
- if(constraintValue instanceof Date) {
- return ((Date)value).before((Date)constraintValue);
- }
- return false;
- }
-
- Double n1 = new Double(value.toString());
- Double n2 = new Double(constraintValue.toString());
- return n1 < n2;
- }
-
- @Override
- protected String _errMsg(Object value) {
- return String.format("The value \"%s\" of property \"%s\" must be less than \"%s\"",
- valueMsg,propertyName,constraintValueMsg);
- }
-
-}
-
-/*python
-
-class LessThan(Constraint):
-"""Constraint class for "less_than"
-
-Constrains a property or parameter to a value less than ('<')
-the value declared.
-"""
-
-constraint_key = Constraint.LESS_THAN
-
-valid_types = (int, float, datetime.date,
- datetime.time, datetime.datetime)
-
-valid_prop_types = (Schema.INTEGER, Schema.FLOAT, Schema.TIMESTAMP,
- Schema.SCALAR_UNIT_SIZE, Schema.SCALAR_UNIT_FREQUENCY,
- Schema.SCALAR_UNIT_TIME)
-
-def __init__(self, property_name, property_type, constraint):
- super(LessThan, self).__init__(property_name, property_type,
- constraint)
- if not isinstance(self.constraint_value, self.valid_types):
- ExceptionCollector.appendException(
- InvalidSchemaError(message=_('The property "less_than" '
- 'expects comparable values.')))
-
-def _is_valid(self, value):
- if value < self.constraint_value:
- return True
-
- return False
-
-def _err_msg(self, value):
- return (_('The value "%(pvalue)s" of property "%(pname)s" must be '
- 'less than "%(cvalue)s".') %
- dict(pname=self.property_name,
- pvalue=self.value_msg,
- cvalue=self.constraint_value_msg))
-*/ \ No newline at end of file
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/MaxLength.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/MaxLength.java
deleted file mode 100644
index 278ae85..0000000
--- a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/MaxLength.java
+++ /dev/null
@@ -1,90 +0,0 @@
-package org.openecomp.sdc.toscaparser.api.elements.constraints;
-
-import java.util.LinkedHashMap;
-
-import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector;
-import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder;
-
-public class MaxLength extends Constraint {
-	// Constraint class for "max_length"
-
- // Constrains the property or parameter to a value of a maximum length.
-
- @Override
- protected void _setValues() {
-
- constraintKey = MAX_LENGTH;
-
- validTypes.add("Integer");
-
- validPropTypes.add(Schema.STRING);
- validPropTypes.add(Schema.MAP);
-
- }
-
- public MaxLength(String name,String type,Object c) {
- super(name,type,c);
-
- if(!validTypes.contains(constraintValue.getClass().getSimpleName())) {
- ThreadLocalsHolder.getCollector().appendException("InvalidSchemaError: The property \"max_length\" expects an integer");
- }
- }
-
- @SuppressWarnings("unchecked")
- @Override
- protected boolean _isValid(Object value) {
- if(value instanceof String && constraintValue instanceof Integer &&
- ((String)value).length() <= (Integer)constraintValue) {
- return true;
- }
- else if(value instanceof LinkedHashMap && constraintValue instanceof Integer &&
- ((LinkedHashMap<String,Object>)value).size() <= (Integer)constraintValue) {
- return true;
- }
- return false;
- }
-
- @Override
- protected String _errMsg(Object value) {
- return String.format("Length of value \"%s\" of property \"%s\" must be no greater than \"%s\"",
- value.toString(),propertyName,constraintValue.toString());
- }
-
-}
-
-/*python
-
-class MaxLength(Constraint):
- """Constraint class for "max_length"
-
- Constrains the property or parameter to a value to a maximum length.
- """
-
- constraint_key = Constraint.MAX_LENGTH
-
- valid_types = (int, )
-
- valid_prop_types = (Schema.STRING, Schema.MAP)
-
- def __init__(self, property_name, property_type, constraint):
- super(MaxLength, self).__init__(property_name, property_type,
- constraint)
- if not isinstance(self.constraint_value, self.valid_types):
- ExceptionCollector.appendException(
- InvalidSchemaError(message=_('The property "max_length" '
- 'expects an integer.')))
-
- def _is_valid(self, value):
- if ((isinstance(value, str) or isinstance(value, dict)) and
- len(value) <= self.constraint_value):
- return True
-
- return False
-
- def _err_msg(self, value):
- return (_('Length of value "%(pvalue)s" of property "%(pname)s" '
- 'must be no greater than "%(cvalue)s".') %
- dict(pname=self.property_name,
- pvalue=value,
- cvalue=self.constraint_value))
-*/ \ No newline at end of file
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/MinLength.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/MinLength.java
deleted file mode 100644
index 480c878..0000000
--- a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/MinLength.java
+++ /dev/null
@@ -1,90 +0,0 @@
-package org.openecomp.sdc.toscaparser.api.elements.constraints;
-
-import java.util.LinkedHashMap;
-
-import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector;
-import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder;
-
-public class MinLength extends Constraint {
- // Constraint class for "min_length"
-
- // Constrains the property or parameter to a value of a minimum length.
-
- @Override
- protected void _setValues() {
-
- constraintKey = MIN_LENGTH;
-
- validTypes.add("Integer");
-
- validPropTypes.add(Schema.STRING);
- validPropTypes.add(Schema.MAP);
-
- }
-
- public MinLength(String name,String type,Object c) {
- super(name,type,c);
-
- if(!validTypes.contains(constraintValue.getClass().getSimpleName())) {
- ThreadLocalsHolder.getCollector().appendException("InvalidSchemaError: The property \"min_length\" expects an integer");
- }
- }
-
- @SuppressWarnings("unchecked")
- @Override
- protected boolean _isValid(Object value) {
- if(value instanceof String && constraintValue instanceof Integer &&
- ((String)value).length() >= (Integer)constraintValue) {
- return true;
- }
- else if(value instanceof LinkedHashMap && constraintValue instanceof Integer &&
- ((LinkedHashMap<String,Object>)value).size() >= (Integer)constraintValue) {
- return true;
- }
- return false;
- }
-
- @Override
- protected String _errMsg(Object value) {
- return String.format("Length of value \"%s\" of property \"%s\" must be at least \"%s\"",
- value.toString(),propertyName,constraintValue.toString());
- }
-
-}
-
-/*python
-
-class MinLength(Constraint):
- """Constraint class for "min_length"
-
- Constrains the property or parameter to a value to a minimum length.
- """
-
- constraint_key = Constraint.MIN_LENGTH
-
- valid_types = (int, )
-
- valid_prop_types = (Schema.STRING, Schema.MAP)
-
- def __init__(self, property_name, property_type, constraint):
- super(MinLength, self).__init__(property_name, property_type,
- constraint)
- if not isinstance(self.constraint_value, self.valid_types):
- ExceptionCollector.appendException(
- InvalidSchemaError(message=_('The property "min_length" '
- 'expects an integer.')))
-
- def _is_valid(self, value):
- if ((isinstance(value, str) or isinstance(value, dict)) and
- len(value) >= self.constraint_value):
- return True
-
- return False
-
- def _err_msg(self, value):
- return (_('Length of value "%(pvalue)s" of property "%(pname)s" '
- 'must be at least "%(cvalue)s".') %
- dict(pname=self.property_name,
- pvalue=value,
- cvalue=self.constraint_value))
-*/ \ No newline at end of file
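
For Schema.MAP properties, min_length and max_length count map entries rather than characters, mirroring the Python len() on a dict. A brief sketch with hypothetical values (an ExceptionCollector is assumed to be registered for the failing call):

package org.openecomp.sdc.toscaparser.api.elements.constraints;

import java.util.LinkedHashMap;

public class LengthConstraintsDemo {
    public static void main(String[] args) {
        LinkedHashMap<String, Object> clause = new LinkedHashMap<>();
        clause.put("min_length", 2);

        // for map-typed properties the constraint counts entries, not characters
        MinLength ml = new MinLength("metadata", Schema.MAP, clause);

        LinkedHashMap<String, Object> value = new LinkedHashMap<>();
        value.put("vendor", "openecomp");
        value.put("version", "1.0");
        ml.validate(value); // 2 entries >= 2, valid

        ml.validate("x");   // a 1-character string fails, warning appended
    }
}
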
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Pattern.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Pattern.java
deleted file mode 100644
index 444a73c..0000000
--- a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Pattern.java
+++ /dev/null
@@ -1,96 +0,0 @@
-package org.openecomp.sdc.toscaparser.api.elements.constraints;
-
-import java.util.regex.Matcher;
-import java.util.regex.PatternSyntaxException;
-
-import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector;
-import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder;
-
-public class Pattern extends Constraint {
-
- @Override
- protected void _setValues() {
-
- constraintKey = PATTERN;
-
- validTypes.add("String");
-
- validPropTypes.add(Schema.STRING);
-
- }
-
-
- public Pattern(String name,String type,Object c) {
- super(name,type,c);
-
- if(!validTypes.contains(constraintValue.getClass().getSimpleName())) {
- ThreadLocalsHolder.getCollector().appendException("InvalidSchemaError: The property \"pattern\" expects a string");
- }
- }
-
- @Override
- protected boolean _isValid(Object value) {
- try {
- if(!(value instanceof String)) {
- ThreadLocalsHolder.getCollector().appendException(String.format("ValueError: Input value \"%s\" to \"pattern\" property \"%s\" must be a string",
- value.toString(),propertyName));
- return false;
- }
- String strp = constraintValue.toString();
- String strm = value.toString();
- java.util.regex.Pattern pattern = java.util.regex.Pattern.compile(strp);
- Matcher matcher = pattern.matcher(strm);
- if(matcher.find() && matcher.end() == strm.length()) {
- return true;
- }
- return false;
- }
- catch(PatternSyntaxException pse) {
- ThreadLocalsHolder.getCollector().appendException(String.format("ValueError: Invalid regex \"%s\" in \"pattern\" property \"%s\"",
- constraintValue.toString(),propertyName));
- return false;
- }
- }
-
- @Override
- protected String _errMsg(Object value) {
- return String.format("The value \"%s\" of property \"%s\" does not match the pattern \"%s\"",
- value.toString(),propertyName,constraintValue.toString());
- }
-
-}
-
-/*python
-
-class Pattern(Constraint):
- """Constraint class for "pattern"
-
- Constrains the property or parameter to a value that is allowed by
- the provided regular expression.
- """
-
- constraint_key = Constraint.PATTERN
-
- valid_types = (str, )
-
- valid_prop_types = (Schema.STRING, )
-
- def __init__(self, property_name, property_type, constraint):
- super(Pattern, self).__init__(property_name, property_type, constraint)
- if not isinstance(self.constraint_value, self.valid_types):
- ExceptionCollector.appendException(
- InvalidSchemaError(message=_('The property "pattern" '
- 'expects a string.')))
- self.match = re.compile(self.constraint_value).match
-
- def _is_valid(self, value):
- match = self.match(value)
- return match is not None and match.end() == len(value)
-
- def _err_msg(self, value):
- return (_('The value "%(pvalue)s" of property "%(pname)s" does not '
- 'match pattern "%(cvalue)s".') %
- dict(pname=self.property_name,
- pvalue=value,
- cvalue=self.constraint_value))
-*/ \ No newline at end of file
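
The validity check above uses matcher.find() plus an end-of-string test rather than String.matches(): the match must run to the end of the value, but (unlike matches()) it is not forced to start at index 0. A standalone sketch of that check, using only java.util.regex and hypothetical sample values:

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class PatternCheckDemo {
    // mirrors the check used in the Pattern constraint above
    static boolean accepted(String regex, String value) {
        Matcher m = Pattern.compile(regex).matcher(value);
        return m.find() && m.end() == value.length();
    }

    public static void main(String[] args) {
        System.out.println(accepted("[0-9]+", "8080"));     // true: the match covers the whole value
        System.out.println(accepted("[0-9]+", "8080/tcp")); // false: the match stops before the end
        // unlike String.matches(), the match need not start at index 0
        System.out.println(accepted("[0-9]+", "port8080")); // true: "8080" ends at the value's end
    }
}
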
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Schema.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Schema.java
deleted file mode 100644
index ca721e6..0000000
--- a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/Schema.java
+++ /dev/null
@@ -1,278 +0,0 @@
-package org.openecomp.sdc.toscaparser.api.elements.constraints;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.LinkedHashMap;
-import java.util.Map;
-
-import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector;
-import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder;
-
-
-public class Schema {
-
- private static final String TYPE = "type";
- private static final String REQUIRED = "required";
- private static final String DESCRIPTION = "description";
- private static final String DEFAULT = "default";
- private static final String CONSTRAINTS = "constraints";
- private static final String STATUS = "status";
- private static final String ENTRYSCHEMA = "entry_schema";
- private static final String KEYS[] = {
- TYPE, REQUIRED, DESCRIPTION,DEFAULT, CONSTRAINTS, ENTRYSCHEMA, STATUS};
-
- public static final String INTEGER = "integer";
- public static final String STRING = "string";
- public static final String BOOLEAN = "boolean";
- public static final String FLOAT = "float";
- public static final String RANGE = "range";
- public static final String NUMBER = "number";
- public static final String TIMESTAMP = "timestamp";
- public static final String LIST = "list";
- public static final String MAP = "map";
- public static final String SCALAR_UNIT_SIZE = "scalar-unit.size";
- public static final String SCALAR_UNIT_FREQUENCY = "scalar-unit.frequency";
- public static final String SCALAR_UNIT_TIME = "scalar-unit.time";
- public static final String VERSION = "version";
- public static final String PORTDEF = "PortDef";
- public static final String PORTSPEC = "PortSpec"; //??? PortSpec.SHORTNAME
- public static final String JSON = "json";
-
- public static final String PROPERTY_TYPES[] = {
- INTEGER, STRING, BOOLEAN, FLOAT, RANGE,NUMBER, TIMESTAMP, LIST, MAP,
- SCALAR_UNIT_SIZE, SCALAR_UNIT_FREQUENCY, SCALAR_UNIT_TIME,
- VERSION, PORTDEF, PORTSPEC, JSON};
-
- @SuppressWarnings("unused")
- private static final String SCALAR_UNIT_SIZE_DEFAULT = "B";
-
- private static Map<String,Long> SCALAR_UNIT_SIZE_DICT = new HashMap<>();
- static {
- SCALAR_UNIT_SIZE_DICT.put("B", 1L);
- SCALAR_UNIT_SIZE_DICT.put("KB", 1000L);
- SCALAR_UNIT_SIZE_DICT.put("KIB", 1024L);
- SCALAR_UNIT_SIZE_DICT.put("MB", 1000000L);
- SCALAR_UNIT_SIZE_DICT.put("MIB", 1048576L);
- SCALAR_UNIT_SIZE_DICT.put("GB", 1000000000L);
- SCALAR_UNIT_SIZE_DICT.put("GIB", 1073741824L);
- SCALAR_UNIT_SIZE_DICT.put("TB", 1000000000000L);
- SCALAR_UNIT_SIZE_DICT.put("TIB", 1099511627776L);
- }
-
- private String name;
- private LinkedHashMap<String,Object> schema;
- private int _len;
- private ArrayList<Constraint> constraintsList;
-
-
- public Schema(String _name,LinkedHashMap<String,Object> _schemaDict) {
- name = _name;
-
- if(!(_schemaDict instanceof LinkedHashMap)) {
- //msg = (_('Schema definition of "%(pname)s" must be a dict.')
- // % dict(pname=name))
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "InvalidSchemaError: Schema definition of \"%s\" must be a dict",name));
- }
-
- if(_schemaDict.get("type") == null) {
- //msg = (_('Schema definition of "%(pname)s" must have a "type" '
- // 'attribute.') % dict(pname=name))
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "InvalidSchemaError: Schema definition of \"%s\" must have a \"type\" attribute",name));
- }
-
- schema = _schemaDict;
- _len = 0; //??? None
- constraintsList = new ArrayList<>();
- }
-
- public String getType() {
- return (String)schema.get(TYPE);
- }
-
- public boolean isRequired() {
- return (boolean)schema.getOrDefault(REQUIRED, true);
- }
-
- public String getDescription() {
- return (String)schema.getOrDefault(DESCRIPTION,"");
- }
-
- public Object getDefault() {
- return schema.get(DEFAULT);
- }
-
- public String getStatus() {
- return (String)schema.getOrDefault(STATUS,"");
- }
-
- @SuppressWarnings("unchecked")
- public ArrayList<Constraint> getConstraints() {
- if(constraintsList.size() == 0) {
- Object cob = schema.get(CONSTRAINTS);
- if(cob instanceof ArrayList) {
- ArrayList<Object> constraintSchemata = (ArrayList<Object>)cob;
- for(Object ob: constraintSchemata) {
- if(ob instanceof LinkedHashMap) {
- for(String cClass: ((LinkedHashMap<String,Object>)ob).keySet()) {
- Constraint c = Constraint.factory(cClass,name,getType(),ob);
- if(c != null) {
- constraintsList.add(c);
- }
- else {
- // error
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "UnknownFieldError: Constraint type \"%s\" for property \"%s\" is not supported",
- cClass,name));
- }
- break;
- }
- }
- }
- }
- }
- return constraintsList;
- }
-
- @SuppressWarnings("unchecked")
- public LinkedHashMap<String,Object> getEntrySchema() {
- return (LinkedHashMap<String,Object>)schema.get(ENTRYSCHEMA);
- }
-
- // Python intrinsic methods...
-
- // substitute for __getitem__ (aka self[key])
- public Object getItem(String key) {
- return schema.get(key);
- }
-
- /*
- def __iter__(self):
- for k in self.KEYS:
- try:
- self.schema[k]
- except KeyError:
- pass
- else:
- yield k
- */
-
- // substitute for __len__ (aka self.len())
- public int getLen() {
- int len = 0;
- for(String k: KEYS) {
- if(schema.get(k) != null) {
- len++;
- }
- _len = len;
- }
- return _len;
- }
- // getter
- public LinkedHashMap<String,Object> getSchema() {
- return schema;
- }
-
-}
-
-/*python
-
-class Schema(collections.Mapping):
-
-KEYS = (
- TYPE, REQUIRED, DESCRIPTION,
- DEFAULT, CONSTRAINTS, ENTRYSCHEMA, STATUS
-) = (
- 'type', 'required', 'description',
- 'default', 'constraints', 'entry_schema', 'status'
-)
-
-PROPERTY_TYPES = (
- INTEGER, STRING, BOOLEAN, FLOAT, RANGE,
- NUMBER, TIMESTAMP, LIST, MAP,
- SCALAR_UNIT_SIZE, SCALAR_UNIT_FREQUENCY, SCALAR_UNIT_TIME,
- VERSION, PORTDEF, PORTSPEC
-) = (
- 'integer', 'string', 'boolean', 'float', 'range',
- 'number', 'timestamp', 'list', 'map',
- 'scalar-unit.size', 'scalar-unit.frequency', 'scalar-unit.time',
- 'version', 'PortDef', PortSpec.SHORTNAME
-)
-
-SCALAR_UNIT_SIZE_DEFAULT = 'B'
-SCALAR_UNIT_SIZE_DICT = {'B': 1, 'KB': 1000, 'KIB': 1024, 'MB': 1000000,
- 'MIB': 1048576, 'GB': 1000000000,
- 'GIB': 1073741824, 'TB': 1000000000000,
- 'TIB': 1099511627776}
-
-def __init__(self, name, schema_dict):
- self.name = name
- if not isinstance(schema_dict, collections.Mapping):
- msg = (_('Schema definition of "%(pname)s" must be a dict.')
- % dict(pname=name))
- ExceptionCollector.appendException(InvalidSchemaError(message=msg))
-
- try:
- schema_dict['type']
- except KeyError:
- msg = (_('Schema definition of "%(pname)s" must have a "type" '
- 'attribute.') % dict(pname=name))
- ExceptionCollector.appendException(InvalidSchemaError(message=msg))
-
- self.schema = schema_dict
- self._len = None
- self.constraints_list = []
-
-@property
-def type(self):
- return self.schema[self.TYPE]
-
-@property
-def required(self):
- return self.schema.get(self.REQUIRED, True)
-
-@property
-def description(self):
- return self.schema.get(self.DESCRIPTION, '')
-
-@property
-def default(self):
- return self.schema.get(self.DEFAULT)
-
-@property
-def status(self):
- return self.schema.get(self.STATUS, '')
-
-@property
-def constraints(self):
- if not self.constraints_list:
- constraint_schemata = self.schema.get(self.CONSTRAINTS)
- if constraint_schemata:
- self.constraints_list = [Constraint(self.name,
- self.type,
- cschema)
- for cschema in constraint_schemata]
- return self.constraints_list
-
-@property
-def entry_schema(self):
- return self.schema.get(self.ENTRYSCHEMA)
-
-def __getitem__(self, key):
- return self.schema[key]
-
-def __iter__(self):
- for k in self.KEYS:
- try:
- self.schema[k]
- except KeyError:
- pass
- else:
- yield k
-
-def __len__(self):
- if self._len is None:
- self._len = len(list(iter(self)))
- return self._len
-*/ \ No newline at end of file
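
getConstraints() walks the constraints list of the schema dict and hands each single-entry map to Constraint.factory(). A minimal sketch of building a Schema by hand, assuming the classes in this diff are available; the property name and values are hypothetical:

package org.openecomp.sdc.toscaparser.api.elements.constraints;

import java.util.ArrayList;
import java.util.LinkedHashMap;

public class SchemaDemo {
    public static void main(String[] args) {
        // equivalent of:
        //   type: integer
        //   required: true
        //   constraints:
        //     - greater_or_equal: 1
        LinkedHashMap<String, Object> ge = new LinkedHashMap<>();
        ge.put("greater_or_equal", 1);
        ArrayList<Object> constraints = new ArrayList<>();
        constraints.add(ge);

        LinkedHashMap<String, Object> def = new LinkedHashMap<>();
        def.put("type", Schema.INTEGER);
        def.put("required", true);
        def.put("constraints", constraints);

        Schema schema = new Schema("num_cpus", def);
        System.out.println(schema.getType());    // integer
        System.out.println(schema.isRequired()); // true
        for (Constraint c : schema.getConstraints()) {
            c.validate(4);                       // 4 >= 1, no warning appended
        }
    }
}
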
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/ValidValues.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/ValidValues.java
deleted file mode 100644
index 06622e4..0000000
--- a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/elements/constraints/ValidValues.java
+++ /dev/null
@@ -1,84 +0,0 @@
-package org.openecomp.sdc.toscaparser.api.elements.constraints;
-
-import java.util.ArrayList;
-
-public class ValidValues extends Constraint {
-
-
- protected void _setValues() {
-
- constraintKey = VALID_VALUES;
-
- for(String s: Schema.PROPERTY_TYPES) {
- validPropTypes.add(s);
- }
-
- }
-
-
- public ValidValues(String name,String type,Object c) {
- super(name,type,c);
-
- }
-
- @SuppressWarnings("unchecked")
- protected boolean _isValid(Object val) {
- if(!(constraintValue instanceof ArrayList)) {
- return false;
- }
- if(val instanceof ArrayList) {
- boolean bAll = true;
- for(Object v: (ArrayList<Object>)val) {
- if(!((ArrayList<Object>)constraintValue).contains(v)) {
- bAll = false;
- break;
-				}
- }
- return bAll;
- }
- return ((ArrayList<Object>)constraintValue).contains(val);
- }
-
- protected String _errMsg(Object value) {
- return String.format("The value \"%s\" of property \"%s\" is not valid. Expected a value from \"%s\"",
- value.toString(),propertyName,constraintValue.toString());
- }
-
-}
-
-/*python
-
-class ValidValues(Constraint):
-"""Constraint class for "valid_values"
-
-Constrains a property or parameter to a value that is in the list of
-declared values.
-"""
-constraint_key = Constraint.VALID_VALUES
-
-valid_prop_types = Schema.PROPERTY_TYPES
-
-def __init__(self, property_name, property_type, constraint):
- super(ValidValues, self).__init__(property_name, property_type,
- constraint)
- if not isinstance(self.constraint_value, collections.Sequence):
- ExceptionCollector.appendException(
- InvalidSchemaError(message=_('The property "valid_values" '
- 'expects a list.')))
-
-def _is_valid(self, value):
- print '*** payton parser validating ',value,' in ',self.constraint_value#GGG
- if isinstance(value, list):
- return all(v in self.constraint_value for v in value)
- return value in self.constraint_value
-
-def _err_msg(self, value):
- allowed = '[%s]' % ', '.join(str(a) for a in self.constraint_value)
- return (_('The value "%(pvalue)s" of property "%(pname)s" is not '
- 'valid. Expected a value from "%(cvalue)s".') %
- dict(pname=self.property_name,
- pvalue=value,
- cvalue=allowed))
-
-
-*/ \ No newline at end of file
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/extensions/ExtTools.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/extensions/ExtTools.java
deleted file mode 100644
index 6403d6e..0000000
--- a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/extensions/ExtTools.java
+++ /dev/null
@@ -1,210 +0,0 @@
-package org.openecomp.sdc.toscaparser.api.extensions;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.BufferedReader;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.nio.charset.Charset;
-import java.util.ArrayList;
-import java.util.LinkedHashMap;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
-public class ExtTools {
-
- private static Logger log = LoggerFactory.getLogger(ExtTools.class.getName());
-
- private static LinkedHashMap<String,Object> EXTENSION_INFO = new LinkedHashMap<>();
-
- public ExtTools() {
-
- EXTENSION_INFO = _loadExtensions();
- }
-
- private LinkedHashMap<String,Object> _loadExtensions() {
-
- LinkedHashMap<String,Object> extensions = new LinkedHashMap<>();
-
- String path = ExtTools.class.getProtectionDomain().getCodeSource().getLocation().getPath();
- //String extdir = path + File.separator + "resources/extensions";
-
- String extdir = ExtTools.class.getClassLoader().getResource("extensions").getFile();
-
- // for all folders in extdir
- File extDir = new File(extdir);
- File extDirList[] = extDir.listFiles();
- if (extDirList == null) {
-			// nothing to scan; the extensions map stays empty
-
- }
- if (extDirList != null) {
- for(File f: extDirList) {
- if(f.isDirectory()) {
- // for all .py files in folder
- File extFileList[] = f.listFiles();
- for(File pyf: extFileList) {
- String pyfName = pyf.getName();
- String pyfPath = pyf.getAbsolutePath();
- if(pyfName.endsWith(".py")) {
-						// get VERSION, SECTIONS, DEFS_FILE
- try {
- String version = null;
- ArrayList<String> sections = null;
- String defsFile = null;
- String line;
- InputStream fis = new FileInputStream(pyfPath);
- InputStreamReader isr = new InputStreamReader(fis, Charset.forName("UTF-8"));
- BufferedReader br = new BufferedReader(isr);
- Pattern pattern = Pattern.compile("^([^#]\\S+)\\s*=\\s*(\\S.*)$");
- while((line = br.readLine()) != null) {
- line = line.replace("'","\"");
- Matcher matcher = pattern.matcher(line.toString());
- if(matcher.find()) {
- if(matcher.group(1).equals("VERSION")) {
- version = matcher.group(2);
- if(version.startsWith("'") || version.startsWith("\"")) {
- version = version.substring(1,version.length()-1);
- }
- }
- else if(matcher.group(1).equals("DEFS_FILE")) {
- String fn = matcher.group(2);
- if(fn.startsWith("'") || fn.startsWith("\"")) {
- fn = fn.substring(1,fn.length()-1);
- }
- defsFile = pyf.getParent() + File.separator + fn;//matcher.group(2);
- }
- else if(matcher.group(1).equals("SECTIONS")) {
- sections = new ArrayList<>();
- Pattern secpat = Pattern.compile("\"([^\"]+)\"");
- Matcher secmat = secpat.matcher(matcher.group(2));
- while(secmat.find()) {
- sections.add(secmat.group(1));
- }
- }
- }
- }
- br.close();
-
- if(version != null && defsFile != null) {
- LinkedHashMap<String,Object> ext = new LinkedHashMap<>();
- ext.put("defs_file", defsFile);
- if(sections != null) {
- ext.put("sections", sections);
- }
- extensions.put(version, ext);
- }
- else {
- // error
- }
- }
- catch(Exception e) {
- log.error("ExtTools - _loadExtensions - {}", e.getMessage());
- // ...
- }
- }
- }
- }
- }
- }
- return extensions;
- }
-
- public ArrayList<String> getVersions() {
- return new ArrayList<String>(EXTENSION_INFO.keySet());
- }
-
- public LinkedHashMap<String,ArrayList<String>> getSections() {
- LinkedHashMap<String,ArrayList<String>> sections = new LinkedHashMap<>();
- for(String version: EXTENSION_INFO.keySet()) {
- LinkedHashMap<String,Object> eiv = (LinkedHashMap<String,Object>)EXTENSION_INFO.get(version);
- sections.put(version,(ArrayList<String>)eiv.get("sections"));
- }
- return sections;
- }
-
- public String getDefsFile(String version) {
- LinkedHashMap<String,Object> eiv = (LinkedHashMap<String,Object>)EXTENSION_INFO.get(version);
- return (String)eiv.get("defs_file");
- }
-
-}
-
-/*python
-
-from toscaparser.common.exception import ToscaExtAttributeError
-from toscaparser.common.exception import ToscaExtImportError
-
-log = logging.getLogger("tosca.model")
-
-REQUIRED_ATTRIBUTES = ['VERSION', 'DEFS_FILE']
-
-
-class ExtTools(object):
- def __init__(self):
- self.EXTENSION_INFO = self._load_extensions()
-
- def _load_extensions(self):
- '''Dynamically load all the extensions .'''
- extensions = {}
-
- # Use the absolute path of the class path
- abs_path = os.path.dirname(os.path.abspath(__file__))
-
- extdirs = [e for e in os.listdir(abs_path) if
- not e.startswith('tests') and
- os.path.isdir(os.path.join(abs_path, e))]
-
- for e in extdirs:
- log.info(e)
- extpath = abs_path + '/' + e
- # Grab all the extension files in the given path
- ext_files = [f for f in os.listdir(extpath) if f.endswith('.py')
- and not f.startswith('__init__')]
-
- # For each module, pick out the target translation class
- for f in ext_files:
- log.info(f)
- ext_name = 'toscaparser/extensions/' + e + '/' + f.strip('.py')
- ext_name = ext_name.replace('/', '.')
- try:
- extinfo = importlib.import_module(ext_name)
- version = getattr(extinfo, 'VERSION')
- defs_file = extpath + '/' + getattr(extinfo, 'DEFS_FILE')
-
- # Sections is an optional attribute
- sections = getattr(extinfo, 'SECTIONS', ())
-
- extensions[version] = {'sections': sections,
- 'defs_file': defs_file}
- except ImportError:
- raise ToscaExtImportError(ext_name=ext_name)
- except AttributeError:
- attrs = ', '.join(REQUIRED_ATTRIBUTES)
- raise ToscaExtAttributeError(ext_name=ext_name,
- attrs=attrs)
-
- print 'Extensions ',extensions#GGG
- return extensions
-
- def get_versions(self):
- return self.EXTENSION_INFO.keys()
-
- def get_sections(self):
- sections = {}
- for version in self.EXTENSION_INFO.keys():
- sections[version] = self.EXTENSION_INFO[version]['sections']
-
- return sections
-
- def get_defs_file(self, version):
- versiondata = self.EXTENSION_INFO.get(version)
-
- if versiondata:
- return versiondata.get('defs_file')
- else:
- return None
-*/ \ No newline at end of file
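
_loadExtensions() does not import the extension's .py file; it scans it line by line with the regex above and picks out VERSION, DEFS_FILE and SECTIONS. A standalone sketch of that parsing step; the file contents shown here are hypothetical:

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class ExtensionParseDemo {
    public static void main(String[] args) {
        // hypothetical contents of an extension's .py definition file
        String[] lines = {
            "# NFV extension definitions",
            "VERSION = 'tosca_simple_profile_for_nfv_1_0_0'",
            "DEFS_FILE = \"TOSCA_nfv_definition_1_0_0.yaml\"",
            "SECTIONS = ('metadata', 'data_types', 'node_types')"
        };

        // same line pattern used by _loadExtensions above; '#' comment lines are skipped
        Pattern p = Pattern.compile("^([^#]\\S+)\\s*=\\s*(\\S.*)$");
        for (String line : lines) {
            line = line.replace("'", "\"");
            Matcher m = p.matcher(line);
            if (m.find()) {
                System.out.println(m.group(1) + " -> " + m.group(2));
            }
        }
    }
}
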
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/functions/Concat.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/functions/Concat.java
deleted file mode 100644
index 6dc7deb..0000000
--- a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/functions/Concat.java
+++ /dev/null
@@ -1,77 +0,0 @@
-package org.openecomp.sdc.toscaparser.api.functions;
-
-import java.util.ArrayList;
-
-import org.openecomp.sdc.toscaparser.api.TopologyTemplate;
-import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector;
-import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder;
-
-public class Concat extends Function {
- // Validate the function and provide an instance of the function
-
-	// Concatenation of values is supposed to be produced at runtime and
-	// therefore it is the responsibility of the TOSCA engine to implement the
- // evaluation of Concat functions.
-
- // Arguments:
-
-	// * List of strings that need to be concatenated
-
- // Example:
-
- // [ 'http://',
- // get_attribute: [ server, public_address ],
- // ':' ,
- // get_attribute: [ server, port ] ]
-
-
- public Concat(TopologyTemplate ttpl,Object context,String name,ArrayList<Object> args) {
- super(ttpl,context,name,args);
- }
-
- @Override
- public Object result() {
- return this;
- }
-
- @Override
- void validate() {
- if(args.size() < 1) {
- ThreadLocalsHolder.getCollector().appendException(
- "ValueError: Invalid arguments for function \"concat\". " +
- "Expected at least one argument");
- }
- }
-
-}
-
-/*python
-
-class Concat(Function):
-"""Validate the function and provide an instance of the function
-
-Concatenation of values are supposed to be produced at runtime and
-therefore its the responsibility of the TOSCA engine to implement the
-evaluation of Concat functions.
-
-Arguments:
-
-* List of strings that needs to be concatenated
-
-Example:
-
- [ 'http://',
- get_attribute: [ server, public_address ],
- ':' ,
- get_attribute: [ server, port ] ]
-"""
-
-def validate(self):
- if len(self.args) < 1:
- ExceptionCollector.appendException(
- ValueError(_('Invalid arguments for function "{0}". Expected '
- 'at least one arguments.').format(CONCAT)))
-
-def result(self):
- return self
-*/ \ No newline at end of file
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/functions/Function.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/functions/Function.java
deleted file mode 100644
index 102fbc0..0000000
--- a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/functions/Function.java
+++ /dev/null
@@ -1,191 +0,0 @@
-package org.openecomp.sdc.toscaparser.api.functions;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.LinkedHashMap;
-
-import org.openecomp.sdc.toscaparser.api.TopologyTemplate;
-
-public abstract class Function {
-
- protected static final String GET_PROPERTY = "get_property";
- protected static final String GET_ATTRIBUTE = "get_attribute";
- protected static final String GET_INPUT = "get_input";
- protected static final String GET_OPERATION_OUTPUT = "get_operation_output";
- protected static final String CONCAT = "concat";
- protected static final String TOKEN = "token";
-
- protected static final String SELF = "SELF";
- protected static final String HOST = "HOST";
- protected static final String TARGET = "TARGET";
- protected static final String SOURCE = "SOURCE";
-
- protected static final String HOSTED_ON = "tosca.relationships.HostedOn";
-
- protected static HashMap<String,String> functionMappings = _getFunctionMappings();
-
- private static HashMap<String,String> _getFunctionMappings() {
- HashMap<String,String> map = new HashMap<>();
- map.put(GET_PROPERTY,"GetProperty");
- map.put(GET_INPUT, "GetInput");
- map.put(GET_ATTRIBUTE, "GetAttribute");
- map.put(GET_OPERATION_OUTPUT, "GetOperationOutput");
- map.put(CONCAT, "Concat");
- map.put(TOKEN, "Token");
- return map;
- }
-
- protected TopologyTemplate toscaTpl;
- protected Object context;
- protected String name;
- protected ArrayList<Object> args;
-
-
- public Function(TopologyTemplate _toscaTpl,Object _context,String _name,ArrayList<Object> _args) {
- toscaTpl = _toscaTpl;
- context = _context;
- name = _name;
- args = _args;
- validate();
-
- }
-
- abstract Object result();
-
- abstract void validate();
-
- @SuppressWarnings("unchecked")
- public static boolean isFunction(Object funcObj) {
- // Returns True if the provided function is a Tosca intrinsic function.
- //
- //Examples:
- //
- //* "{ get_property: { SELF, port } }"
- //* "{ get_input: db_name }"
- //* Function instance
-
- //:param function: Function as string or a Function instance.
- //:return: True if function is a Tosca intrinsic function, otherwise False.
- //
-
- if(funcObj instanceof LinkedHashMap) {
- LinkedHashMap<String,Object> function = (LinkedHashMap<String,Object>)funcObj;
- if(function.size() == 1) {
- String funcName = (new ArrayList<String>(function.keySet())).get(0);
- return functionMappings.keySet().contains(funcName);
- }
- }
- return (funcObj instanceof Function);
- }
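-
-    /* Illustrative sketch (not part of the original file): how a raw parsed YAML
-       value is classified by isFunction(). The LinkedHashMap below stands in for
-       the single-entry map a YAML parser typically produces for
-       "{ get_input: db_name }"; the variable names are assumptions.
-
-    LinkedHashMap<String,Object> raw = new LinkedHashMap<>();
-    raw.put("get_input", "db_name");
-    boolean intrinsic = Function.isFunction(raw);           // true: single key found in functionMappings
-    boolean plain = Function.isFunction("just a string");   // false: neither a map nor a Function instance
-    */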
-
- @SuppressWarnings("unchecked")
- public static Object getFunction(TopologyTemplate ttpl,Object context,Object rawFunctionObj) {
- // Gets a Function instance representing the provided template function.
-
-	// If the provided raw_function format is not relevant for template functions,
-	// or if the function name doesn't exist in the function mapping, the
-	// method returns the provided raw_function unchanged.

- //
- // :param tosca_tpl: The tosca template.
- // :param node_template: The node template the function is specified for.
- // :param raw_function: The raw function as dict.
- // :return: Template function as Function instance or the raw_function if
- // parsing was unsuccessful.
-
- if(isFunction(rawFunctionObj)) {
- if(rawFunctionObj instanceof LinkedHashMap) {
- LinkedHashMap<String,Object> rawFunction = (LinkedHashMap<String,Object>)rawFunctionObj;
- String funcName = (new ArrayList<String>(rawFunction.keySet())).get(0);
- if(functionMappings.keySet().contains(funcName)) {
- String funcType = functionMappings.get(funcName);
- Object oargs = (new ArrayList<Object>(rawFunction.values())).get(0);
- ArrayList<Object> funcArgs;
- if(oargs instanceof ArrayList) {
- funcArgs = (ArrayList<Object>)oargs;
- }
- else {
- funcArgs = new ArrayList<>();
- funcArgs.add(oargs);
- }
-
- if(funcType.equals("GetInput")) {
- return new GetInput(ttpl,context,funcName,funcArgs);
- }
- else if(funcType.equals("GetAttribute")) {
- return new GetAttribute(ttpl,context,funcName,funcArgs);
- }
- else if(funcType.equals("GetProperty")) {
- return new GetProperty(ttpl,context,funcName,funcArgs);
- }
- else if(funcType.equals("GetOperationOutput")) {
- return new GetOperationOutput(ttpl,context,funcName,funcArgs);
- }
- else if(funcType.equals("Concat")) {
- return new Concat(ttpl,context,funcName,funcArgs);
- }
- else if(funcType.equals("Token")) {
- return new Token(ttpl,context,funcName,funcArgs);
- }
- }
- }
- }
- return rawFunctionObj;
- }
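-
-    /* Illustrative sketch (not part of the original file): getFunction() turns a
-       raw single-entry map into the matching Function subclass and returns the
-       argument unchanged when it is not an intrinsic function. The topologyTemplate
-       and nodeTemplate names below are placeholders (assumptions), not a working setup.
-
-    LinkedHashMap<String,Object> raw = new LinkedHashMap<>();
-    raw.put("get_property", new ArrayList<Object>(Arrays.asList("SELF", "db_port")));
-    Object parsed = Function.getFunction(topologyTemplate, nodeTemplate, raw);
-    if(parsed instanceof GetProperty) {
-        Object value = ((GetProperty)parsed).result();   // resolves the property value
-    }
-    */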
-}
-
-/*python
-
-from toscaparser.common.exception import ExceptionCollector
-from toscaparser.common.exception import UnknownInputError
-from toscaparser.dataentity import DataEntity
-from toscaparser.elements.constraints import Schema
-from toscaparser.elements.datatype import DataType
-from toscaparser.elements.entity_type import EntityType
-from toscaparser.elements.relationshiptype import RelationshipType
-from toscaparser.elements.statefulentitytype import StatefulEntityType
-from toscaparser.utils.gettextutils import _
-
-
-GET_PROPERTY = 'get_property'
-GET_ATTRIBUTE = 'get_attribute'
-GET_INPUT = 'get_input'
-GET_OPERATION_OUTPUT = 'get_operation_output'
-CONCAT = 'concat'
-TOKEN = 'token'
-
-SELF = 'SELF'
-HOST = 'HOST'
-TARGET = 'TARGET'
-SOURCE = 'SOURCE'
-
-HOSTED_ON = 'tosca.relationships.HostedOn'
-
-
-@six.add_metaclass(abc.ABCMeta)
-class Function(object):
- """An abstract type for representing a Tosca template function."""
-
- def __init__(self, tosca_tpl, context, name, args):
- self.tosca_tpl = tosca_tpl
- self.context = context
- self.name = name
- self.args = args
- self.validate()
-
- @abc.abstractmethod
- def result(self):
- """Invokes the function and returns its result
-
- Some methods invocation may only be relevant on runtime (for example,
- getting runtime properties) and therefore its the responsibility of
- the orchestrator/translator to take care of such functions invocation.
-
- :return: Function invocation result.
- """
- return {self.name: self.args}
-
- @abc.abstractmethod
- def validate(self):
- """Validates function arguments."""
- pass
-*/
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetAttribute.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetAttribute.java
deleted file mode 100644
index 549073b..0000000
--- a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetAttribute.java
+++ /dev/null
@@ -1,535 +0,0 @@
-package org.openecomp.sdc.toscaparser.api.functions;
-
-import java.util.ArrayList;
-import java.util.LinkedHashMap;
-
-import org.openecomp.sdc.toscaparser.api.*;
-import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector;
-import org.openecomp.sdc.toscaparser.api.elements.AttributeDef;
-import org.openecomp.sdc.toscaparser.api.elements.CapabilityTypeDef;
-import org.openecomp.sdc.toscaparser.api.elements.DataType;
-import org.openecomp.sdc.toscaparser.api.elements.EntityType;
-import org.openecomp.sdc.toscaparser.api.elements.NodeType;
-import org.openecomp.sdc.toscaparser.api.elements.PropertyDef;
-import org.openecomp.sdc.toscaparser.api.elements.RelationshipType;
-import org.openecomp.sdc.toscaparser.api.elements.StatefulEntityType;
-import org.openecomp.sdc.toscaparser.api.elements.constraints.Schema;
-import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder;
-
-public class GetAttribute extends Function {
- // Get an attribute value of an entity defined in the service template
-
-	// Node template attribute values are set at runtime and therefore it is the
- // responsibility of the Tosca engine to implement the evaluation of
- // get_attribute functions.
-
- // Arguments:
-
- // * Node template name | HOST.
- // * Attribute name.
-
- // If the HOST keyword is passed as the node template name argument the
- // function will search each node template along the HostedOn relationship
- // chain until a node which contains the attribute is found.
-
- // Examples:
-
- // * { get_attribute: [ server, private_address ] }
- // * { get_attribute: [ HOST, private_address ] }
- // * { get_attribute: [ HOST, private_address, 0 ] }
- // * { get_attribute: [ HOST, private_address, 0, some_prop] }
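-
-    /* Illustrative sketch (not part of the original file): how a parsed
-       get_attribute call maps onto this class. ttpl and serverNode are placeholder
-       names (assumptions) for the example only.
-
-    // YAML:  { get_attribute: [ HOST, private_address ] }
-    ArrayList<Object> gaArgs = new ArrayList<Object>(Arrays.asList("HOST", "private_address"));
-    GetAttribute ga = new GetAttribute(ttpl, serverNode, "get_attribute", gaArgs);
-    // validate() runs in the constructor and walks the HostedOn chain for HOST;
-    // result() returns the function object itself - evaluation is deferred to the
-    // orchestrator at runtime.
-    Object deferred = ga.result();
-    */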
-
- public GetAttribute(TopologyTemplate ttpl,Object context,String name,ArrayList<Object> args) {
- super(ttpl,context,name,args);
- }
-
- @Override
- void validate() {
- if(args.size() < 2) {
- ThreadLocalsHolder.getCollector().appendException(
-				"ValueError: Illegal arguments for function \"get_attribute\". Expected arguments: \"node-template-name\", \"req-or-cap\" (optional), \"property name\"");
- return;
- }
- else if(args.size() == 2) {
- _findNodeTemplateContainingAttribute();
- }
- else {
- NodeTemplate nodeTpl = _findNodeTemplate((String)args.get(0));
- if(nodeTpl == null) {
- return;
- }
- int index = 2;
- AttributeDef attr = nodeTpl.getTypeDefinition().getAttributeDefValue((String)args.get(1));
-			if(attr == null) {
-				// not declared as an attribute - then check the req or caps
-				index = 3;
-				attr = _findReqOrCapAttribute((String)args.get(1),(String)args.get(2));
-				if(attr == null) {
-					return;
-				}
-			}
-
- String valueType = (String)attr.getSchema().get("type");
- if(args.size() > index) {
- for(Object elem: args.subList(index,args.size())) {
- if(valueType.equals("list")) {
- if(!(elem instanceof Integer)) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "ValueError: Illegal arguments for function \"get_attribute\" \"%s\". Expected positive integer argument",
- elem.toString()));
- }
- Object ob = attr.getSchema().get("entry_schema");
- valueType = (String)
- ((LinkedHashMap<String,Object>)ob).get("type");
- }
- else if(valueType.equals("map")) {
- Object ob = attr.getSchema().get("entry_schema");
- valueType = (String)
- ((LinkedHashMap<String,Object>)ob).get("type");
- }
- else {
- boolean bFound = false;
- for(String p: Schema.PROPERTY_TYPES) {
- if(p.equals(valueType)) {
- bFound = true;
- break;
- }
- }
- if(bFound) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
-							"ValueError: Illegal arguments for function \"get_attribute\". Unexpected attribute/index value \"%s\"",
- elem));
- return;
- }
- else { // It is a complex type
- DataType dataType = new DataType(valueType,null);
- LinkedHashMap<String,PropertyDef> props =
- dataType.getAllProperties();
- PropertyDef prop = props.get((String)elem);
- if(prop != null) {
- valueType = (String)prop.getSchema().get("type");
- }
- else {
- ThreadLocalsHolder.getCollector().appendException(String.format(
-								"KeyError: Illegal arguments for function \"get_attribute\". Attribute name \"%s\" not found in \"%s\"",
- elem,valueType));
- }
- }
- }
- }
- }
- }
- }
-
- @Override
- public Object result() {
- return this;
- }
-
- private NodeTemplate getReferencedNodeTemplate() {
- // Gets the NodeTemplate instance the get_attribute function refers to
-
- // If HOST keyword was used as the node template argument, the node
- // template which contains the attribute along the HostedOn relationship
- // chain will be returned.
-
- return _findNodeTemplateContainingAttribute();
-
- }
-
- // Attributes can be explicitly created as part of the type definition
- // or a property name can be implicitly used as an attribute name
- private NodeTemplate _findNodeTemplateContainingAttribute() {
- NodeTemplate nodeTpl = _findNodeTemplate((String)args.get(0));
- if(nodeTpl != null &&
- !_attributeExistsInType(nodeTpl.getTypeDefinition()) &&
- !nodeTpl.getProperties().keySet().contains(getAttributeName())) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "KeyError: Attribute \"%s\" was not found in node template \"%s\"",
- getAttributeName(),nodeTpl.getName()));
- }
- return nodeTpl;
- }
-
- private boolean _attributeExistsInType(StatefulEntityType typeDefinition) {
- LinkedHashMap<String,AttributeDef> attrsDef = typeDefinition.getAttributesDef();
- return attrsDef.get(getAttributeName()) != null;
- }
-
- private NodeTemplate _findHostContainingAttribute(String nodeTemplateName) {
- NodeTemplate nodeTemplate = _findNodeTemplate(nodeTemplateName);
- if(nodeTemplate != null) {
- LinkedHashMap<String,Object> hostedOnRel =
- (LinkedHashMap<String,Object>)EntityType.TOSCA_DEF.get(HOSTED_ON);
- for(Object ro: nodeTemplate.getRequirements()) {
- if(ro != null && ro instanceof LinkedHashMap) {
- LinkedHashMap<String,Object> r = (LinkedHashMap<String,Object>)ro;
- for(String requirement: r.keySet()) {
- String targetName = (String)r.get(requirement);
- NodeTemplate targetNode = _findNodeTemplate(targetName);
- NodeType targetType = (NodeType)targetNode.getTypeDefinition();
- for(CapabilityTypeDef capability: targetType.getCapabilitiesObjects()) {
-// if(((ArrayList<String>)hostedOnRel.get("valid_target_types")).contains(capability.getType())) {
- if(capability.inheritsFrom((ArrayList<String>)hostedOnRel.get("valid_target_types"))) {
- if(_attributeExistsInType(targetType)) {
- return targetNode;
- }
- return _findHostContainingAttribute(targetName);
- }
- }
- }
- }
- }
- }
- return null;
- }
-
-
- private NodeTemplate _findNodeTemplate(String nodeTemplateName) {
- if(nodeTemplateName.equals(HOST)) {
- // Currently this is the only way to tell whether the function
- // is used within the outputs section of the TOSCA template.
- if(context instanceof ArrayList) {
- ThreadLocalsHolder.getCollector().appendException(
- "ValueError: \"get_attribute: [ HOST, ... ]\" is not allowed in \"outputs\" section of the TOSCA template");
- return null;
- }
- NodeTemplate nodeTpl = _findHostContainingAttribute(SELF);
- if(nodeTpl == null) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "ValueError: \"get_attribute: [ HOST, ... ]\" was used in " +
- "node template \"%s\" but \"%s\" was not found in " +
- "the relationship chain",((NodeTemplate)context).getName(),HOSTED_ON));
- return null;
- }
- return nodeTpl;
- }
- if(nodeTemplateName.equals(TARGET)) {
- if(!(((EntityTemplate)context).getTypeDefinition() instanceof RelationshipType)) {
- ThreadLocalsHolder.getCollector().appendException(
- "KeyError: \"TARGET\" keyword can only be used in context " +
-					"to \"Relationships\" target node");
- return null;
- }
- return ((RelationshipTemplate)context).getTarget();
- }
-		if(nodeTemplateName.equals(SOURCE)) {
-			if(!(((EntityTemplate)context).getTypeDefinition() instanceof RelationshipType)) {
-				ThreadLocalsHolder.getCollector().appendException(
-					"KeyError: \"SOURCE\" keyword can only be used in context " +
-					"to \"Relationships\" source node");
-				return null;
-			}
-			return ((RelationshipTemplate)context).getSource();
-		}
- String name;
- if(nodeTemplateName.equals(SELF) && !(context instanceof ArrayList)) {
- name = ((NodeTemplate)context).getName();
- }
- else {
- name = nodeTemplateName;
- }
- for(NodeTemplate nt: toscaTpl.getNodeTemplates()) {
- if(nt.getName().equals(name)) {
- return nt;
- }
- }
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "KeyError: Node template \"%s\" was not found",nodeTemplateName));
- return null;
- }
-
- public AttributeDef _findReqOrCapAttribute(String reqOrCap,String attrName) {
-
- NodeTemplate nodeTpl = _findNodeTemplate((String)args.get(0));
- // Find attribute in node template's requirements
- for(Object ro: nodeTpl.getRequirements()) {
- if(ro != null && ro instanceof LinkedHashMap) {
- LinkedHashMap<String,Object> r = (LinkedHashMap<String,Object>)ro;
- for(String req: r.keySet()) {
- String nodeName = (String)r.get(req);
- if(req.equals(reqOrCap)) {
- NodeTemplate nodeTemplate = _findNodeTemplate(nodeName);
- return _getCapabilityAttribute(nodeTemplate,req,attrName);
- }
- }
- }
- }
- // If requirement was not found, look in node template's capabilities
- return _getCapabilityAttribute(nodeTpl,reqOrCap,attrName);
- }
-
- private AttributeDef _getCapabilityAttribute(NodeTemplate nodeTemplate,
- String capabilityName,
- String attrName) {
- // Gets a node template capability attribute
- LinkedHashMap<String,Capability> caps = nodeTemplate.getCapabilities();
- if(caps != null && caps.keySet().contains(capabilityName)) {
- Capability cap = caps.get(capabilityName);
- AttributeDef attribute = null;
- LinkedHashMap<String,AttributeDef> attrs =
- cap.getDefinition().getAttributesDef();
- if(attrs != null && attrs.keySet().contains(attrName)) {
- attribute = attrs.get(attrName);
- }
- if(attribute == null) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "KeyError: Attribute \"%s\" was not found in capability \"%s\" of node template \"%s\" referenced from node template \"%s\"",
- attrName,capabilityName,nodeTemplate.getName(),((NodeTemplate)context).getName()));
- }
- return attribute;
- }
- String msg = String.format(
- "Requirement/Capability \"%s\" referenced from node template \"%s\" was not found in node template \"%s\"",
- capabilityName,((NodeTemplate)context).getName(),nodeTemplate.getName());
- ThreadLocalsHolder.getCollector().appendException("KeyError: " + msg);
- return null;
- }
-
- String getNodeTemplateName() {
- return (String)args.get(0);
- }
-
- String getAttributeName() {
- return (String)args.get(1);
- }
-
-}
-
-/*python
-
-class GetAttribute(Function):
-"""Get an attribute value of an entity defined in the service template
-
-Node template attributes values are set in runtime and therefore its the
-responsibility of the Tosca engine to implement the evaluation of
-get_attribute functions.
-
-Arguments:
-
-* Node template name | HOST.
-* Attribute name.
-
-If the HOST keyword is passed as the node template name argument the
-function will search each node template along the HostedOn relationship
-chain until a node which contains the attribute is found.
-
-Examples:
-
-* { get_attribute: [ server, private_address ] }
-* { get_attribute: [ HOST, private_address ] }
-* { get_attribute: [ HOST, private_address, 0 ] }
-* { get_attribute: [ HOST, private_address, 0, some_prop] }
-"""
-
-def validate(self):
- if len(self.args) < 2:
- ExceptionCollector.appendException(
- ValueError(_('Illegal arguments for function "{0}". Expected '
- 'arguments: "node-template-name", "req-or-cap"'
- '(optional), "property name"'
- ).format(GET_ATTRIBUTE)))
- return
- elif len(self.args) == 2:
- self._find_node_template_containing_attribute()
- else:
- node_tpl = self._find_node_template(self.args[0])
- if node_tpl is None:
- return
- index = 2
- attrs = node_tpl.type_definition.get_attributes_def()
- found = [attrs[self.args[1]]] if self.args[1] in attrs else []
- if found:
- attr = found[0]
- else:
- index = 3
- # then check the req or caps
- attr = self._find_req_or_cap_attribute(self.args[1],
- self.args[2])
-
- value_type = attr.schema['type']
- if len(self.args) > index:
- for elem in self.args[index:]:
- if value_type == "list":
- if not isinstance(elem, int):
- ExceptionCollector.appendException(
- ValueError(_('Illegal arguments for function'
- ' "{0}". "{1}" Expected positive'
- ' integer argument'
- ).format(GET_ATTRIBUTE, elem)))
- value_type = attr.schema['entry_schema']['type']
- elif value_type == "map":
- value_type = attr.schema['entry_schema']['type']
- elif value_type in Schema.PROPERTY_TYPES:
- ExceptionCollector.appendException(
- ValueError(_('Illegal arguments for function'
- ' "{0}". Unexpected attribute/'
- 'index value "{1}"'
- ).format(GET_ATTRIBUTE, elem)))
- return
- else: # It is a complex type
- data_type = DataType(value_type)
- props = data_type.get_all_properties()
- found = [props[elem]] if elem in props else []
- if found:
- prop = found[0]
- value_type = prop.schema['type']
- else:
- ExceptionCollector.appendException(
- KeyError(_('Illegal arguments for function'
- ' "{0}". Attribute name "{1}" not'
- ' found in "{2}"'
- ).format(GET_ATTRIBUTE,
- elem,
- value_type)))
-
-def result(self):
- return self
-
-def get_referenced_node_template(self):
- """Gets the NodeTemplate instance the get_attribute function refers to.
-
- If HOST keyword was used as the node template argument, the node
- template which contains the attribute along the HostedOn relationship
- chain will be returned.
- """
- return self._find_node_template_containing_attribute()
-
-# Attributes can be explicitly created as part of the type definition
-# or a property name can be implicitly used as an attribute name
-def _find_node_template_containing_attribute(self):
- node_tpl = self._find_node_template(self.args[0])
- if node_tpl and \
- not self._attribute_exists_in_type(node_tpl.type_definition) \
- and self.attribute_name not in node_tpl.get_properties():
- ExceptionCollector.appendException(
- KeyError(_('Attribute "%(att)s" was not found in node '
- 'template "%(ntpl)s".') %
- {'att': self.attribute_name,
- 'ntpl': node_tpl.name}))
- return node_tpl
-
-def _attribute_exists_in_type(self, type_definition):
- attrs_def = type_definition.get_attributes_def()
- found = [attrs_def[self.attribute_name]] \
- if self.attribute_name in attrs_def else []
- return len(found) == 1
-
-def _find_host_containing_attribute(self, node_template_name=SELF):
- node_template = self._find_node_template(node_template_name)
- if node_template:
- hosted_on_rel = EntityType.TOSCA_DEF[HOSTED_ON]
- for r in node_template.requirements:
- for requirement, target_name in r.items():
- target_node = self._find_node_template(target_name)
- target_type = target_node.type_definition
- for capability in target_type.get_capabilities_objects():
- if capability.type in \
- hosted_on_rel['valid_target_types']:
- if self._attribute_exists_in_type(target_type):
- return target_node
- return self._find_host_containing_attribute(
- target_name)
-
-def _find_node_template(self, node_template_name):
- if node_template_name == HOST:
- # Currently this is the only way to tell whether the function
- # is used within the outputs section of the TOSCA template.
- if isinstance(self.context, list):
- ExceptionCollector.appendException(
- ValueError(_(
- '"get_attribute: [ HOST, ... ]" is not allowed in '
- '"outputs" section of the TOSCA template.')))
- return
- node_tpl = self._find_host_containing_attribute()
- if not node_tpl:
- ExceptionCollector.appendException(
- ValueError(_(
- '"get_attribute: [ HOST, ... ]" was used in node '
- 'template "{0}" but "{1}" was not found in '
- 'the relationship chain.').format(self.context.name,
- HOSTED_ON)))
- return
- return node_tpl
- if node_template_name == TARGET:
- if not isinstance(self.context.type_definition, RelationshipType):
- ExceptionCollector.appendException(
- KeyError(_('"TARGET" keyword can only be used in context'
- ' to "Relationships" target node')))
- return
- return self.context.target
- if node_template_name == SOURCE:
- if not isinstance(self.context.type_definition, RelationshipType):
- ExceptionCollector.appendException(
- KeyError(_('"SOURCE" keyword can only be used in context'
- ' to "Relationships" source node')))
- return
- return self.context.source
- name = self.context.name \
- if node_template_name == SELF and \
- not isinstance(self.context, list) \
- else node_template_name
- for node_template in self.tosca_tpl.nodetemplates:
- if node_template.name == name:
- return node_template
- ExceptionCollector.appendException(
- KeyError(_(
- 'Node template "{0}" was not found.'
- ).format(node_template_name)))
-
-def _find_req_or_cap_attribute(self, req_or_cap, attr_name):
- node_tpl = self._find_node_template(self.args[0])
- # Find attribute in node template's requirements
- for r in node_tpl.requirements:
- for req, node_name in r.items():
- if req == req_or_cap:
- node_template = self._find_node_template(node_name)
- return self._get_capability_attribute(
- node_template,
- req,
- attr_name)
- # If requirement was not found, look in node template's capabilities
- return self._get_capability_attribute(node_tpl,
- req_or_cap,
- attr_name)
-
-def _get_capability_attribute(self,
- node_template,
- capability_name,
- attr_name):
- """Gets a node template capability attribute."""
- caps = node_template.get_capabilities()
- if caps and capability_name in caps.keys():
- cap = caps[capability_name]
- attribute = None
- attrs = cap.definition.get_attributes_def()
- if attrs and attr_name in attrs.keys():
- attribute = attrs[attr_name]
- if not attribute:
- ExceptionCollector.appendException(
- KeyError(_('Attribute "%(attr)s" was not found in '
- 'capability "%(cap)s" of node template '
- '"%(ntpl1)s" referenced from node template '
- '"%(ntpl2)s".') % {'attr': attr_name,
- 'cap': capability_name,
- 'ntpl1': node_template.name,
- 'ntpl2': self.context.name}))
- return attribute
- msg = _('Requirement/Capability "{0}" referenced from node template '
- '"{1}" was not found in node template "{2}".').format(
- capability_name,
- self.context.name,
- node_template.name)
- ExceptionCollector.appendException(KeyError(msg))
-
-@property
-def node_template_name(self):
- return self.args[0]
-
-@property
-def attribute_name(self):
- return self.args[1]
-*/ \ No newline at end of file
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetInput.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetInput.java
deleted file mode 100644
index 4332f70..0000000
--- a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetInput.java
+++ /dev/null
@@ -1,110 +0,0 @@
-package org.openecomp.sdc.toscaparser.api.functions;
-
-import java.util.ArrayList;
-import java.util.LinkedHashMap;
-
-import org.openecomp.sdc.toscaparser.api.DataEntity;
-import org.openecomp.sdc.toscaparser.api.TopologyTemplate;
-import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector;
-import org.openecomp.sdc.toscaparser.api.parameters.Input;
-import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder;
-
-public class GetInput extends Function {
-
- public GetInput(TopologyTemplate toscaTpl,Object context,String name,ArrayList<Object> _args) {
- super(toscaTpl,context,name,_args);
-
- }
-
- @Override
- void validate() {
- if(args.size() != 1) {
- //PA - changed to WARNING from CRITICAL after talking to Renana, 22/05/2017
- ThreadLocalsHolder.getCollector().appendWarning(String.format(
- "ValueError: Expected one argument for function \"get_input\" but received \"%s\"",
- args.toString()));
- }
- boolean bFound = false;
- for(Input inp: toscaTpl.getInputs()) {
- if(inp.getName().equals(args.get(0))) {
- bFound = true;
- break;
- }
- }
- if(!bFound) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "UnknownInputError: Unknown input \"%s\"",args.get(0)));
- }
- }
-
- public Object result() {
- if(toscaTpl.getParsedParams() != null &&
- toscaTpl.getParsedParams().get(getInputName()) != null) {
- LinkedHashMap<String,Object> ttinp = (LinkedHashMap<String,Object>)toscaTpl.getTpl().get("inputs");
- LinkedHashMap<String,Object> ttinpinp = (LinkedHashMap<String,Object>)ttinp.get(getInputName());
- String type = (String)ttinpinp.get("type");
-
- return DataEntity.validateDatatype(
- type, toscaTpl.getParsedParams().get(getInputName()),null,null,null);
- }
-
- Input inputDef = null;
- for(Input inpDef: toscaTpl.getInputs()) {
- if(getInputName().equals(inpDef.getName())) {
- inputDef = inpDef;
- break;
- }
- }
- if(inputDef != null) {
- return inputDef.getDefault();
- }
- return null;
- }
-
- public String getInputName() {
- return (String)args.get(0);
- }
-
-}
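-
-/* Illustrative sketch (not part of the original file): how result() resolves a
-   get_input call. If the template was parsed with explicit input values
-   (parsedParams), the supplied value is validated against the declared input
-   type; otherwise the input's declared default is returned. topologyTemplate is
-   a placeholder name (assumption).
-
-// inputs:
-//   db_port:
-//     type: integer
-//     default: 3306
-ArrayList<Object> giArgs = new ArrayList<Object>(Arrays.asList("db_port"));
-GetInput gi = new GetInput(topologyTemplate, null, "get_input", giArgs);
-Object port = gi.result();   // 3306 unless parsedParams supplies a db_port value
-*/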
-
-/*python
-
-class GetInput(Function):
-"""Get a property value declared within the input of the service template.
-
-Arguments:
-
-* Input name.
-
-Example:
-
-* get_input: port
-"""
-
-def validate(self):
- if len(self.args) != 1:
- ExceptionCollector.appendException(
- ValueError(_(
- 'Expected one argument for function "get_input" but '
- 'received "%s".') % self.args))
- inputs = [input.name for input in self.tosca_tpl.inputs]
- if self.args[0] not in inputs:
- ExceptionCollector.appendException(
- UnknownInputError(input_name=self.args[0]))
-
-def result(self):
- if self.tosca_tpl.parsed_params and \
- self.input_name in self.tosca_tpl.parsed_params:
- return DataEntity.validate_datatype(
- self.tosca_tpl.tpl['inputs'][self.input_name]['type'],
- self.tosca_tpl.parsed_params[self.input_name])
-
- input = [input_def for input_def in self.tosca_tpl.inputs
- if self.input_name == input_def.name][0]
- return input.default
-
-@property
-def input_name(self):
- return self.args[0]
-
-*/ \ No newline at end of file
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetOperationOutput.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetOperationOutput.java
deleted file mode 100644
index 22f2cd7..0000000
--- a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetOperationOutput.java
+++ /dev/null
@@ -1,225 +0,0 @@
-package org.openecomp.sdc.toscaparser.api.functions;
-
-import java.util.ArrayList;
-
-import org.openecomp.sdc.toscaparser.api.*;
-import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector;
-import org.openecomp.sdc.toscaparser.api.elements.InterfacesDef;
-import org.openecomp.sdc.toscaparser.api.elements.RelationshipType;
-import org.openecomp.sdc.toscaparser.api.elements.StatefulEntityType;
-import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder;
-
-public class GetOperationOutput extends Function {
-
- public GetOperationOutput(TopologyTemplate ttpl,Object context,String name,ArrayList<Object> args) {
- super(ttpl,context,name,args);
- }
-
- @Override
- public void validate() {
- if(args.size() == 4) {
- _findNodeTemplate((String)args.get(0));
- String interfaceName = _findInterfaceName((String)args.get(1));
- _findOperationName(interfaceName,(String)args.get(2));
- }
- else {
- ThreadLocalsHolder.getCollector().appendException(
- "ValueError: Illegal arguments for function \"get_operation_output\". " +
- "Expected arguments: \"template_name\",\"interface_name\"," +
- "\"operation_name\",\"output_variable_name\"");
- }
- }
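-
-    /* Illustrative sketch (not part of the original file): the four-argument form
-       that validate() expects. ttpl and dbNode are placeholder names (assumptions).
-
-    // YAML:  { get_operation_output: [ SELF, Standard, create, generated_password ] }
-    ArrayList<Object> gooArgs = new ArrayList<Object>(
-        Arrays.asList("SELF", "Standard", "create", "generated_password"));
-    GetOperationOutput goo = new GetOperationOutput(ttpl, dbNode, "get_operation_output", gooArgs);
-    Object deferred = goo.result();   // returns the function itself; resolved by the orchestrator
-    */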
-
- private String _findInterfaceName(String _interfaceName) {
- boolean bFound = false;
- for(String sect: InterfacesDef.SECTIONS) {
- if(sect.equals(_interfaceName)) {
- bFound = true;
- break;
- }
- }
- if(bFound) {
- return _interfaceName;
- }
- else {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "ValueError: invalid interface name \"%s\" in \"get_operation_output\"",
- _interfaceName));
- return null;
- }
- }
-
- private String _findOperationName(String interfaceName,String operationName) {
-
- if(interfaceName.equals("Configure") ||
- interfaceName.equals("tosca.interfaces.node.relationship.Configure")) {
- boolean bFound = false;
- for(String sect: StatefulEntityType.interfacesRelationshipConfigureOperations) {
- if(sect.equals(operationName)) {
- bFound = true;
- break;
- }
- }
- if(bFound) {
- return operationName;
- }
- else {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "ValueError: Invalid operation of Configure interface \"%s\" in \"get_operation_output\"",
- operationName));
- return null;
- }
- }
- if(interfaceName.equals("Standard") ||
- interfaceName.equals("tosca.interfaces.node.lifecycle.Standard")) {
- boolean bFound = false;
- for(String sect: StatefulEntityType.interfacesNodeLifecycleOperations) {
- if(sect.equals(operationName)) {
- bFound = true;
- break;
- }
- }
- if(bFound) {
- return operationName;
- }
- else {
- ThreadLocalsHolder.getCollector().appendException(String.format(
-					"ValueError: Invalid operation of Standard interface \"%s\" in \"get_operation_output\"",
- operationName));
- return null;
- }
- }
- else {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "ValueError: Invalid interface name \"%s\" in \"get_operation_output\"",
- interfaceName));
- return null;
- }
- }
-
- private NodeTemplate _findNodeTemplate(String nodeTemplateName) {
- if(nodeTemplateName.equals(TARGET)) {
- if(!(((EntityTemplate)context).getTypeDefinition() instanceof RelationshipType)) {
- ThreadLocalsHolder.getCollector().appendException(
- "KeyError: \"TARGET\" keyword can only be used in context " +
-					"to \"Relationships\" target node");
- return null;
- }
- return ((RelationshipTemplate)context).getTarget();
- }
-		if(nodeTemplateName.equals(SOURCE)) {
-			if(!(((EntityTemplate)context).getTypeDefinition() instanceof RelationshipType)) {
-				ThreadLocalsHolder.getCollector().appendException(
-					"KeyError: \"SOURCE\" keyword can only be used in context " +
-					"to \"Relationships\" source node");
-				return null;
-			}
-			return ((RelationshipTemplate)context).getSource();
-		}
- String name;
- if(nodeTemplateName.equals(SELF) && !(context instanceof ArrayList)) {
- name = ((NodeTemplate)context).getName();
- }
- else {
- name = nodeTemplateName;
- }
- for(NodeTemplate nt: toscaTpl.getNodeTemplates()) {
-			if(nt.getName().equals(name)) {
- return nt;
- }
- }
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "KeyError: Node template \"%s\" was not found",nodeTemplateName));
- return null;
- }
-
- @Override
- public Object result() {
- return this;
- }
-
-}
-
-/*python
-
-class GetOperationOutput(Function):
-def validate(self):
- if len(self.args) == 4:
- self._find_node_template(self.args[0])
- interface_name = self._find_interface_name(self.args[1])
- self._find_operation_name(interface_name, self.args[2])
- else:
- ExceptionCollector.appendException(
- ValueError(_('Illegal arguments for function "{0}". Expected '
- 'arguments: "template_name","interface_name",'
- '"operation_name","output_variable_name"'
- ).format(GET_OPERATION_OUTPUT)))
- return
-
-def _find_interface_name(self, interface_name):
- if interface_name in toscaparser.elements.interfaces.SECTIONS:
- return interface_name
- else:
- ExceptionCollector.appendException(
- ValueError(_('Enter a valid interface name'
- ).format(GET_OPERATION_OUTPUT)))
- return
-
-def _find_operation_name(self, interface_name, operation_name):
- if(interface_name == 'Configure' or
- interface_name == 'tosca.interfaces.node.relationship.Configure'):
- if(operation_name in
- StatefulEntityType.
- interfaces_relationship_configure_operations):
- return operation_name
- else:
- ExceptionCollector.appendException(
- ValueError(_('Enter an operation of Configure interface'
- ).format(GET_OPERATION_OUTPUT)))
- return
- elif(interface_name == 'Standard' or
- interface_name == 'tosca.interfaces.node.lifecycle.Standard'):
- if(operation_name in
- StatefulEntityType.interfaces_node_lifecycle_operations):
- return operation_name
- else:
- ExceptionCollector.appendException(
- ValueError(_('Enter an operation of Standard interface'
- ).format(GET_OPERATION_OUTPUT)))
- return
- else:
- ExceptionCollector.appendException(
- ValueError(_('Enter a valid operation name'
- ).format(GET_OPERATION_OUTPUT)))
- return
-
-def _find_node_template(self, node_template_name):
- if node_template_name == TARGET:
- if not isinstance(self.context.type_definition, RelationshipType):
- ExceptionCollector.appendException(
- KeyError(_('"TARGET" keyword can only be used in context'
- ' to "Relationships" target node')))
- return
- return self.context.target
- if node_template_name == SOURCE:
- if not isinstance(self.context.type_definition, RelationshipType):
- ExceptionCollector.appendException(
- KeyError(_('"SOURCE" keyword can only be used in context'
- ' to "Relationships" source node')))
- return
- return self.context.source
- name = self.context.name \
- if node_template_name == SELF and \
- not isinstance(self.context, list) \
- else node_template_name
- for node_template in self.tosca_tpl.nodetemplates:
- if node_template.name == name:
- return node_template
- ExceptionCollector.appendException(
- KeyError(_(
- 'Node template "{0}" was not found.'
- ).format(node_template_name)))
-
-def result(self):
- return self
-*/ \ No newline at end of file
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetProperty.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetProperty.java
deleted file mode 100644
index 3550542..0000000
--- a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/functions/GetProperty.java
+++ /dev/null
@@ -1,636 +0,0 @@
-package org.openecomp.sdc.toscaparser.api.functions;
-
-import java.util.ArrayList;
-import java.util.LinkedHashMap;
-
-import org.openecomp.sdc.toscaparser.api.*;
-import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector;
-import org.openecomp.sdc.toscaparser.api.elements.CapabilityTypeDef;
-import org.openecomp.sdc.toscaparser.api.elements.EntityType;
-import org.openecomp.sdc.toscaparser.api.elements.NodeType;
-import org.openecomp.sdc.toscaparser.api.elements.PropertyDef;
-import org.openecomp.sdc.toscaparser.api.elements.RelationshipType;
-import org.openecomp.sdc.toscaparser.api.elements.StatefulEntityType;
-import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder;
-
-public class GetProperty extends Function {
- // Get a property value of an entity defined in the same service template
-
- // Arguments:
-
- // * Node template name | SELF | HOST | SOURCE | TARGET.
- // * Requirement or capability name (optional).
- // * Property name.
-
- // If requirement or capability name is specified, the behavior is as follows:
- // The req or cap name is first looked up in the specified node template's
- // requirements.
- // If found, it would search for a matching capability
-	// of another node template and get its property as specified in function
- // arguments.
- // Otherwise, the req or cap name would be looked up in the specified
- // node template's capabilities and if found, it would return the property of
- // the capability as specified in function arguments.
-
- // Examples:
-
- // * { get_property: [ mysql_server, port ] }
- // * { get_property: [ SELF, db_port ] }
- // * { get_property: [ SELF, database_endpoint, port ] }
- // * { get_property: [ SELF, database_endpoint, port, 1 ] }
-
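-    /* Illustrative sketch (not part of the original file): the two argument shapes
-       handled by this class. ttpl and webNode are placeholder names (assumptions)
-       for the example only.
-
-    // Direct property:             { get_property: [ mysql_server, port ] }
-    ArrayList<Object> direct = new ArrayList<Object>(Arrays.asList("mysql_server", "port"));
-    Object port = new GetProperty(ttpl, webNode, "get_property", direct).result();
-
-    // Via requirement/capability:  { get_property: [ SELF, database_endpoint, port ] }
-    ArrayList<Object> viaCap = new ArrayList<Object>(Arrays.asList("SELF", "database_endpoint", "port"));
-    Object capPort = new GetProperty(ttpl, webNode, "get_property", viaCap).result();
-    */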
-
- public GetProperty(TopologyTemplate ttpl,Object context,String name,ArrayList<Object> args) {
- super(ttpl,context,name,args);
- }
-
- @Override
- void validate() {
- if(args.size() < 2) {
- ThreadLocalsHolder.getCollector().appendException(
-				"ValueError: Illegal arguments for function \"get_property\". Expected arguments: \"node-template-name\", \"req-or-cap\" (optional), \"property name\"");
- return;
- }
- if(args.size() == 2) {
- Property foundProp = _findProperty((String)args.get(1));
- if(foundProp == null) {
- return;
- }
- Object prop = foundProp.getValue();
-			if(!(prop instanceof Function)) {
-				// a raw value may itself be a nested function map; resolve it as the reference parser does
-				Function.getFunction(toscaTpl,context, prop);
-			}
- }
- else if(args.size() >= 3) {
-			// do not use _findProperty, to avoid raising a KeyError
-			// if the prop is not found
-			// First check if there is a property with this name
- NodeTemplate nodeTpl = _findNodeTemplate((String)args.get(0));
- LinkedHashMap<String,Property> props;
- if(nodeTpl != null) {
- props = nodeTpl.getProperties();
- }
- else {
- props = new LinkedHashMap<>();
- }
- int index = 2;
- Object propertyValue;
- if(props.get(args.get(1)) != null) {
- propertyValue = ((Property)props.get(args.get(1))).getValue();
- }
- else {
- index = 3;
- // then check the req or caps
- propertyValue = _findReqOrCapProperty((String)args.get(1),(String)args.get(2));
- }
-
- if(args.size() > index) {
-				for(Object elem: args.subList(index,args.size())) {
- if(propertyValue instanceof ArrayList) {
- int intElem = (int)elem;
- propertyValue = _getIndexValue(propertyValue,intElem);
- }
- else {
- propertyValue = _getAttributeValue(propertyValue,(String)elem);
- }
- }
- }
- }
- }
-
- @SuppressWarnings("unchecked")
- private Object _findReqOrCapProperty(String reqOrCap,String propertyName) {
- NodeTemplate nodeTpl = _findNodeTemplate((String)args.get(0));
- if(nodeTpl == null) {
- return null;
- }
- // look for property in node template's requirements
- for(Object r: nodeTpl.getRequirements()) {
- if(r instanceof LinkedHashMap) {
- LinkedHashMap<String,Object> rlist = (LinkedHashMap<String,Object>)r;
- for(String req: rlist.keySet()) {
- String nodeName = (String)rlist.get(req);
- if(req.equals(reqOrCap)) {
- NodeTemplate nodeTemplate = _findNodeTemplate(nodeName);
- return _getCapabilityProperty(nodeTemplate,req,propertyName,true);
- }
- }
- }
- }
- // If requirement was not found, look in node template's capabilities
- return _getCapabilityProperty(nodeTpl,reqOrCap,propertyName,true);
- }
-
- private Object _getCapabilityProperty(NodeTemplate nodeTemplate,
- String capabilityName,
- String propertyName,
- boolean throwErrors) {
-
- // Gets a node template capability property
- Object property = null;
- LinkedHashMap<String,Capability> caps = nodeTemplate.getCapabilities();
- if(caps != null && caps.get(capabilityName) != null) {
- Capability cap = caps.get(capabilityName);
- LinkedHashMap<String,Property> props = cap.getProperties();
- if(props != null && props.get(propertyName) != null) {
- property = ((Property)props.get(propertyName)).getValue();
- }
- if(property == null && throwErrors) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "KeyError: Property \"%s\" was not found in capability \"%s\" of node template \"%s\" referenced from node template \"%s\"",
- propertyName,capabilityName,nodeTemplate.getName(),((NodeTemplate)context).getName()));
- }
- return property;
- }
- if(throwErrors) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "KeyError: Requirement/Capability \"%s\" referenced from node template \"%s\" was not found in node template \"%s\"",
- capabilityName,((NodeTemplate)context).getName(),nodeTemplate.getName()));
- }
-
- return null;
- }
-
- private Property _findProperty(String propertyName) {
- NodeTemplate nodeTpl = _findNodeTemplate((String)args.get(0));
- if(nodeTpl == null) {
- return null;
- }
- LinkedHashMap<String,Property> props = nodeTpl.getProperties();
- Property found = props.get(propertyName);
- if(found == null) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "KeyError: Property \"%s\" was not found in node template \"%s\"",
- propertyName,nodeTpl.getName()));
- }
- return found;
- }
-
- private NodeTemplate _findNodeTemplate(String nodeTemplateName) {
- if(nodeTemplateName.equals(SELF)) {
- return (NodeTemplate)context;
- }
- // enable the HOST value in the function
- if(nodeTemplateName.equals(HOST)) {
- NodeTemplate node = _findHostContainingProperty(null);
- if(node == null) {
-				ThreadLocalsHolder.getCollector().appendException(String.format(
-					"KeyError: Property \"%s\" was not found in the HostedOn chain of node template \"%s\"",
-					getPropertyName(),((NodeTemplate)context).getName()));
- return null;
- }
- return node;
- }
- if(nodeTemplateName.equals(TARGET)) {
- if(!(((RelationshipTemplate)context).getTypeDefinition() instanceof RelationshipType)) {
- ThreadLocalsHolder.getCollector().appendException(
- "KeyError: \"TARGET\" keyword can only be used in context to \"Relationships\" target node");
- return null;
- }
- return ((RelationshipTemplate)context).getTarget();
- }
- if(nodeTemplateName.equals(SOURCE)) {
- if(!(((RelationshipTemplate)context).getTypeDefinition() instanceof RelationshipType)) {
- ThreadLocalsHolder.getCollector().appendException(
-					"KeyError: \"SOURCE\" keyword can only be used in context to \"Relationships\" source node");
- return null;
- }
- return ((RelationshipTemplate)context).getSource();
- }
- if(toscaTpl.getNodeTemplates() == null) {
- return null;
- }
- for(NodeTemplate nodeTemplate: toscaTpl.getNodeTemplates()) {
- if(nodeTemplate.getName().equals(nodeTemplateName)) {
- return nodeTemplate;
- }
- }
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "KeyError: Node template \"%s\" was not found. Referenced from Node Template \"%s\"",
- nodeTemplateName,((NodeTemplate)context).getName()));
-
- return null;
- }
-
- @SuppressWarnings("rawtypes")
- private Object _getIndexValue(Object value,int index) {
- if(value instanceof ArrayList) {
- if(index < ((ArrayList)value).size()) {
- return ((ArrayList)value).get(index);
- }
- else {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "KeyError: Property \"%s\" found in capability \"%s\" referenced from node template \"%s\" must have an element with index %d",
- args.get(2),args.get(1),((NodeTemplate)context).getName(),index));
-
- }
- }
- else {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "KeyError: Property \"%s\" found in capability \"%s\" referenced from node template \"%s\" must be a list",
- args.get(2),args.get(1),((NodeTemplate)context).getName()));
- }
- return null;
- }
-
- @SuppressWarnings("unchecked")
- private Object _getAttributeValue(Object value,String attribute) {
- if(value instanceof LinkedHashMap) {
- Object ov = ((LinkedHashMap<String,Object>)value).get(attribute);
- if(ov != null) {
- return ov;
- }
- else {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "KeyError: Property \"%s\" found in capability \"%s\" referenced from node template \"%s\" must have an attribute named \"%s\"",
- args.get(2),args.get(1),((NodeTemplate)context).getName(),attribute));
- }
- }
- else {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "KeyError: Property \"%s\" found in capability \"%s\" referenced from node template \"%s\" must be a dict",
- args.get(2),args.get(1),((NodeTemplate)context).getName()));
- }
- return null;
- }
-
-	// These helper functions mirror the get_attribute case
- private NodeTemplate _findHostContainingProperty(String nodeTemplateName) {
- if(nodeTemplateName == null) {
- nodeTemplateName = SELF;
- }
- NodeTemplate nodeTemplate = _findNodeTemplate(nodeTemplateName);
- LinkedHashMap<String,Object> hostedOnRel = (LinkedHashMap<String,Object>)
- EntityType.TOSCA_DEF.get(HOSTED_ON);
- for(Object r: nodeTemplate.getRequirements()) {
- if(r instanceof LinkedHashMap) {
- LinkedHashMap<String,Object> rlist = (LinkedHashMap<String,Object>)r;
- for(String requirement: rlist.keySet()) {
- String targetName = (String)rlist.get(requirement);
- NodeTemplate targetNode = _findNodeTemplate(targetName);
- NodeType targetType = (NodeType)targetNode.getTypeDefinition();
- for(CapabilityTypeDef capDef: targetType.getCapabilitiesObjects()) {
- if(capDef.inheritsFrom((ArrayList<String>)hostedOnRel.get("valid_target_types"))) {
- if(_propertyExistsInType(targetType)) {
- return targetNode;
- }
- // If requirement was not found, look in node
- // template's capabilities
- if(args.size() > 2 &&
- _getCapabilityProperty(targetNode,(String)args.get(1),(String)args.get(2),false) != null) {
- return targetNode;
- }
-
- return _findHostContainingProperty(targetName);
- }
- }
- }
- }
- }
- return null;
- }
-
- private boolean _propertyExistsInType(StatefulEntityType typeDefinition) {
- LinkedHashMap<String,PropertyDef> propsDef = typeDefinition.getPropertiesDef();
- return propsDef.keySet().contains((String)args.get(1));
- }
-
- @Override
- public Object result() {
- Object propertyValue;
- if(args.size() >= 3) {
- // First check if there is property with this name
- NodeTemplate nodeTpl = _findNodeTemplate((String)args.get(0));
- LinkedHashMap<String,Property> props;
- if(nodeTpl != null) {
- props = nodeTpl.getProperties();
- }
- else {
- props = new LinkedHashMap<>();
- }
- int index = 2;
- if(props.get(args.get(1)) != null) {
- propertyValue = ((Property)props.get(args.get(1))).getValue();
- }
- else {
- index = 3;
- // then check the req or caps
- propertyValue = _findReqOrCapProperty((String)args.get(1),(String)args.get(2));
- }
-
- if(args.size() > index) {
-				for(Object elem: args.subList(index,args.size())) {
- if(propertyValue instanceof ArrayList) {
- int intElem = (int)elem;
- propertyValue = _getIndexValue(propertyValue,intElem);
- }
- else {
- propertyValue = _getAttributeValue(propertyValue,(String)elem);
- }
- }
- }
- }
-		else {
-			Property foundProp = _findProperty((String)args.get(1));
-			// guard against a missing property (the error is already collected by _findProperty)
-			propertyValue = foundProp != null ? foundProp.getValue() : null;
-		}
- if(propertyValue instanceof Function) {
- return ((Function)propertyValue).result();
- }
- return Function.getFunction(toscaTpl,context,propertyValue);
- }
-
- public String getNodeTemplateName() {
- return (String)args.get(0);
- }
-
- public String getPropertyName() {
- if(args.size() > 2) {
- return (String)args.get(2);
- }
- return (String)args.get(1);
- }
-
- public String getReqorCap() {
- if(args.size() > 2) {
- return (String)args.get(1);
- }
- return null;
- }
-
-}
-
-/*python
-
-class GetProperty(Function):
-"""Get a property value of an entity defined in the same service template.
-
-Arguments:
-
-* Node template name | SELF | HOST | SOURCE | TARGET.
-* Requirement or capability name (optional).
-* Property name.
-
-If requirement or capability name is specified, the behavior is as follows:
-The req or cap name is first looked up in the specified node template's
-requirements.
-If found, it would search for a matching capability
-of an other node template and get its property as specified in function
-arguments.
-Otherwise, the req or cap name would be looked up in the specified
-node template's capabilities and if found, it would return the property of
-the capability as specified in function arguments.
-
-Examples:
-
-* { get_property: [ mysql_server, port ] }
-* { get_property: [ SELF, db_port ] }
-* { get_property: [ SELF, database_endpoint, port ] }
-* { get_property: [ SELF, database_endpoint, port, 1 ] }
-"""
-
-def validate(self):
- if len(self.args) < 2:
- ExceptionCollector.appendException(
- ValueError(_(
- 'Expected arguments: "node-template-name", "req-or-cap" '
- '(optional), "property name".')))
- return
- if len(self.args) == 2:
- found_prop = self._find_property(self.args[1])
- if not found_prop:
- return
- prop = found_prop.value
- if not isinstance(prop, Function):
- get_function(self.tosca_tpl, self.context, prop)
- elif len(self.args) >= 3:
- # do not use _find_property to avoid raise KeyError
- # if the prop is not found
- # First check if there is property with this name
- node_tpl = self._find_node_template(self.args[0])
- props = node_tpl.get_properties() if node_tpl else []
- index = 2
- found = [props[self.args[1]]] if self.args[1] in props else []
- if found:
- property_value = found[0].value
- else:
- index = 3
- # then check the req or caps
- property_value = self._find_req_or_cap_property(self.args[1],
- self.args[2])
- if len(self.args) > index:
- for elem in self.args[index:]:
- if isinstance(property_value, list):
- int_elem = int(elem)
- property_value = self._get_index_value(property_value,
- int_elem)
- else:
- property_value = self._get_attribute_value(
- property_value,
- elem)
-
-def _find_req_or_cap_property(self, req_or_cap, property_name):
- node_tpl = self._find_node_template(self.args[0])
- # Find property in node template's requirements
- for r in node_tpl.requirements:
- for req, node_name in r.items():
- if req == req_or_cap:
- node_template = self._find_node_template(node_name)
- return self._get_capability_property(
- node_template,
- req,
- property_name)
- # If requirement was not found, look in node template's capabilities
- return self._get_capability_property(node_tpl,
- req_or_cap,
- property_name)
-
-def _get_capability_property(self,
- node_template,
- capability_name,
- property_name):
- """Gets a node template capability property."""
- caps = node_template.get_capabilities()
- if caps and capability_name in caps.keys():
- cap = caps[capability_name]
- property = None
- props = cap.get_properties()
- if props and property_name in props.keys():
- property = props[property_name].value
- if not property:
- ExceptionCollector.appendException(
- KeyError(_('Property "%(prop)s" was not found in '
- 'capability "%(cap)s" of node template '
- '"%(ntpl1)s" referenced from node template '
- '"%(ntpl2)s".') % {'prop': property_name,
- 'cap': capability_name,
- 'ntpl1': node_template.name,
- 'ntpl2': self.context.name}))
- return property
- msg = _('Requirement/Capability "{0}" referenced from node template '
- '"{1}" was not found in node template "{2}".').format(
- capability_name,
- self.context.name,
- node_template.name)
- ExceptionCollector.appendException(KeyError(msg))
-
-def _find_property(self, property_name):
- node_tpl = self._find_node_template(self.args[0])
- if not node_tpl:
- return
- props = node_tpl.get_properties()
- found = [props[property_name]] if property_name in props else []
- if len(found) == 0:
- ExceptionCollector.appendException(
- KeyError(_('Property "%(prop)s" was not found in node '
- 'template "%(ntpl)s".') %
- {'prop': property_name,
- 'ntpl': node_tpl.name}))
- return None
- return found[0]
-
-def _find_node_template(self, node_template_name):
- if node_template_name == SELF:
- return self.context
- # enable the HOST value in the function
- if node_template_name == HOST:
- return self._find_host_containing_property()
- if node_template_name == TARGET:
- if not isinstance(self.context.type_definition, RelationshipType):
- ExceptionCollector.appendException(
- KeyError(_('"TARGET" keyword can only be used in context'
- ' to "Relationships" target node')))
- return
- return self.context.target
- if node_template_name == SOURCE:
- if not isinstance(self.context.type_definition, RelationshipType):
- ExceptionCollector.appendException(
- KeyError(_('"SOURCE" keyword can only be used in context'
- ' to "Relationships" source node')))
- return
- return self.context.source
- if not hasattr(self.tosca_tpl, 'nodetemplates'):
- return
- for node_template in self.tosca_tpl.nodetemplates:
- if node_template.name == node_template_name:
- return node_template
- ExceptionCollector.appendException(
- KeyError(_(
- 'Node template "{0}" was not found.'
- ).format(node_template_name)))
-
-def _get_index_value(self, value, index):
- if isinstance(value, list):
- if index < len(value):
- return value[index]
- else:
- ExceptionCollector.appendException(
- KeyError(_(
- "Property '{0}' found in capability '{1}'"
- " referenced from node template {2}"
- " must have an element with index {3}.").
- format(self.args[2],
- self.args[1],
- self.context.name,
- index)))
- else:
- ExceptionCollector.appendException(
- KeyError(_(
- "Property '{0}' found in capability '{1}'"
- " referenced from node template {2}"
- " must be a list.").format(self.args[2],
- self.args[1],
- self.context.name)))
-
-def _get_attribute_value(self, value, attibute):
- if isinstance(value, dict):
- if attibute in value:
- return value[attibute]
- else:
- ExceptionCollector.appendException(
- KeyError(_(
- "Property '{0}' found in capability '{1}'"
- " referenced from node template {2}"
- " must have an attribute named {3}.").
- format(self.args[2],
- self.args[1],
- self.context.name,
- attibute)))
- else:
- ExceptionCollector.appendException(
- KeyError(_(
- "Property '{0}' found in capability '{1}'"
- " referenced from node template {2}"
- " must be a dict.").format(self.args[2],
- self.args[1],
- self.context.name)))
-
-# Add this functions similar to get_attribute case
-def _find_host_containing_property(self, node_template_name=SELF):
- node_template = self._find_node_template(node_template_name)
- hosted_on_rel = EntityType.TOSCA_DEF[HOSTED_ON]
- for r in node_template.requirements:
- for requirement, target_name in r.items():
- target_node = self._find_node_template(target_name)
- target_type = target_node.type_definition
- for capability in target_type.get_capabilities_objects():
- if capability.type in hosted_on_rel['valid_target_types']:
- if self._property_exists_in_type(target_type):
- return target_node
- return self._find_host_containing_property(
- target_name)
- return None
-
-def _property_exists_in_type(self, type_definition):
- props_def = type_definition.get_properties_def()
- found = [props_def[self.args[1]]] \
- if self.args[1] in props_def else []
- return len(found) == 1
-
-def result(self):
- if len(self.args) >= 3:
- # First check if there is property with this name
- node_tpl = self._find_node_template(self.args[0])
- props = node_tpl.get_properties() if node_tpl else []
- index = 2
- found = [props[self.args[1]]] if self.args[1] in props else []
- if found:
- property_value = found[0].value
- else:
- index = 3
- # then check the req or caps
- property_value = self._find_req_or_cap_property(self.args[1],
- self.args[2])
- if len(self.args) > index:
- for elem in self.args[index:]:
- if isinstance(property_value, list):
- int_elem = int(elem)
- property_value = self._get_index_value(property_value,
- int_elem)
- else:
- property_value = self._get_attribute_value(
- property_value,
- elem)
- else:
- property_value = self._find_property(self.args[1]).value
- if isinstance(property_value, Function):
- return property_value.result()
- return get_function(self.tosca_tpl,
- self.context,
- property_value)
-
-@property
-def node_template_name(self):
- return self.args[0]
-
-@property
-def property_name(self):
- if len(self.args) > 2:
- return self.args[2]
- return self.args[1]
-
-@property
-def req_or_cap(self):
- if len(self.args) > 2:
- return self.args[1]
- return None
-*/
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/functions/Token.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/functions/Token.java
deleted file mode 100644
index 4438908..0000000
--- a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/functions/Token.java
+++ /dev/null
@@ -1,112 +0,0 @@
-package org.openecomp.sdc.toscaparser.api.functions;
-
-import java.util.ArrayList;
-import java.util.LinkedHashMap;
-
-import org.openecomp.sdc.toscaparser.api.NodeTemplate;
-import org.openecomp.sdc.toscaparser.api.TopologyTemplate;
-import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector;
-import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder;
-
-public class Token extends Function {
- // Validate the function and provide an instance of the function
-
- //The token function is used within a TOSCA service template on a string to
- //parse out (tokenize) substrings separated by one or more token characters
- //within a larger string.
-
- //Arguments:
-
- //* The composite string that contains one or more substrings separated by
- // token characters.
- //* The string that contains one or more token characters that separate
- // substrings within the composite string.
- //* The integer indicates the index of the substring to return from the
- // composite string. Note that the first substring is denoted by using
- // the '0' (zero) integer value.
-
- //Example:
-
- // [ get_attribute: [ my_server, data_endpoint, ip_address ], ':', 1 ]
-
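-    /* Illustrative sketch (not part of the original file): the argument shape that
-       validate() checks - a composite string (or nested function), a single-character
-       separator, and an integer index. ttpl and serverNode are placeholder names
-       (assumptions).
-
-    // YAML:  { token: [ "10.0.0.5:8080", ":", 1 ] }   ->   "8080" when evaluated at runtime
-    ArrayList<Object> tokArgs = new ArrayList<Object>(Arrays.asList("10.0.0.5:8080", ":", 1));
-    Token tok = new Token(ttpl, serverNode, "token", tokArgs);
-    Object deferred = tok.result();   // evaluation is deferred to the orchestrator
-    */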
-
- public Token(TopologyTemplate ttpl,Object context,String name,ArrayList<Object> args) {
- super(ttpl,context,name,args);
- }
-
- @Override
- public Object result() {
- return this;
- }
-
- @Override
- void validate() {
- if(args.size() < 3) {
- ThreadLocalsHolder.getCollector().appendException(
- "ValueError: Invalid arguments for function \"token\". " +
- "Expected at least three arguments");
- }
- else {
- if(!(args.get(1) instanceof String) ||
- ((String)args.get(1)).length() != 1) {
- ThreadLocalsHolder.getCollector().appendException(
- "ValueError: Invalid arguments for function \"token\". " +
- "Expected single char value as second argument");
- }
- if(!(args.get(2) instanceof Integer)) {
- ThreadLocalsHolder.getCollector().appendException(
- "ValueError: Invalid arguments for function \"token\"" +
- "Expected integer value as third argument");
- }
- }
- }
-
-}
-
-/*python
-
-class Token(Function):
-"""Validate the function and provide an instance of the function
-
-The token function is used within a TOSCA service template on a string to
-parse out (tokenize) substrings separated by one or more token characters
-within a larger string.
-
-
-Arguments:
-
-* The composite string that contains one or more substrings separated by
- token characters.
-* The string that contains one or more token characters that separate
- substrings within the composite string.
-* The integer indicates the index of the substring to return from the
- composite string. Note that the first substring is denoted by using
- the '0' (zero) integer value.
-
-Example:
-
- [ get_attribute: [ my_server, data_endpoint, ip_address ], ':', 1 ]
-
-"""
-
-def validate(self):
- if len(self.args) < 3:
- ExceptionCollector.appendException(
- ValueError(_('Invalid arguments for function "{0}". Expected '
- 'at least three arguments.').format(TOKEN)))
- else:
- if not isinstance(self.args[1], str) or len(self.args[1]) != 1:
- ExceptionCollector.appendException(
- ValueError(_('Invalid arguments for function "{0}". '
- 'Expected single char value as second '
- 'argument.').format(TOKEN)))
-
- if not isinstance(self.args[2], int):
- ExceptionCollector.appendException(
- ValueError(_('Invalid arguments for function "{0}". '
- 'Expected integer value as third '
- 'argument.').format(TOKEN)))
-
-def result(self):
- return self
-*/ \ No newline at end of file
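The comments above describe the intended semantics of the TOSCA token function: split a composite string on a single separator character and return the 0-based piece. A minimal, hypothetical sketch of that evaluation step (not taken from the deleted sources, which only validate the arguments):

    import java.util.regex.Pattern;

    public class TokenSketch {
        // composite: string to split; separator: single token character; index: 0-based piece to return
        static String token(String composite, String separator, int index) {
            String[] parts = composite.split(Pattern.quote(separator));
            return (index >= 0 && index < parts.length) ? parts[index] : null;
        }

        public static void main(String[] args) {
            // e.g. [ "10.0.0.1:8080", ':', 1 ]  ->  "8080"
            System.out.println(token("10.0.0.1:8080", ":", 1));
        }
    }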
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/parameters/Input.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/parameters/Input.java
deleted file mode 100644
index 7b3e64f..0000000
--- a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/parameters/Input.java
+++ /dev/null
@@ -1,226 +0,0 @@
-package org.openecomp.sdc.toscaparser.api.parameters;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.LinkedHashMap;
-
-import org.openecomp.sdc.toscaparser.api.DataEntity;
-import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector;
-import org.openecomp.sdc.toscaparser.api.elements.EntityType;
-import org.openecomp.sdc.toscaparser.api.elements.constraints.Constraint;
-import org.openecomp.sdc.toscaparser.api.elements.constraints.Schema;
-import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder;
-
-public class Input {
-
- private static final String TYPE = "type";
- private static final String DESCRIPTION = "description";
- private static final String DEFAULT = "default";
- private static final String CONSTRAINTS = "constraints";
- private static final String REQUIRED = "required";
- private static final String STATUS = "status";
- private static final String ENTRY_SCHEMA = "entry_schema";
-
- public static final String INTEGER = "integer";
- public static final String STRING = "string";
- public static final String BOOLEAN = "boolean";
- public static final String FLOAT = "float";
- public static final String LIST = "list";
- public static final String MAP = "map";
- public static final String JSON = "json";
-
- private static String INPUTFIELD[] = {
- TYPE, DESCRIPTION, DEFAULT, CONSTRAINTS, REQUIRED, STATUS, ENTRY_SCHEMA
- };
-
- private static String PRIMITIVE_TYPES[] = {
- INTEGER, STRING, BOOLEAN, FLOAT, LIST, MAP, JSON
- };
-
- private String name;
- private Schema schema;
- private LinkedHashMap<String,Object> customDefs;
-
- public Input(String _name,LinkedHashMap<String,Object> _schemaDict,LinkedHashMap<String,Object> _customDefs) {
- name = _name;
- schema = new Schema(_name,_schemaDict);
- customDefs = _customDefs;
- }
-
- public String getName() {
- return name;
- }
-
- public String getType() {
- return schema.getType();
- }
-
- public String getDescription() {
- return schema.getDescription();
- }
-
- public boolean isRequired() {
- return schema.isRequired();
- }
-
- public Object getDefault() {
- return schema.getDefault();
- }
-
- public ArrayList<Constraint> getConstraints() {
- return schema.getConstraints();
- }
-
- public void validate(Object value) {
- _validateField();
- _validateType(getType());
- if(value != null) {
- _validateValue(value);
- }
- }
-
- private void _validateField() {
- for(String key: schema.getSchema().keySet()) {
- boolean bFound = false;
- for(String ifld: INPUTFIELD) {
- if(key.equals(ifld)) {
- bFound = true;
- break;
- }
- }
- if(!bFound) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "UnknownFieldError: Input \"%s\" contains unknown field \"%s\"",
- name,key));
- }
- }
- }
-
- private void _validateType(String inputType) {
- boolean bFound = false;
- for(String pt: Schema.PROPERTY_TYPES) {
- if(pt.equals(inputType)) {
- bFound = true;
- break;
- }
- }
-
- if(!bFound) {
- if(customDefs.get(inputType) != null) {
- bFound = true;
- }
- }
-
- if(!bFound) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "ValueError: Invalid type \"%s\"",inputType));
- }
- }
-
- private void _validateValue(Object value) {
- Object datatype = null;
- if(EntityType.TOSCA_DEF.get(getType()) != null) {
- datatype = EntityType.TOSCA_DEF.get(getType());
- }
- else if(EntityType.TOSCA_DEF.get(EntityType.DATATYPE_NETWORK_PREFIX + getType()) != null) {
- datatype = EntityType.TOSCA_DEF.get(EntityType.DATATYPE_NETWORK_PREFIX + getType());
- }
-
- String type = getType();
- // if it's one of the basic types DON'T look in customDefs
- if(Arrays.asList(PRIMITIVE_TYPES).contains(type)) {
- DataEntity.validateDatatype(getType(), value, null, (LinkedHashMap<String,Object>)datatype, null);
- return;
- }
- else if(customDefs.get(getType()) != null) {
- datatype = customDefs.get(getType());
- DataEntity.validateDatatype(getType(), value, (LinkedHashMap<String,Object>)datatype, customDefs, null);
- return;
- }
-
- DataEntity.validateDatatype(getType(), value, null, (LinkedHashMap<String,Object>)datatype, null);
- }
-}
-
-/*python
-
-from toscaparser.common.exception import ExceptionCollector
-from toscaparser.common.exception import MissingRequiredFieldError
-from toscaparser.common.exception import UnknownFieldError
-from toscaparser.dataentity import DataEntity
-from toscaparser.elements.constraints import Schema
-from toscaparser.elements.entity_type import EntityType
-from toscaparser.utils.gettextutils import _
-
-
-log = logging.getLogger('tosca')
-
-
-class Input(object):
-
- INPUTFIELD = (TYPE, DESCRIPTION, DEFAULT, CONSTRAINTS, REQUIRED, STATUS,
- ENTRY_SCHEMA) = ('type', 'description', 'default',
- 'constraints', 'required', 'status',
- 'entry_schema')
-
- def __init__(self, name, schema_dict):
- self.name = name
- self.schema = Schema(name, schema_dict)
-
- self._validate_field()
- self.validate_type(self.type)
-
- @property
- def type(self):
- return self.schema.type
-
- @property
- def required(self):
- return self.schema.required
-
- @property
- def description(self):
- return self.schema.description
-
- @property
- def default(self):
- return self.schema.default
-
- @property
- def constraints(self):
- return self.schema.constraints
-
- @property
- def status(self):
- return self.schema.status
-
- def validate(self, value=None):
- if value is not None:
- self._validate_value(value)
-
- def _validate_field(self):
- for name in self.schema.schema:
- if name not in self.INPUTFIELD:
- ExceptionCollector.appendException(
- UnknownFieldError(what='Input "%s"' % self.name,
- field=name))
-
- def validate_type(self, input_type):
- if input_type not in Schema.PROPERTY_TYPES:
- ExceptionCollector.appendException(
- ValueError(_('Invalid type "%s".') % type))
-
- # TODO(anyone) Need to test for any built-in datatype not just network
- # that is, tosca.datatypes.* and not assume tosca.datatypes.network.*
- # TODO(anyone) Add support for tosca.datatypes.Credential
- def _validate_value(self, value):
- tosca = EntityType.TOSCA_DEF
- datatype = None
- if self.type in tosca:
- datatype = tosca[self.type]
- elif EntityType.DATATYPE_NETWORK_PREFIX + self.type in tosca:
- datatype = tosca[EntityType.DATATYPE_NETWORK_PREFIX + self.type]
-
- DataEntity.validate_datatype(self.type, value, None, datatype)
-
-*/
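A brief usage sketch for the Input class removed above, assuming an ExceptionCollector has been registered through ThreadLocalsHolder as elsewhere in the parser; the schema values are illustrative only:

    import java.util.LinkedHashMap;

    import org.openecomp.sdc.toscaparser.api.parameters.Input;

    class InputUsageSketch {
        static void declareAndValidate() {
            // schema dictionary as the YAML loader would produce it
            LinkedHashMap<String, Object> schemaDict = new LinkedHashMap<>();
            schemaDict.put("type", "integer");
            schemaDict.put("description", "Number of CPUs for the server");
            schemaDict.put("default", 2);

            Input cpus = new Input("cpus", schemaDict, new LinkedHashMap<>());
            cpus.validate(4);   // checks the field names, the declared type, then the value
        }
    }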
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/parameters/Output.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/parameters/Output.java
deleted file mode 100644
index 34ecf12..0000000
--- a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/parameters/Output.java
+++ /dev/null
@@ -1,109 +0,0 @@
-package org.openecomp.sdc.toscaparser.api.parameters;
-
-import java.util.LinkedHashMap;
-
-import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector;
-import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder;
-
-public class Output {
-
- private static final String DESCRIPTION = "description";
- public static final String VALUE = "value";
- private static final String OUTPUTFIELD[] = {DESCRIPTION, VALUE};
-
- private String name;
- private LinkedHashMap<String,Object> attrs;//TYPE???
-
- public Output(String oname,LinkedHashMap<String,Object> oattrs) {
- name = oname;
- attrs = oattrs;
- }
-
- public String getDescription() {
- return (String)attrs.get(DESCRIPTION);
- }
-
- public Object getValue() {
- return attrs.get(VALUE);
- }
-
- public void validate() {
- _validateField();
- }
-
- private void _validateField() {
- if(!(attrs instanceof LinkedHashMap)) {
- //TODO wrong error message...
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "ValidationError: Output \"%s\" has wrong type. Expecting a dict",
- name));
- }
-
- if(getValue() == null) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "MissingRequiredFieldError: Output \"%s\" is missing required \"%s\"",
- name,VALUE));
- }
- for(String key: attrs.keySet()) {
- boolean bFound = false;
- for(String of: OUTPUTFIELD) {
- if(key.equals(of)) {
- bFound = true;
- break;
- }
- }
- if(!bFound) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "UnknownFieldError: Output \"%s\" contains unknown field \"%s\"",
- name,key));
- }
- }
- }
-
- // getter/setter
-
- public String getName() {
- return name;
- }
-
- public void setAttr(String name,Object value) {
- attrs.put(name, value);
- }
-}
-
-/*python
-
-class Output(object):
-
- OUTPUTFIELD = (DESCRIPTION, VALUE) = ('description', 'value')
-
- def __init__(self, name, attrs):
- self.name = name
- self.attrs = attrs
-
- @property
- def description(self):
- return self.attrs.get(self.DESCRIPTION)
-
- @property
- def value(self):
- return self.attrs.get(self.VALUE)
-
- def validate(self):
- self._validate_field()
-
- def _validate_field(self):
- if not isinstance(self.attrs, dict):
- ExceptionCollector.appendException(
- MissingRequiredFieldError(what='Output "%s"' % self.name,
- required=self.VALUE))
- if self.value is None:
- ExceptionCollector.appendException(
- MissingRequiredFieldError(what='Output "%s"' % self.name,
- required=self.VALUE))
- for name in self.attrs:
- if name not in self.OUTPUTFIELD:
- ExceptionCollector.appendException(
- UnknownFieldError(what='Output "%s"' % self.name,
- field=name))
-*/
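A matching sketch for the Output class removed above (attribute values are placeholders):

    import java.util.LinkedHashMap;

    import org.openecomp.sdc.toscaparser.api.parameters.Output;

    class OutputUsageSketch {
        static void declareAndValidate() {
            LinkedHashMap<String, Object> attrs = new LinkedHashMap<>();
            attrs.put("description", "Public address of the web server");
            attrs.put("value", "{ get_attribute: [ my_server, public_address ] }");

            Output serverAddress = new Output("server_address", attrs);
            serverAddress.validate();   // reports a missing "value" or any unknown field
        }
    }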
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/prereq/CSAR.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/prereq/CSAR.java
deleted file mode 100644
index 85b54ee..0000000
--- a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/prereq/CSAR.java
+++ /dev/null
@@ -1,782 +0,0 @@
-package org.openecomp.sdc.toscaparser.api.prereq;
-
-import java.io.BufferedOutputStream;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.RandomAccessFile;
-import java.net.URL;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.nio.file.Paths;
-import java.nio.file.StandardCopyOption;
-import java.util.*;
-import java.util.zip.ZipEntry;
-import java.util.zip.ZipFile;
-import java.util.zip.ZipInputStream;
-
-import org.openecomp.sdc.toscaparser.api.ImportsLoader;
-import org.openecomp.sdc.toscaparser.api.common.JToscaException;
-import org.openecomp.sdc.toscaparser.api.utils.JToscaErrorCodes;
-import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder;
-import org.openecomp.sdc.toscaparser.api.utils.UrlUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.yaml.snakeyaml.Yaml;
-
-public class CSAR {
-
- private static Logger log = LoggerFactory.getLogger(CSAR.class.getName());
- private static final ArrayList<String> META_PROPERTIES_FILES = new ArrayList<>(Arrays.asList("TOSCA-Metadata/TOSCA.meta", "csar.meta"));
-
- private String path;
- private boolean isFile;
- private boolean isValidated;
- private boolean errorCaught;
- private String csar;
- private String tempDir;
-// private Metadata metaData;
- private File tempFile;
- private LinkedHashMap<String, LinkedHashMap<String, Object>> metaProperties;
-
- public CSAR(String csarPath, boolean aFile) {
- path = csarPath;
- isFile = aFile;
- isValidated = false;
- errorCaught = false;
- csar = null;
- tempDir = null;
- tempFile = null;
- metaProperties = new LinkedHashMap<>();
- }
-
- @SuppressWarnings("unchecked")
- public boolean validate() throws JToscaException {
- isValidated = true;
-
- //validate that the file or URL exists
-
- if(isFile) {
- File f = new File(path);
- if (!f.isFile()) {
- ThreadLocalsHolder.getCollector().appendException(String.format("\"%s\" is not a file", path));
- return false;
- }
- else {
- this.csar = path;
- }
- }
- else {
- if(!UrlUtils.validateUrl(path)) {
- ThreadLocalsHolder.getCollector().appendException(String.format("ImportError: \"%s\" does not exist",path));
- return false;
- }
- // get it to a local file
- try {
- // keep the reference on the instance so cleanup() can delete it later
- tempFile = File.createTempFile("csartmp",".csar");
- Path ptf = Paths.get(tempFile.getPath());
- URL webfile = new URL(path);
- InputStream in = webfile.openStream();
- Files.copy(in,ptf,StandardCopyOption.REPLACE_EXISTING);
- }
- catch(Exception e) {
- ThreadLocalsHolder.getCollector().appendException("ImportError: failed to load CSAR from " + path);
- return false;
- }
-
- log.debug("CSAR - validate - currently only files are supported");
- return false;
- }
-
- _parseAndValidateMetaProperties();
-
- if(errorCaught) {
- return false;
- }
-
- // validate that external references in the main template actually exist and are accessible
- _validateExternalReferences();
-
- return !errorCaught;
-
- }
-
- private void _parseAndValidateMetaProperties() throws JToscaException {
-
- ZipFile zf = null;
-
- try {
-
- // validate that it is a valid zip file
- RandomAccessFile raf = new RandomAccessFile(csar, "r");
- long n = raf.readInt();
- raf.close();
- // check if Zip's magic number
- if (n != 0x504B0304) {
- String errorString = String.format("\"%s\" is not a valid zip file", csar);
- log.error(errorString);
- throw new JToscaException(errorString , JToscaErrorCodes.INVALID_CSAR_FORMAT.getValue());
- }
-
- // validate that it contains the metadata file in the correct location
- zf = new ZipFile(csar);
- ZipEntry ze = zf.getEntry("TOSCA-Metadata/TOSCA.meta");
- if (ze == null) {
-
- String errorString = String.format(
- "\"%s\" is not a valid CSAR as it does not contain the " +
- "required file \"TOSCA.meta\" in the folder \"TOSCA-Metadata\"", csar);
- log.error(errorString);
- throw new JToscaException(errorString, JToscaErrorCodes.MISSING_META_FILE.getValue());
- }
-
- //Going over expected metadata files and parsing them
- for (String metaFile: META_PROPERTIES_FILES) {
-
- byte ba[] = new byte[4096];
- ze = zf.getEntry(metaFile);
- if (ze != null) {
- InputStream inputStream = zf.getInputStream(ze);
- n = inputStream.read(ba, 0, 4096);
- String md = new String(ba);
- md = md.substring(0, (int) n);
-
- String errorString = String.format(
- "The file \"%s\" in the" +
- " CSAR \"%s\" does not contain valid YAML content", ze.getName(), csar);
-
- try {
- Yaml yaml = new Yaml();
- Object mdo = yaml.load(md);
- if (!(mdo instanceof LinkedHashMap)) {
- log.error(errorString);
- throw new JToscaException(errorString, JToscaErrorCodes.INVALID_META_YAML_CONTENT.getValue());
- }
-
- String[] split = ze.getName().split("/");
- String fileName = split[split.length - 1];
-
- if (!metaProperties.containsKey(fileName)) {
- metaProperties.put(fileName, (LinkedHashMap<String, Object>) mdo);
- }
- }
- catch(Exception e) {
- log.error(errorString);
- throw new JToscaException(errorString, JToscaErrorCodes.INVALID_META_YAML_CONTENT.getValue());
- }
- }
- }
-
- // verify it has "Entry-Definition"
- String edf = _getMetadata("Entry-Definitions");
- if (edf == null) {
- String errorString = String.format(
- "The CSAR \"%s\" is missing the required metadata " +
- "\"Entry-Definitions\" in \"TOSCA-Metadata/TOSCA.meta\"", csar);
- log.error(errorString);
- throw new JToscaException(errorString, JToscaErrorCodes.ENTRY_DEFINITION_NOT_DEFINED.getValue());
- }
-
- //validate that "Entry-Definitions' metadata value points to an existing file in the CSAR
- boolean foundEDF = false;
- Enumeration<? extends ZipEntry> entries = zf.entries();
- while (entries.hasMoreElements()) {
- ze = entries.nextElement();
- if (ze.getName().equals(edf)) {
- foundEDF = true;
- break;
- }
- }
- if (!foundEDF) {
- String errorString = String.format(
- "The \"Entry-Definitions\" file defined in the CSAR \"%s\" does not exist", csar);
- log.error(errorString);
- throw new JToscaException(errorString, JToscaErrorCodes.MISSING_ENTRY_DEFINITION_FILE.getValue());
- }
- } catch (JToscaException e) {
- //ThreadLocalsHolder.getCollector().appendCriticalException(e.getMessage());
- throw e;
- } catch (Exception e) {
- ThreadLocalsHolder.getCollector().appendException("ValidationError: " + e.getMessage());
- errorCaught = true;
- }
-
- try {
- if (zf != null) {
- zf.close();
- }
- } catch (IOException e) {
- // closing the archive failed; nothing useful to report at this point
- }
- }
-
- public void cleanup() {
- try {
- if(tempFile != null) {
- tempFile.delete();
- }
- }
- catch(Exception e) {
- }
- }
-
- private String _getMetadata(String key) throws JToscaException {
- if(!isValidated) {
- validate();
- }
- Object value = _getMetaProperty("TOSCA.meta").get(key);
- return value != null ? value.toString() : null;
- }
-
- public String getAuthor() throws JToscaException {
- return _getMetadata("Created-By");
- }
-
- public String getVersion() throws JToscaException {
- return _getMetadata("CSAR-Version");
- }
-
- public LinkedHashMap<String, LinkedHashMap<String, Object>> getMetaProperties() {
- return metaProperties;
- }
-
- private LinkedHashMap<String, Object> _getMetaProperty(String propertiesFile) {
- return metaProperties.get(propertiesFile);
- }
-
- public String getMainTemplate() throws JToscaException {
- String entryDef = _getMetadata("Entry-Definitions");
- ZipFile zf;
- boolean ok = false;
- try {
- zf = new ZipFile(path);
- ok = (zf.getEntry(entryDef) != null);
- zf.close();
- }
- catch(IOException e) {
- if(!ok) {
- log.error("CSAR - getMainTemplate - failed to open {}", path);
- }
- }
- if(ok) {
- return entryDef;
- }
- else {
- return null;
- }
- }
-
- @SuppressWarnings("unchecked")
- public LinkedHashMap<String,Object> getMainTemplateYaml() throws JToscaException {
- // check the entry definition itself; the concatenated path can never be null
- String entryDef = getMainTemplate();
- if(entryDef != null) {
- String mainTemplate = tempDir + File.separator + entryDef;
- try {
- InputStream input = new FileInputStream(new File(mainTemplate));
- Yaml yaml = new Yaml();
- Object data = yaml.load(input);
- if(!(data instanceof LinkedHashMap)) {
- throw new IOException();
- }
- return (LinkedHashMap<String,Object>)data;
- }
- catch(Exception e) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "The file \"%s\" in the CSAR \"%s\" does not " +
- "contain valid TOSCA YAML content",
- mainTemplate,csar));
- }
- }
- return null;
- }
-
- public String getDescription() throws JToscaException {
- String desc = _getMetadata("Description");
- if(desc != null) {
- return desc;
- }
-
- Map<String, Object> metaData = metaProperties.get("TOSCA.meta");
- metaData.put("Description", getMainTemplateYaml().get("description"));
- return _getMetadata("Description");
- }
-
- public String getTempDir() {
- return tempDir;
- }
-
- public void decompress() throws IOException, JToscaException {
- if(!isValidated) {
- validate();
- }
- tempDir = Files.createTempDirectory("JTP").toString();
- unzip(path,tempDir);
-
- }
-
- private void _validateExternalReferences() throws JToscaException {
- // Extracts files referenced in the main template
- // These references are currently supported:
- // * imports
- // * interface implementations
- // * artifacts
- try {
- decompress();
- String mainTplFile = getMainTemplate();
- if(mainTplFile == null) {
- return;
- }
-
- LinkedHashMap<String,Object> mainTpl = getMainTemplateYaml();
- if(mainTpl.get("imports") != null) {
- // this loads the imports
- ImportsLoader il = new ImportsLoader((ArrayList<Object>)mainTpl.get("imports"),
- tempDir + File.separator + mainTplFile,
- (Object)null,
- (LinkedHashMap<String,Object>)null);
- }
-
- if(mainTpl.get("topology_template") != null) {
- LinkedHashMap<String,Object> topologyTemplate =
- (LinkedHashMap<String,Object>)mainTpl.get("topology_template");
-
- if(topologyTemplate.get("node_templates") != null) {
- LinkedHashMap<String,Object> nodeTemplates =
- (LinkedHashMap<String,Object>)topologyTemplate.get("node_templates");
- for(String nodeTemplateKey: nodeTemplates.keySet()) {
- LinkedHashMap<String,Object> nodeTemplate =
- (LinkedHashMap<String,Object>)nodeTemplates.get(nodeTemplateKey);
- if(nodeTemplate.get("artifacts") != null) {
- LinkedHashMap<String,Object> artifacts =
- (LinkedHashMap<String,Object>)nodeTemplate.get("artifacts");
- for(String artifactKey: artifacts.keySet()) {
- Object artifact = artifacts.get(artifactKey);
- if(artifact instanceof String) {
- _validateExternalReference(mainTplFile,(String)artifact,true);
- }
- else if(artifact instanceof LinkedHashMap) {
- String file = (String)((LinkedHashMap<String,Object>)artifact).get("file");
- if(file != null) {
- _validateExternalReference(mainTplFile,file,true);
- }
- }
- else {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "ValueError: Unexpected artifact definition for \"%s\"",
- artifactKey));
- errorCaught = true;
- }
- }
- }
- if(nodeTemplate.get("interfaces") != null) {
- LinkedHashMap<String,Object> interfaces =
- (LinkedHashMap<String,Object>)nodeTemplate.get("interfaces");
- for(String interfaceKey: interfaces.keySet()) {
- LinkedHashMap<String,Object> _interface =
- (LinkedHashMap<String,Object>)interfaces.get(interfaceKey);
- for(String operationKey: _interface.keySet()) {
- Object operation = _interface.get(operationKey);
- if(operation instanceof String) {
- _validateExternalReference(mainTplFile,(String)operation,false);
- }
- else if(operation instanceof LinkedHashMap) {
- String imp = (String)((LinkedHashMap<String,Object>)operation).get("implementation");
- if(imp != null) {
- _validateExternalReference(mainTplFile,imp,true);
- }
- }
- }
- }
- }
- }
- }
- }
- }
- catch(IOException e) {
- errorCaught = true;
- }
- finally {
- // delete tempDir (only here?!?)
- File fdir = new File(tempDir);
- deleteDir(fdir);
- tempDir = null;
- }
- }
-
- public static void deleteDir(File fdir) {
- try {
- if (fdir.isDirectory()) {
- for (File c : fdir.listFiles())
- deleteDir(c);
- }
- fdir.delete();
- }
- catch(Exception e) {
- }
- }
-
- private void _validateExternalReference(String tplFile,String resourceFile,boolean raiseExc) {
- // Verify that the external resource exists
-
- // If resource_file is a URL verify that the URL is valid.
- // If resource_file is a relative path verify that the path is valid
- // considering base folder (self.temp_dir) and tpl_file.
- // Note that in a CSAR resource_file cannot be an absolute path.
- if(UrlUtils.validateUrl(resourceFile)) {
- String msg = String.format("URLException: The resource at \"%s\" cannot be accessed",resourceFile);
- try {
- if(UrlUtils.isUrlAccessible(resourceFile)) {
- return;
- }
- else {
- ThreadLocalsHolder.getCollector().appendException(msg);
- errorCaught = true;
- }
- }
- catch (Exception e) {
- ThreadLocalsHolder.getCollector().appendException(msg);
- }
- }
-
- String dirPath = Paths.get(tplFile).getParent().toString();
- String filePath = tempDir + File.separator + dirPath + File.separator + resourceFile;
- File f = new File(filePath);
- if(f.isFile()) {
- return;
- }
-
- if(raiseExc) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "ValueError: The resource \"%s\" does not exist",resourceFile));
- }
- errorCaught = true;
- }
-
- private void unzip(String zipFilePath, String destDirectory) throws IOException {
- File destDir = new File(destDirectory);
- if (!destDir.exists()) {
- destDir.mkdir();
- }
- ZipInputStream zipIn = new ZipInputStream(new FileInputStream(zipFilePath));
- ZipEntry entry = zipIn.getNextEntry();
- // iterates over entries in the zip file
- while (entry != null) {
- // create all directories needed for nested items
- String[] parts = entry.getName().split("/");
- String s = destDirectory + File.separator ;
- for(int i=0; i< parts.length-1; i++) {
- s += parts[i];
- File idir = new File(s);
- if(!idir.exists()) {
- idir.mkdir();
- }
- s += File.separator;
- }
- String filePath = destDirectory + File.separator + entry.getName();
- if (!entry.isDirectory()) {
- // if the entry is a file, extracts it
- extractFile(zipIn, filePath);
- } else {
- // if the entry is a directory, make the directory
- File dir = new File(filePath);
- dir.mkdir();
- }
- zipIn.closeEntry();
- entry = zipIn.getNextEntry();
- }
- zipIn.close();
- }
-
- private static final int BUFFER_SIZE = 4096;
-
- /**
- * Extracts a zip entry (file entry)
- * @param zipIn zip stream positioned at the entry to extract
- * @param filePath destination path for the extracted file
- * @throws IOException if the entry cannot be written
- */
- private void extractFile(ZipInputStream zipIn, String filePath) throws IOException {
- //BufferedOutputStream bos = new BufferedOutputStream(new FileOutputStream(filePath));
- FileOutputStream fos = new FileOutputStream(filePath);
- BufferedOutputStream bos = new BufferedOutputStream(fos);
- byte[] bytesIn = new byte[BUFFER_SIZE];
- int read = 0;
- while ((read = zipIn.read(bytesIn)) != -1) {
- bos.write(bytesIn, 0, read);
- }
- bos.close();
- }
-
-}
-
-/*python
-
-from toscaparser.common.exception import ExceptionCollector
-from toscaparser.common.exception import URLException
-from toscaparser.common.exception import ValidationError
-from toscaparser.imports import ImportsLoader
-from toscaparser.utils.gettextutils import _
-from toscaparser.utils.urlutils import UrlUtils
-
-try: # Python 2.x
- from BytesIO import BytesIO
-except ImportError: # Python 3.x
- from io import BytesIO
-
-
-class CSAR(object):
-
- def __init__(self, csar_file, a_file=True):
- self.path = csar_file
- self.a_file = a_file
- self.is_validated = False
- self.error_caught = False
- self.csar = None
- self.temp_dir = None
-
- def validate(self):
- """Validate the provided CSAR file."""
-
- self.is_validated = True
-
- # validate that the file or URL exists
- missing_err_msg = (_('"%s" does not exist.') % self.path)
- if self.a_file:
- if not os.path.isfile(self.path):
- ExceptionCollector.appendException(
- ValidationError(message=missing_err_msg))
- return False
- else:
- self.csar = self.path
- else: # a URL
- if not UrlUtils.validate_url(self.path):
- ExceptionCollector.appendException(
- ValidationError(message=missing_err_msg))
- return False
- else:
- response = requests.get(self.path)
- self.csar = BytesIO(response.content)
-
- # validate that it is a valid zip file
- if not zipfile.is_zipfile(self.csar):
- err_msg = (_('"%s" is not a valid zip file.') % self.path)
- ExceptionCollector.appendException(
- ValidationError(message=err_msg))
- return False
-
- # validate that it contains the metadata file in the correct location
- self.zfile = zipfile.ZipFile(self.csar, 'r')
- filelist = self.zfile.namelist()
- if 'TOSCA-Metadata/TOSCA.meta' not in filelist:
- err_msg = (_('"%s" is not a valid CSAR as it does not contain the '
- 'required file "TOSCA.meta" in the folder '
- '"TOSCA-Metadata".') % self.path)
- ExceptionCollector.appendException(
- ValidationError(message=err_msg))
- return False
-
- # validate that 'Entry-Definitions' property exists in TOSCA.meta
- data = self.zfile.read('TOSCA-Metadata/TOSCA.meta')
- invalid_yaml_err_msg = (_('The file "TOSCA-Metadata/TOSCA.meta" in '
- 'the CSAR "%s" does not contain valid YAML '
- 'content.') % self.path)
- try:
- meta = yaml.load(data)
- if type(meta) is dict:
- self.metadata = meta
- else:
- ExceptionCollector.appendException(
- ValidationError(message=invalid_yaml_err_msg))
- return False
- except yaml.YAMLError:
- ExceptionCollector.appendException(
- ValidationError(message=invalid_yaml_err_msg))
- return False
-
- if 'Entry-Definitions' not in self.metadata:
- err_msg = (_('The CSAR "%s" is missing the required metadata '
- '"Entry-Definitions" in '
- '"TOSCA-Metadata/TOSCA.meta".')
- % self.path)
- ExceptionCollector.appendException(
- ValidationError(message=err_msg))
- return False
-
- # validate that 'Entry-Definitions' metadata value points to an
- # existing file in the CSAR
- entry = self.metadata.get('Entry-Definitions')
- if entry and entry not in filelist:
- err_msg = (_('The "Entry-Definitions" file defined in the '
- 'CSAR "%s" does not exist.') % self.path)
- ExceptionCollector.appendException(
- ValidationError(message=err_msg))
- return False
-
- # validate that external references in the main template actually
- # exist and are accessible
- self._validate_external_references()
- return not self.error_caught
-
- def get_metadata(self):
- """Return the metadata dictionary."""
-
- # validate the csar if not already validated
- if not self.is_validated:
- self.validate()
-
- # return a copy to avoid changes overwrite the original
- return dict(self.metadata) if self.metadata else None
-
- def _get_metadata(self, key):
- if not self.is_validated:
- self.validate()
- return self.metadata.get(key)
-
- def get_author(self):
- return self._get_metadata('Created-By')
-
- def get_version(self):
- return self._get_metadata('CSAR-Version')
-
- def get_main_template(self):
- entry_def = self._get_metadata('Entry-Definitions')
- if entry_def in self.zfile.namelist():
- return entry_def
-
- def get_main_template_yaml(self):
- main_template = self.get_main_template()
- if main_template:
- data = self.zfile.read(main_template)
- invalid_tosca_yaml_err_msg = (
- _('The file "%(template)s" in the CSAR "%(csar)s" does not '
- 'contain valid TOSCA YAML content.') %
- {'template': main_template, 'csar': self.path})
- try:
- tosca_yaml = yaml.load(data)
- if type(tosca_yaml) is not dict:
- ExceptionCollector.appendException(
- ValidationError(message=invalid_tosca_yaml_err_msg))
- return tosca_yaml
- except Exception:
- ExceptionCollector.appendException(
- ValidationError(message=invalid_tosca_yaml_err_msg))
-
- def get_description(self):
- desc = self._get_metadata('Description')
- if desc is not None:
- return desc
-
- self.metadata['Description'] = \
- self.get_main_template_yaml().get('description')
- return self.metadata['Description']
-
- def decompress(self):
- if not self.is_validated:
- self.validate()
- self.temp_dir = tempfile.NamedTemporaryFile().name
- with zipfile.ZipFile(self.csar, "r") as zf:
- zf.extractall(self.temp_dir)
-
- def _validate_external_references(self):
- """Extracts files referenced in the main template
-
- These references are currently supported:
- * imports
- * interface implementations
- * artifacts
- """
- try:
- self.decompress()
- main_tpl_file = self.get_main_template()
- if not main_tpl_file:
- return
- main_tpl = self.get_main_template_yaml()
-
- if 'imports' in main_tpl:
- ImportsLoader(main_tpl['imports'],
- os.path.join(self.temp_dir, main_tpl_file))
-
- if 'topology_template' in main_tpl:
- topology_template = main_tpl['topology_template']
-
- if 'node_templates' in topology_template:
- node_templates = topology_template['node_templates']
-
- for node_template_key in node_templates:
- node_template = node_templates[node_template_key]
- if 'artifacts' in node_template:
- artifacts = node_template['artifacts']
- for artifact_key in artifacts:
- artifact = artifacts[artifact_key]
- if isinstance(artifact, six.string_types):
- self._validate_external_reference(
- main_tpl_file,
- artifact)
- elif isinstance(artifact, dict):
- if 'file' in artifact:
- self._validate_external_reference(
- main_tpl_file,
- artifact['file'])
- else:
- ExceptionCollector.appendException(
- ValueError(_('Unexpected artifact '
- 'definition for "%s".')
- % artifact_key))
- self.error_caught = True
- if 'interfaces' in node_template:
- interfaces = node_template['interfaces']
- for interface_key in interfaces:
- interface = interfaces[interface_key]
- for opertation_key in interface:
- operation = interface[opertation_key]
- if isinstance(operation, six.string_types):
- self._validate_external_reference(
- main_tpl_file,
- operation,
- False)
- elif isinstance(operation, dict):
- if 'implementation' in operation:
- self._validate_external_reference(
- main_tpl_file,
- operation['implementation'])
- finally:
- if self.temp_dir:
- shutil.rmtree(self.temp_dir)
-
- def _validate_external_reference(self, tpl_file, resource_file,
- raise_exc=True):
- """Verify that the external resource exists
-
- If resource_file is a URL verify that the URL is valid.
- If resource_file is a relative path verify that the path is valid
- considering base folder (self.temp_dir) and tpl_file.
- Note that in a CSAR resource_file cannot be an absolute path.
- """
- if UrlUtils.validate_url(resource_file):
- msg = (_('The resource at "%s" cannot be accessed.') %
- resource_file)
- try:
- if UrlUtils.url_accessible(resource_file):
- return
- else:
- ExceptionCollector.appendException(
- URLException(what=msg))
- self.error_caught = True
- except Exception:
- ExceptionCollector.appendException(
- URLException(what=msg))
- self.error_caught = True
-
- if os.path.isfile(os.path.join(self.temp_dir,
- os.path.dirname(tpl_file),
- resource_file)):
- return
-
- if raise_exc:
- ExceptionCollector.appendException(
- ValueError(_('The resource "%s" does not exist.')
- % resource_file))
- self.error_caught = True
-*/
-
-
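One plausible end-to-end flow for the CSAR entry points removed above; the archive path is hypothetical, and an ExceptionCollector is assumed to be registered through ThreadLocalsHolder before validation:

    import java.io.IOException;
    import java.util.LinkedHashMap;

    import org.openecomp.sdc.toscaparser.api.common.JToscaException;
    import org.openecomp.sdc.toscaparser.api.prereq.CSAR;

    class CsarUsageSketch {
        static void loadArchive() throws IOException, JToscaException {
            CSAR csar = new CSAR("/tmp/my_service.csar", true);       // true: local file, not a URL
            if (csar.validate()) {                                    // zip format, TOSCA.meta, external references
                csar.decompress();                                    // unpack to a fresh temp dir
                String entry = csar.getMainTemplate();                // value of "Entry-Definitions"
                LinkedHashMap<String, Object> mainYaml = csar.getMainTemplateYaml();
                // ... hand mainYaml to the rest of the parser ...
            }
            csar.cleanup();
        }
    }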
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/utils/CopyUtils.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/utils/CopyUtils.java
deleted file mode 100644
index db236e1..0000000
--- a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/utils/CopyUtils.java
+++ /dev/null
@@ -1,29 +0,0 @@
-package org.openecomp.sdc.toscaparser.api.utils;
-
-import java.util.ArrayList;
-import java.util.LinkedHashMap;
-import java.util.Map;
-
-public class CopyUtils {
-
- @SuppressWarnings("unchecked")
- public static Object copyLhmOrAl(Object src) {
- if(src instanceof LinkedHashMap) {
- LinkedHashMap<String,Object> dst = new LinkedHashMap<String,Object>();
- for(Map.Entry<String,Object> me: ((LinkedHashMap<String,Object>)src).entrySet()) {
- dst.put(me.getKey(),me.getValue());
- }
- return dst;
- }
- else if(src instanceof ArrayList) {
- ArrayList<Object> dst = new ArrayList<Object>();
- for(Object o: (ArrayList<Object>)src) {
- dst.add(o);
- }
- return dst;
- }
- else {
- return null;
- }
- }
-}
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/utils/DumpUtils.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/utils/DumpUtils.java
deleted file mode 100644
index 32c69cd..0000000
--- a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/utils/DumpUtils.java
+++ /dev/null
@@ -1,55 +0,0 @@
-package org.openecomp.sdc.toscaparser.api.utils;
-
-import java.util.ArrayList;
-import java.util.LinkedHashMap;
-import java.util.Map;
-
-public class DumpUtils {
-
- @SuppressWarnings("unchecked")
- public static void dumpYaml(Object yo,int level) {
- final String indent = " ";
- try {
- if(yo == null) {
- System.out.println("<null>");
- return;
- }
- String cname = yo.getClass().getSimpleName();
- System.out.print(cname);
- if(cname.equals("LinkedHashMap")) {
- LinkedHashMap<String,Object> lhm = (LinkedHashMap<String,Object>)yo;
- System.out.println();
- for(Map.Entry<String,Object> me: lhm.entrySet()) {
- System.out.print(indent.substring(0,level) + me.getKey() + ": ");
- dumpYaml(me.getValue(),level+2);
- }
- }
- else if(cname.equals("ArrayList")) {
- ArrayList<Object> al = (ArrayList<Object>)yo;
- System.out.println();
- for (int i=0; i<al.size(); i++) {
- System.out.format("%s[%d] ",indent.substring(0,level),i);
- dumpYaml(al.get(i),level+2);
- }
- }
- else if(cname.equals("String")) {
- System.out.println(" ==> \"" + (String)yo + "\"");
- }
- else if(cname.equals("Integer")) {
- System.out.println(" ==> " + (int)yo);
- }
- else if(cname.equals("Boolean")) {
- System.out.println(" ==> " + (boolean)yo);
- }
- else if(cname.equals("Double")) {
- System.out.println(" ==> " + (double)yo);
- }
- else {
- System.out.println(" !! unexpected type");
- }
- }
- catch(Exception e) {
- System.out.println("Exception!! " + e.getMessage());
- }
- }
-} \ No newline at end of file
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/utils/JToscaErrorCodes.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/utils/JToscaErrorCodes.java
deleted file mode 100644
index 354fef0..0000000
--- a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/utils/JToscaErrorCodes.java
+++ /dev/null
@@ -1,32 +0,0 @@
-package org.openecomp.sdc.toscaparser.api.utils;
-
-
-public enum JToscaErrorCodes {
- MISSING_META_FILE("JT1001"),
- INVALID_META_YAML_CONTENT("JT1002"),
- ENTRY_DEFINITION_NOT_DEFINED("JT1003"),
- MISSING_ENTRY_DEFINITION_FILE ("JT1004"),
- GENERAL_ERROR("JT1005"),
- PATH_NOT_VALID("JT1006"),
- CSAR_TOSCA_VALIDATION_ERROR("JT1007"),
- INVALID_CSAR_FORMAT("JT1008");
-
- private String value;
-
- private JToscaErrorCodes(String value) {
- this.value = value;
- }
-
- public String getValue() {
- return value;
- }
-
- public static JToscaErrorCodes getByCode(String code) {
- for(JToscaErrorCodes v : values()){
- if( v.getValue().equals(code)){
- return v;
- }
- }
- return null;
- }
-} \ No newline at end of file
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/utils/TOSCAVersionProperty.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/utils/TOSCAVersionProperty.java
deleted file mode 100644
index 6b3c1ce..0000000
--- a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/utils/TOSCAVersionProperty.java
+++ /dev/null
@@ -1,182 +0,0 @@
-package org.openecomp.sdc.toscaparser.api.utils;
-
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
-import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector;
-
-public class TOSCAVersionProperty {// test with functions/test_concat.yaml
-
- private String version;
-
- private static final String versionRe =
- "^(?<gMajorVersion>([0-9][0-9]*))" +
- "(\\.(?<gMinorVersion>([0-9][0-9]*)))?" +
- "(\\.(?<gFixVersion>([0-9][0-9]*)))?" +
- "(\\.(?<gQualifier>([0-9A-Za-z]+)))?" +
- "(\\-(?<gBuildVersion>[0-9])*)?$";
-
- private String minorVersion = null;
- private String majorVersion = null;
- private String fixVersion = null;
- private String qualifier = null;
- private String buildVersion = null;
-
-
- public TOSCAVersionProperty(Object _version) {
- version = _version.toString();
-
- if(version.equals("0") || version.equals("0.0") || version.equals("0.0.0")) {
- //log.warning(_('Version assumed as not provided'))
- version = "";
- return;
- }
-
- Pattern pattern = Pattern.compile(versionRe);
- Matcher matcher = pattern.matcher(version);
- if(!matcher.find()) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "InvalidTOSCAVersionPropertyException: " +
- "Value of TOSCA version property \"%s\" is invalid",
- version));
- return;
- }
- minorVersion = matcher.group("gMinorVersion");
- majorVersion = matcher.group("gMajorVersion");
- fixVersion = matcher.group("gFixVersion");
- qualifier = _validateQualifier(matcher.group("gQualifier"));
- buildVersion = _validateBuild(matcher.group("gBuildVersion"));
- _validateMajorVersion(majorVersion);
-
- }
-
- private String _validateMajorVersion(String value) {
- // Validate major version
-
- // Checks if only major version is provided and assumes
- // minor version as 0.
- // Eg: If version = 18, then it returns version = '18.0'
-
- if(minorVersion == null && buildVersion == null && !value.equals("0")) {
- //log.warning(_('Minor version assumed "0".'))
- version = version + "0";
- }
- return value;
- }
-
- private String _validateQualifier(String value) {
- // Validate qualifier
-
- // TOSCA version is invalid if a qualifier is present without the
- // fix version or with all of major, minor and fix version 0s.
-
- // For example, the following versions are invalid
- // 18.0.abc
- // 0.0.0.abc
-
- // constant-first equals() so absent (null) version parts cannot throw a NPE
- if((fixVersion == null && value != null) ||
- ("0".equals(minorVersion) && "0".equals(majorVersion) &&
- "0".equals(fixVersion) && value != null)) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "InvalidTOSCAVersionPropertyException: " +
- "Value of TOSCA version property \"%s\" is invalid",
- version));
- }
- return value;
- }
-
- private String _validateBuild(String value) {
- // Validate build version
-
- // TOSCA version is invalid if build version is present without the qualifier.
- // Eg: version = 18.0.0-1 is invalid.
-
- if(qualifier == null && value != null) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "InvalidTOSCAVersionPropertyException: " +
- "Value of TOSCA version property \"%s\" is invalid",
- version));
- }
- return value;
- }
-
- public Object getVersion() {
- return version;
- }
-
-}
-
-/*python
-
-class TOSCAVersionProperty(object):
-
- VERSION_RE = re.compile('^(?P<major_version>([0-9][0-9]*))'
- '(\.(?P<minor_version>([0-9][0-9]*)))?'
- '(\.(?P<fix_version>([0-9][0-9]*)))?'
- '(\.(?P<qualifier>([0-9A-Za-z]+)))?'
- '(\-(?P<build_version>[0-9])*)?$')
-
- def __init__(self, version):
- self.version = str(version)
- match = self.VERSION_RE.match(self.version)
- if not match:
- ExceptionCollector.appendException(
- InvalidTOSCAVersionPropertyException(what=(self.version)))
- return
- ver = match.groupdict()
- if self.version in ['0', '0.0', '0.0.0']:
- log.warning(_('Version assumed as not provided'))
- self.version = None
- self.minor_version = ver['minor_version']
- self.major_version = ver['major_version']
- self.fix_version = ver['fix_version']
- self.qualifier = self._validate_qualifier(ver['qualifier'])
- self.build_version = self._validate_build(ver['build_version'])
- self._validate_major_version(self.major_version)
-
- def _validate_major_version(self, value):
- """Validate major version
-
- Checks if only major version is provided and assumes
- minor version as 0.
- Eg: If version = 18, then it returns version = '18.0'
- """
-
- if self.minor_version is None and self.build_version is None and \
- value != '0':
- log.warning(_('Minor version assumed "0".'))
- self.version = '.'.join([value, '0'])
- return value
-
- def _validate_qualifier(self, value):
- """Validate qualifier
-
- TOSCA version is invalid if a qualifier is present without the
- fix version or with all of major, minor and fix version 0s.
-
- For example, the following versions are invalid
- 18.0.abc
- 0.0.0.abc
- """
- if (self.fix_version is None and value) or \
- (self.minor_version == self.major_version ==
- self.fix_version == '0' and value):
- ExceptionCollector.appendException(
- InvalidTOSCAVersionPropertyException(what=(self.version)))
- return value
-
- def _validate_build(self, value):
- """Validate build version
-
- TOSCA version is invalid if build version is present without the
- qualifier.
- Eg: version = 18.0.0-1 is invalid.
- """
- if not self.qualifier and value:
- ExceptionCollector.appendException(
- InvalidTOSCAVersionPropertyException(what=(self.version)))
- return value
-
- def get_version(self):
- return self.version
-*/ \ No newline at end of file
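To make the qualifier and build rules spelled out above concrete, a few illustrative version strings and how the class is expected to treat them (a registered ExceptionCollector is assumed for the invalid cases):

    import org.openecomp.sdc.toscaparser.api.utils.TOSCAVersionProperty;

    class VersionSketch {
        static void examples() {
            new TOSCAVersionProperty("18.0.3");        // valid: major.minor.fix
            new TOSCAVersionProperty("18");            // minor assumed, normalized to "18.0"
            new TOSCAVersionProperty("18.0.3.abc-1");  // valid: qualifier plus build version
            new TOSCAVersionProperty("18.0.abc");      // invalid: qualifier without a fix version
            new TOSCAVersionProperty("18.0.0-1");      // invalid: build version without a qualifier
        }
    }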
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/utils/ThreadLocalsHolder.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/utils/ThreadLocalsHolder.java
deleted file mode 100644
index 47ba972..0000000
--- a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/utils/ThreadLocalsHolder.java
+++ /dev/null
@@ -1,24 +0,0 @@
-package org.openecomp.sdc.toscaparser.api.utils;
-
-import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector;
-
-public class ThreadLocalsHolder {
-
- private static final ThreadLocal<ExceptionCollector> exceptionCollectorThreadLocal = new ThreadLocal<>();
-
- private ThreadLocalsHolder(){}
-
- public static ExceptionCollector getCollector() {
- return exceptionCollectorThreadLocal.get();
- }
-
- public static void setCollector(ExceptionCollector exceptionCollector) {
- cleanup();
- exceptionCollectorThreadLocal.set(exceptionCollector);
- }
-
- public static void cleanup(){
- exceptionCollectorThreadLocal.remove();
- }
-
-}
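The holder above is how the parser shares a single ExceptionCollector per thread; a typical guard pattern around a parse run (collector construction elided, since it is defined elsewhere in the codebase):

    import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector;
    import org.openecomp.sdc.toscaparser.api.utils.ThreadLocalsHolder;

    class CollectorScopeSketch {
        static void parseWithCollector(ExceptionCollector collector) {
            ThreadLocalsHolder.setCollector(collector);   // install before any validation runs
            try {
                // ... run template/CSAR validation; helpers call ThreadLocalsHolder.getCollector() ...
            } finally {
                ThreadLocalsHolder.cleanup();             // avoid leaking state across pooled threads
            }
        }
    }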
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/utils/UrlUtils.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/utils/UrlUtils.java
deleted file mode 100644
index 092f827..0000000
--- a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/utils/UrlUtils.java
+++ /dev/null
@@ -1,123 +0,0 @@
-package org.openecomp.sdc.toscaparser.api.utils;
-
-import java.io.IOException;
-import java.net.HttpURLConnection;
-import java.net.MalformedURLException;
-import java.net.URL;
-
-import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector;
-
-public class UrlUtils {
-
- public static boolean validateUrl(String sUrl) {
- // Validates whether the given path is a URL or not
-
- // If the given path includes a scheme (http, https, ftp, ...) and a net
- // location (a domain name such as www.github.com) it is validated as a URL
- try {
- URL url = new URL(sUrl);
- if(url.getProtocol().equals("file")) {
- return true;
- }
- return url.getAuthority() != null;
- }
- catch(MalformedURLException e) {
- return false;
- }
- }
-
- public static String joinUrl(String sUrl,String relativePath) {
- // Builds a new URL from the given URL and the relative path
-
- // Example:
- // url: http://www.github.com/openstack/heat
- // relative_path: heat-translator
- // - joined: http://www.github.com/openstack/heat-translator
- if(!validateUrl(sUrl)) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "ValueError: The URL \"%s\" is malformed",sUrl));
- }
- try {
- URL base = new URL(sUrl);
- return (new URL(base,relativePath)).toString();
- }
- catch(MalformedURLException e) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "ValueError: Joining URL \"%s\" and relative path \"%s\" caused an exception",sUrl,relativePath));
- return sUrl;
- }
- }
-
- public static boolean isUrlAccessible(String sUrl) {
- // Validates whether the given URL is accessible
-
- // Returns true if a HEAD request to the URL returns a 200 response code.
- // Otherwise, returns false.
- try {
- HttpURLConnection connection = (HttpURLConnection) new URL(sUrl).openConnection();
- connection.setRequestMethod("HEAD");
- int responseCode = connection.getResponseCode();
- return responseCode == 200;
- }
- catch(IOException e) {
- return false;
- }
- }
-
-}
-
-/*python
-
-from six.moves.urllib.parse import urljoin
-from six.moves.urllib.parse import urlparse
-from toscaparser.common.exception import ExceptionCollector
-from toscaparser.utils.gettextutils import _
-
-try:
- # Python 3.x
- import urllib.request as urllib2
-except ImportError:
- # Python 2.x
- import urllib2
-
-
-class UrlUtils(object):
-
- @staticmethod
- def validate_url(path):
- """Validates whether the given path is a URL or not.
-
- If the given path includes a scheme (http, https, ftp, ...) and a net
- location (a domain name such as www.github.com) it is validated as a
- URL.
- """
- parsed = urlparse(path)
- if parsed.scheme == 'file':
- # If the url uses the file scheme netloc will be ""
- return True
- else:
- return bool(parsed.scheme) and bool(parsed.netloc)
-
- @staticmethod
- def join_url(url, relative_path):
- """Builds a new URL from the given URL and the relative path.
-
- Example:
- url: http://www.github.com/openstack/heat
- relative_path: heat-translator
- - joined: http://www.github.com/openstack/heat-translator
- """
- if not UrlUtils.validate_url(url):
- ExceptionCollector.appendException(
- ValueError(_('"%s" is not a valid URL.') % url))
- return urljoin(url, relative_path)
-
- @staticmethod
- def url_accessible(url):
- """Validates whether the given URL is accessible.
-
- Returns true if the get call returns a 200 response code.
- Otherwise, returns false.
- """
- return urllib2.urlopen(url).getcode() == 200
-*/ \ No newline at end of file
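A few concrete calls for the URL helpers above, reusing the illustrative URLs from the comments:

    import org.openecomp.sdc.toscaparser.api.utils.UrlUtils;

    class UrlUtilsSketch {
        static void examples() {
            UrlUtils.validateUrl("http://www.github.com/openstack/heat");   // true: scheme plus authority
            UrlUtils.validateUrl("Definitions/service.yaml");               // false: relative path, no scheme
            // relative paths resolve against the parent path of the base URL:
            String joined = UrlUtils.joinUrl("http://www.github.com/openstack/heat", "heat-translator");
            // joined == "http://www.github.com/openstack/heat-translator"
        }
    }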
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/utils/ValidateUtils.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/utils/ValidateUtils.java
deleted file mode 100644
index 291316f..0000000
--- a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/api/utils/ValidateUtils.java
+++ /dev/null
@@ -1,409 +0,0 @@
-package org.openecomp.sdc.toscaparser.api.utils;
-
-import java.util.ArrayList;
-import java.util.Date;
-import java.util.LinkedHashMap;
-
-import org.openecomp.sdc.toscaparser.api.common.ExceptionCollector;
-
-public class ValidateUtils {
-
- private static final String RANGE_UNBOUNDED = "UNBOUNDED";
-
- public static Object strToNum(Object value) {
- // Convert a string representation of a number into a numeric type
- // TODO(TBD) we should not allow numeric values in, input should be str
- if(value instanceof Number) {
- return value;
- }
- if(!(value instanceof String)) {
- // neither a number nor a string representation of one
- return null;
- }
- try {
- return Integer.parseInt((String)value);
- }
- catch(NumberFormatException e) {
- // not an integer, fall through and try a float
- }
- try {
- return Float.parseFloat((String)value);
- }
- catch(Exception e) {
- // not a float either
- }
- return null;
- }
-
- public static Object validateNumeric(Object value) {
- if(!(value instanceof Number)) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "ValueError: \"%s\" is not a numeric",value.toString()));
- }
- return value;
- }
-
- public static Object validateInteger(Object value) {
- if(!(value instanceof Integer)) {
- // allow "true" and "false"
- if(value instanceof Boolean) {
- return (Boolean)value ? 1 : 0;
- }
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "ValueError: \"%s\" is not an integer",value.toString()));
- }
- return value;
- }
-
- public static Object validateFloat(Object value) {
- if(!(value instanceof Float || value instanceof Double)) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "ValueError: \"%s\" is not a float",value.toString()));
- }
- return value;
- }
-
- public static Object validateString(Object value) {
- if(!(value instanceof String)) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "ValueError: \'%s\' is not a string",value.toString()));
- }
- return value;
- }
-
- public static Object validateList(Object value) {
- if(!(value instanceof ArrayList)) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "ValueError: \"%s\" is not a list",value.toString()));
- }
- return value;
- }
-
-
- @SuppressWarnings("unchecked")
- public static Object validateRange(Object range) {
- // list class check
- validateList(range);
- // validate range list has a min and max
- if(range instanceof ArrayList && ((ArrayList<Object>)range).size() != 2) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "ValueError: \"%s\" is not a valid range",range.toString()));
- // too dangerous to continue...
- return range;
- }
- // validate min and max are numerics or the keyword UNBOUNDED
- boolean minTest = false;
- boolean maxTest = false;
- Object r0 = ((ArrayList<Object>)range).get(0);
- Object r1 = ((ArrayList<Object>)range).get(1);
-
- // a bound is valid if it is numeric or the keyword UNBOUNDED (handled just below)
- if((!(r0 instanceof Integer) && !(r0 instanceof Float) && !RANGE_UNBOUNDED.equals(r0)) ||
- (!(r1 instanceof Integer) && !(r1 instanceof Float) && !RANGE_UNBOUNDED.equals(r1))) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "ValueError: \"%s\" is not a valid range",range.toString()));
- // too dangerous to continue...
- return range;
- }
-
- Float min = 0.0F;
- Float max = 0.0F;
- if(r0 instanceof String && ((String)r0).equals(RANGE_UNBOUNDED)) {
- minTest = true;
- }
- else {
- min = r0 instanceof Integer ? ((Integer)r0).floatValue() : (Float)r0;
- }
- if(r1 instanceof String && ((String)r1).equals(RANGE_UNBOUNDED)) {
- maxTest = true;
- }
- else {
- max = r1 instanceof Integer ? ((Integer)r1).floatValue() : (Float)r1;
- }
-
- // validate the max > min (account for UNBOUNDED)
- if(!minTest && !maxTest) {
- // Note: min == max is allowed
- if(min > max) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "ValueError:\"%s\" is not a valid range",range.toString()));
- }
- }
- return range;
- }
-
- @SuppressWarnings("unchecked")
- public static Object validateValueInRange(Object value,Object range,String propName) {
- // verify all 3 are numeric and convert to Floats
- if(!(value instanceof Integer || value instanceof Float)) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "ValueError: validateInRange: \"%s\" is not a number",range.toString()));
- return value;
- }
- Float fval = value instanceof Integer ? ((Integer)value).floatValue() : (Float)value;
-
- //////////////////////////
- //"validateRange(range);"
- //////////////////////////
- // better safe than sorry...
- // validate that range list has a min and max
- if(range instanceof ArrayList && ((ArrayList<Object>)range).size() != 2) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "ValueError: \"%s\" is not a valid range",range.toString()));
- // too dangerous to continue...
- return value;
- }
- // validate min and max are numerics or the keyword UNBOUNDED
- boolean minTest = false;
- boolean maxTest = false;
- Object r0 = ((ArrayList<Object>)range).get(0);
- Object r1 = ((ArrayList<Object>)range).get(1);
-
- // a bound is valid if it is numeric or the keyword UNBOUNDED (handled just below)
- if((!(r0 instanceof Integer) && !(r0 instanceof Float) && !RANGE_UNBOUNDED.equals(r0)) ||
- (!(r1 instanceof Integer) && !(r1 instanceof Float) && !RANGE_UNBOUNDED.equals(r1))) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "ValueError: \"%s\" is not a valid range",range.toString()));
- // too dangerous to continue...
- return value;
- }
-
- Float min = 0.0F;
- Float max = 0.0F;
- if(r0 instanceof String && ((String)r0).equals(RANGE_UNBOUNDED)) {
- minTest = true;
- }
- else {
- min = r0 instanceof Integer ? ((Integer)r0).floatValue() : (Float)r0;
- }
- if(r1 instanceof String && ((String)r1).equals(RANGE_UNBOUNDED)) {
- maxTest = true;
- }
- else {
- max = r1 instanceof Integer ? ((Integer)r1).floatValue() : (Float)r1;
- }
-
- // validate the max > min (account for UNBOUNDED)
- if(!minTest && !maxTest) {
- // Note: min == max is allowed
- if(min > max) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "ValueError:\"%s\" is not a valid range",range.toString()));
- }
- }
- // finally...
- boolean bError = false;
- //Note: value is valid if equal to min
- if(!minTest) {
- if(fval < min) {
- bError = true;
- }
- }
- // Note: value is valid if equal to max
- if(!maxTest) {
- if(fval > max) {
- bError = true;
- }
- }
- if(bError) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "RangeValueError: Property \"%s\", \"%s\" not in range [\"%s\" - \"%s\"",
- propName,value.toString(),r0.toString(),r1.toString()));
- }
- return value;
- }
-
- public static Object validateMap(Object ob) {
- if(!(ob instanceof LinkedHashMap)) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "ValueError\"%s\" is not a map.",ob.toString()));
- }
- return ob;
- }
-
- public static Object validateBoolean(Object value) {
- if(value instanceof Boolean) {
- return value;
- }
- if(value instanceof String) {
- String normalized = ((String)value).toLowerCase();
- if(normalized.equals("true") || normalized.equals("false")) {
- return normalized.equals("true");
- }
- }
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "ValueError: \"%s\" is not a boolean",value.toString()));
- return value;
- }
-
- public static Object validateTimestamp(Object value) {
- /*
- try:
- # Note: we must return our own exception message
- # as dateutil's parser returns different types / values on
- # different systems. OSX, for example, returns a tuple
- # containing a different error message than Linux
- dateutil.parser.parse(value)
- except Exception as e:
- original_err_msg = str(e)
- log.error(original_err_msg)
- ExceptionCollector.appendException(
- ValueError(_('"%(val)s" is not a valid timestamp. "%(msg)s"') %
- {'val': value, 'msg': original_err_msg}))
- */
-
- // timestamps are loaded as Date objects by the YAML parser
- if(!(value instanceof Date)) {
- ThreadLocalsHolder.getCollector().appendException(String.format(
- "ValueError: \"%s\" is not a valid timestamp",
- value.toString()));
-
- }
- return value;
- }
-
-}
-
-/*python
-
-from toscaparser.elements import constraints
-from toscaparser.common.exception import ExceptionCollector
-from toscaparser.common.exception import InvalidTOSCAVersionPropertyException
-from toscaparser.common.exception import RangeValueError
-from toscaparser.utils.gettextutils import _
-
-log = logging.getLogger('tosca')
-
-RANGE_UNBOUNDED = 'UNBOUNDED'
-
-
-def str_to_num(value):
- '''Convert a string representation of a number into a numeric type.'''
- # TODO(TBD) we should not allow numeric values in, input should be str
- if isinstance(value, numbers.Number):
- return value
- try:
- return int(value)
- except ValueError:
- return float(value)
-
-
-def validate_numeric(value):
- if not isinstance(value, numbers.Number):
- ExceptionCollector.appendException(
- ValueError(_('"%s" is not a numeric.') % value))
- return value
-
-
-def validate_integer(value):
- if not isinstance(value, int):
- try:
- value = int(value)
- except Exception:
- ExceptionCollector.appendException(
- ValueError(_('"%s" is not an integer.') % value))
- return value
-
-
-def validate_float(value):
- if not isinstance(value, float):
- ExceptionCollector.appendException(
- ValueError(_('"%s" is not a float.') % value))
- return value
-
-
-def validate_string(value):
- if not isinstance(value, six.string_types):
- ExceptionCollector.appendException(
- ValueError(_('"%s" is not a string.') % value))
- return value
-
-
-def validate_list(value):
- if not isinstance(value, list):
- ExceptionCollector.appendException(
- ValueError(_('"%s" is not a list.') % value))
- return value
-
-
-def validate_range(range):
- # list class check
- validate_list(range)
- # validate range list has a min and max
- if len(range) != 2:
- ExceptionCollector.appendException(
- ValueError(_('"%s" is not a valid range.') % range))
- # validate min and max are numerics or the keyword UNBOUNDED
- min_test = max_test = False
- if not range[0] == RANGE_UNBOUNDED:
- min = validate_numeric(range[0])
- else:
- min_test = True
- if not range[1] == RANGE_UNBOUNDED:
- max = validate_numeric(range[1])
- else:
- max_test = True
- # validate the max > min (account for UNBOUNDED)
- if not min_test and not max_test:
- # Note: min == max is allowed
- if min > max:
- ExceptionCollector.appendException(
- ValueError(_('"%s" is not a valid range.') % range))
-
- return range
-
-
-def validate_value_in_range(value, range, prop_name):
- validate_numeric(value)
- validate_range(range)
-
- # Note: value is valid if equal to min
- if range[0] != RANGE_UNBOUNDED:
- if value < range[0]:
- ExceptionCollector.appendException(
- RangeValueError(pname=prop_name,
- pvalue=value,
- vmin=range[0],
- vmax=range[1]))
- # Note: value is valid if equal to max
- if range[1] != RANGE_UNBOUNDED:
- if value > range[1]:
- ExceptionCollector.appendException(
- RangeValueError(pname=prop_name,
- pvalue=value,
- vmin=range[0],
- vmax=range[1]))
- return value
-
-
-def validate_map(value):
- if not isinstance(value, collections.Mapping):
- ExceptionCollector.appendException(
- ValueError(_('"%s" is not a map.') % value))
- return value
-
-
-def validate_boolean(value):
- if isinstance(value, bool):
- return value
-
- if isinstance(value, str):
- normalised = value.lower()
- if normalised in ['true', 'false']:
- return normalised == 'true'
-
- ExceptionCollector.appendException(
- ValueError(_('"%s" is not a boolean.') % value))
-
-
-def validate_timestamp(value):
- try:
- # Note: we must return our own exception message
- # as dateutil's parser returns different types / values on
- # different systems. OSX, for example, returns a tuple
- # containing a different error message than Linux
- dateutil.parser.parse(value)
- except Exception as e:
- original_err_msg = str(e)
- log.error(original_err_msg)
- ExceptionCollector.appendException(
- ValueError(_('"%(val)s" is not a valid timestamp. "%(msg)s"') %
- {'val': value, 'msg': original_err_msg}))
- return
-
-*/ \ No newline at end of file
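For readers tracing what the removed helper did, here is a minimal standalone sketch of the in-range check; the class and method names are illustrative rather than jtosca API, but the logic mirrors the deleted validateInRange flow, including the UNBOUNDED keyword and inclusive bounds.

    import java.util.Arrays;
    import java.util.List;

    public class RangeCheckSketch {
        private static final String RANGE_UNBOUNDED = "UNBOUNDED";

        // True when value lies inside [min, max]; the string "UNBOUNDED" disables a bound.
        static boolean inRange(double value, List<?> range) {
            if (range == null || range.size() != 2) {
                return false;                       // a range needs exactly a min and a max
            }
            Object lo = range.get(0);
            Object hi = range.get(1);
            if (!RANGE_UNBOUNDED.equals(lo) && value < ((Number) lo).doubleValue()) {
                return false;                       // a value equal to min is still valid
            }
            if (!RANGE_UNBOUNDED.equals(hi) && value > ((Number) hi).doubleValue()) {
                return false;                       // a value equal to max is still valid
            }
            return true;
        }

        public static void main(String[] args) {
            System.out.println(inRange(8080, Arrays.asList(1, 65535)));     // true
            System.out.println(inRange(0, Arrays.asList(1, "UNBOUNDED")));  // false
        }
    }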
diff --git a/jtosca/src/main/resources/TOSCA_definition_1_0.yaml b/jtosca/src/main/resources/TOSCA_definition_1_0.yaml
deleted file mode 100644
index 554b7b6..0000000
--- a/jtosca/src/main/resources/TOSCA_definition_1_0.yaml
+++ /dev/null
@@ -1,967 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-##########################################################################
-# The content of this file reflects TOSCA Simple Profile in YAML version
-# 1.0.0. It describes the definition for TOSCA types including Node Type,
-# Relationship Type, Capability Type and Interfaces.
-##########################################################################
-tosca_definitions_version: tosca_simple_yaml_1_0
-
-##########################################################################
-# Node Type.
-# A Node Type is a reusable entity that defines the type of one or more
-# Node Templates.
-##########################################################################
-node_types:
- tosca.nodes.Root:
- description: >
- The TOSCA root node all other TOSCA base node types derive from.
- attributes:
- tosca_id:
- type: string
- tosca_name:
- type: string
- state:
- type: string
- capabilities:
- feature:
- type: tosca.capabilities.Node
- requirements:
- - dependency:
- capability: tosca.capabilities.Node
- node: tosca.nodes.Root
- relationship: tosca.relationships.DependsOn
- occurrences: [ 0, UNBOUNDED ]
- interfaces:
- Standard:
- type: tosca.interfaces.node.lifecycle.Standard
-
- tosca.nodes.Compute:
- derived_from: tosca.nodes.Root
- attributes:
- private_address:
- type: string
- public_address:
- type: string
- networks:
- type: map
- entry_schema:
- type: tosca.datatypes.network.NetworkInfo
- ports:
- type: map
- entry_schema:
- type: tosca.datatypes.network.PortInfo
- capabilities:
- host:
- type: tosca.capabilities.Container
- binding:
- type: tosca.capabilities.network.Bindable
- os:
- type: tosca.capabilities.OperatingSystem
- scalable:
- type: tosca.capabilities.Scalable
- endpoint:
- type: tosca.capabilities.Endpoint.Admin
- requirements:
- - local_storage:
- capability: tosca.capabilities.Attachment
- node: tosca.nodes.BlockStorage
- relationship: tosca.relationships.AttachesTo
- occurrences: [0, UNBOUNDED]
-
- tosca.nodes.SoftwareComponent:
- derived_from: tosca.nodes.Root
- properties:
- # domain-specific software component version
- component_version:
- type: version
- required: false
- description: >
- Software component version.
- admin_credential:
- type: tosca.datatypes.Credential
- required: false
- requirements:
- - host:
- capability: tosca.capabilities.Container
- node: tosca.nodes.Compute
- relationship: tosca.relationships.HostedOn
-
- tosca.nodes.DBMS:
- derived_from: tosca.nodes.SoftwareComponent
- properties:
- port:
- required: false
- type: integer
- description: >
- The port the DBMS service will listen to for data and requests.
- root_password:
- required: false
- type: string
- description: >
- The root password for the DBMS service.
- capabilities:
- host:
- type: tosca.capabilities.Container
- valid_source_types: [tosca.nodes.Database]
-
- tosca.nodes.Database:
- derived_from: tosca.nodes.Root
- properties:
- user:
- required: false
- type: string
- description: >
- User account name for DB administration
- port:
- required: false
- type: integer
- description: >
- The port the database service will use to listen for incoming data and
- requests.
- name:
- required: false
- type: string
- description: >
- The name of the database.
- password:
- required: false
- type: string
- description: >
- The password for the DB user account
- requirements:
- - host:
- capability: tosca.capabilities.Container
- node: tosca.nodes.DBMS
- relationship: tosca.relationships.HostedOn
- capabilities:
- database_endpoint:
- type: tosca.capabilities.Endpoint.Database
-
- tosca.nodes.WebServer:
- derived_from: tosca.nodes.SoftwareComponent
- capabilities:
- data_endpoint:
- type: tosca.capabilities.Endpoint
- admin_endpoint:
- type: tosca.capabilities.Endpoint.Admin
- host:
- type: tosca.capabilities.Container
- valid_source_types: [tosca.nodes.WebApplication]
-
- tosca.nodes.WebApplication:
- derived_from: tosca.nodes.Root
- properties:
- context_root:
- type: string
- required: false
- requirements:
- - host:
- capability: tosca.capabilities.Container
- node: tosca.nodes.WebServer
- relationship: tosca.relationships.HostedOn
- capabilities:
- app_endpoint:
- type: tosca.capabilities.Endpoint
-
- tosca.nodes.BlockStorage:
- derived_from: tosca.nodes.Root
- properties:
- size:
- type: scalar-unit.size
- constraints:
- - greater_or_equal: 1 MB
- volume_id:
- type: string
- required: false
- snapshot_id:
- type: string
- required: false
- attributes:
- volume_id:
- type: string
- capabilities:
- attachment:
- type: tosca.capabilities.Attachment
-
- tosca.nodes.network.Network:
- derived_from: tosca.nodes.Root
- description: >
- The TOSCA Network node represents a simple, logical network service.
- properties:
- ip_version:
- type: integer
- required: false
- default: 4
- constraints:
- - valid_values: [ 4, 6 ]
- description: >
- The IP version of the requested network. Valid values are 4 for ipv4
- or 6 for ipv6.
- cidr:
- type: string
- required: false
- description: >
- The cidr block of the requested network.
- start_ip:
- type: string
- required: false
- description: >
- The IP address to be used as the start of a pool of addresses within
- the full IP range derived from the cidr block.
- end_ip:
- type: string
- required: false
- description: >
- The IP address to be used as the end of a pool of addresses within
- the full IP range derived from the cidr block.
- gateway_ip:
- type: string
- required: false
- description: >
- The gateway IP address.
- network_name:
- type: string
- required: false
- description: >
- An identifier that represents an existing Network instance in the
- underlying cloud infrastructure or can be used as the name of the
- newly created network. If network_name is provided and no other
- properties are provided (with exception of network_id), then an
- existing network instance will be used. If network_name is provided
- alongside with more properties then a new network with this name will
- be created.
- network_id:
- type: string
- required: false
- description: >
- An identifier that represents an existing Network instance in the
- underlying cloud infrastructure. This property is mutually exclusive
- with all other properties except network_name. This can be used alone
- or together with network_name to identify an existing network.
- segmentation_id:
- type: string
- required: false
- description: >
- A segmentation identifier in the underlying cloud infrastructure.
- E.g. VLAN ID, GRE tunnel ID, etc..
- network_type:
- type: string
- required: false
- description: >
- It specifies the nature of the physical network in the underlying
- cloud infrastructure. Examples are flat, vlan, gre or vxlan.
- For flat and vlan types, physical_network should be provided too.
- physical_network:
- type: string
- required: false
- description: >
- It identifies the physical network on top of which the network is
- implemented, e.g. physnet1. This property is required if network_type
- is flat or vlan.
- dhcp_enabled:
- type: boolean
- required: false
- default: true
- description: >
- Indicates whether the DHCP service should be enabled on the network or not.
- capabilities:
- link:
- type: tosca.capabilities.network.Linkable
-
- tosca.nodes.network.Port:
- derived_from: tosca.nodes.Root
- description: >
- The TOSCA Port node represents a logical entity that associates between
- Compute and Network normative types. The Port node type effectively
- represents a single virtual NIC on the Compute node instance.
- properties:
- ip_address:
- type: string
- required: false
- description: >
- Allow the user to set a static IP.
- order:
- type: integer
- required: false
- default: 0
- constraints:
- - greater_or_equal: 0
- description: >
- The order of the NIC on the compute instance (e.g. eth2).
- is_default:
- type: boolean
- required: false
- default: false
- description: >
- If is_default=true this port will be used for the default gateway
- route. Only one port that is associated with a single compute node can
- be set as is_default=true.
- ip_range_start:
- type: string
- required: false
- description: >
- Defines the starting IP of a range to be allocated for the compute
- instances that are associated with this Port.
- ip_range_end:
- type: string
- required: false
- description: >
- Defines the ending IP of a range to be allocated for the compute
- instances that are associated with this Port.
- attributes:
- ip_address:
- type: string
- requirements:
- - binding:
- description: >
- Binding requirement expresses the relationship between Port and
- Compute nodes. Effectively it indicates that the Port will be
- attached to specific Compute node instance
- capability: tosca.capabilities.network.Bindable
- relationship: tosca.relationships.network.BindsTo
- node: tosca.nodes.Compute
- - link:
- description: >
- Link requirement expresses the relationship between Port and Network
- nodes. It indicates which network this port will connect to.
- capability: tosca.capabilities.network.Linkable
- relationship: tosca.relationships.network.LinksTo
- node: tosca.nodes.network.Network
-
- tosca.nodes.network.FloatingIP:
- derived_from: tosca.nodes.Root
- description: >
- The TOSCA FloatingIP node represents a floating IP that can associate to a Port.
- properties:
- floating_network:
- type: string
- required: true
- floating_ip_address:
- type: string
- required: false
- port_id:
- type: string
- required: false
- requirements:
- - link:
- capability: tosca.capabilities.network.Linkable
- relationship: tosca.relationships.network.LinksTo
- node: tosca.nodes.network.Port
-
- tosca.nodes.ObjectStorage:
- derived_from: tosca.nodes.Root
- description: >
- The TOSCA ObjectStorage node represents storage that provides the ability
- to store data as objects (or BLOBs of data) without consideration for the
- underlying filesystem or devices
- properties:
- name:
- type: string
- required: true
- description: >
- The logical name of the object store (or container).
- size:
- type: scalar-unit.size
- required: false
- constraints:
- - greater_or_equal: 0 GB
- description: >
- The requested initial storage size.
- maxsize:
- type: scalar-unit.size
- required: false
- constraints:
- - greater_or_equal: 0 GB
- description: >
- The requested maximum storage size.
- capabilities:
- storage_endpoint:
- type: tosca.capabilities.Endpoint
-
- tosca.nodes.LoadBalancer:
- derived_from: tosca.nodes.Root
- properties:
- algorithm:
- type: string
- required: false
- status: experimental
- capabilities:
- client:
- type: tosca.capabilities.Endpoint.Public
- occurrences: [0, UNBOUNDED]
- description: the Floating (IP) endpoint that clients on the public network can connect to
- requirements:
- - application:
- capability: tosca.capabilities.Endpoint
- relationship: tosca.relationships.RoutesTo
- occurrences: [0, UNBOUNDED]
- description: Connection to one or more load balanced applications
-
- tosca.nodes.Container.Application:
- derived_from: tosca.nodes.Root
- requirements:
- - host:
- capability: tosca.capabilities.Container
- node: tosca.nodes.Container.Runtime
- relationship: tosca.relationships.HostedOn
-
- tosca.nodes.Container.Runtime:
- derived_from: tosca.nodes.SoftwareComponent
- capabilities:
- host:
- type: tosca.capabilities.Container
- scalable:
- type: tosca.capabilities.Scalable
-
- tosca.nodes.Container.Application.Docker:
- derived_from: tosca.nodes.Container.Application
- requirements:
- - host:
- capability: tosca.capabilities.Container.Docker
-
-##########################################################################
-# Relationship Type.
-# A Relationship Type is a reusable entity that defines the type of one
-# or more relationships between Node Types or Node Templates.
-##########################################################################
-relationship_types:
- tosca.relationships.Root:
- description: >
- The TOSCA root Relationship Type all other TOSCA base Relationship Types
- derive from.
- attributes:
- tosca_id:
- type: string
- tosca_name:
- type: string
- interfaces:
- Configure:
- type: tosca.interfaces.relationship.Configure
-
- tosca.relationships.DependsOn:
- derived_from: tosca.relationships.Root
-
- tosca.relationships.HostedOn:
- derived_from: tosca.relationships.Root
- valid_target_types: [ tosca.capabilities.Container ]
-
- tosca.relationships.ConnectsTo:
- derived_from: tosca.relationships.Root
- valid_target_types: [ tosca.capabilities.Endpoint ]
- properties:
- credential:
- type: tosca.datatypes.Credential
- required: false
-
- tosca.relationships.AttachesTo:
- derived_from: tosca.relationships.Root
- valid_target_types: [ tosca.capabilities.Attachment ]
- properties:
- location:
- required: true
- type: string
- constraints:
- - min_length: 1
- device:
- required: false
- type: string
-
- tosca.relationships.RoutesTo:
- derived_from: tosca.relationships.ConnectsTo
- valid_target_types: [ tosca.capabilities.Endpoint ]
-
- tosca.relationships.network.LinksTo:
- derived_from: tosca.relationships.DependsOn
- valid_target_types: [ tosca.capabilities.network.Linkable ]
-
- tosca.relationships.network.BindsTo:
- derived_from: tosca.relationships.DependsOn
- valid_target_types: [ tosca.capabilities.network.Bindable ]
-
-##########################################################################
-# Capability Type.
-# A Capability Type is a reusable entity that describes a kind of
-# capability that a Node Type can declare to expose.
-##########################################################################
-capability_types:
- tosca.capabilities.Root:
- description: >
- The TOSCA root Capability Type all other TOSCA base Capability Types
- derive from.
-
- tosca.capabilities.Node:
- derived_from: tosca.capabilities.Root
-
- tosca.capabilities.Container:
- derived_from: tosca.capabilities.Root
- properties:
- num_cpus:
- required: false
- type: integer
- constraints:
- - greater_or_equal: 1
- cpu_frequency:
- required: false
- type: scalar-unit.frequency
- constraints:
- - greater_or_equal: 0.1 GHz
- disk_size:
- required: false
- type: scalar-unit.size
- constraints:
- - greater_or_equal: 0 MB
- mem_size:
- required: false
- type: scalar-unit.size
- constraints:
- - greater_or_equal: 0 MB
-
- tosca.capabilities.Endpoint:
- derived_from: tosca.capabilities.Root
- properties:
- protocol:
- type: string
- required: true
- default: tcp
- port:
- type: tosca.datatypes.network.PortDef
- required: false
- secure:
- type: boolean
- required: false
- default: false
- url_path:
- type: string
- required: false
- port_name:
- type: string
- required: false
- network_name:
- type: string
- required: false
- default: PRIVATE
- initiator:
- type: string
- required: false
- default: source
- constraints:
- - valid_values: [source, target, peer]
- ports:
- type: map
- required: false
- constraints:
- - min_length: 1
- entry_schema:
- type: tosca.datatypes.network.PortSpec
- attributes:
- ip_address:
- type: string
-
- tosca.capabilities.Endpoint.Admin:
- derived_from: tosca.capabilities.Endpoint
- properties:
- secure:
- type: boolean
- default: true
- constraints:
- - equal: true
-
- tosca.capabilities.Endpoint.Public:
- derived_from: tosca.capabilities.Endpoint
- properties:
- # Change the default network_name to use the first public network found
- network_name:
- type: string
- default: PUBLIC
- constraints:
- - equal: PUBLIC
- floating:
- description: >
- Indicates that the public address should be allocated from a pool of
- floating IPs that are associated with the network.
- type: boolean
- default: false
- status: experimental
- dns_name:
- description: The optional name to register with DNS
- type: string
- required: false
- status: experimental
-
- tosca.capabilities.Scalable:
- derived_from: tosca.capabilities.Root
- properties:
- min_instances:
- type: integer
- required: true
- default: 1
- description: >
- This property is used to indicate the minimum number of instances
- that should be created for the associated TOSCA Node Template by
- a TOSCA orchestrator.
- max_instances:
- type: integer
- required: true
- default: 1
- description: >
- This property is used to indicate the maximum number of instances
- that should be created for the associated TOSCA Node Template by
- a TOSCA orchestrator.
- default_instances:
- type: integer
- required: false
- description: >
- An optional property that indicates the requested default number
- of instances that should be the starting number of instances a
- TOSCA orchestrator should attempt to allocate.
- The value for this property MUST be in the range between the values
- set for min_instances and max_instances properties.
-
- tosca.capabilities.Endpoint.Database:
- derived_from: tosca.capabilities.Endpoint
-
- tosca.capabilities.Attachment:
- derived_from: tosca.capabilities.Root
-
- tosca.capabilities.network.Linkable:
- derived_from: tosca.capabilities.Root
- description: >
- A node type that includes the Linkable capability indicates that it can
- be pointed by tosca.relationships.network.LinksTo relationship type, which
- represents an association relationship between Port and Network node types.
-
- tosca.capabilities.network.Bindable:
- derived_from: tosca.capabilities.Root
- description: >
- A node type that includes the Bindable capability indicates that it can
- be pointed by tosca.relationships.network.BindsTo relationship type, which
- represents a network association relationship between Port and Compute node
- types.
-
- tosca.capabilities.OperatingSystem:
- derived_from: tosca.capabilities.Root
- properties:
- architecture:
- required: false
- type: string
- description: >
- The host Operating System (OS) architecture.
- type:
- required: false
- type: string
- description: >
- The host Operating System (OS) type.
- distribution:
- required: false
- type: string
- description: >
- The host Operating System (OS) distribution. Examples of valid values
- for a “type” of “Linux” would include:
- debian, fedora, rhel and ubuntu.
- version:
- required: false
- type: version
- description: >
- The host Operating System version.
-
- tosca.capabilities.Container.Docker:
- derived_from: tosca.capabilities.Container
- properties:
- version:
- type: list
- required: false
- entry_schema:
- type: version
- description: >
- The Docker version capability.
- publish_all:
- type: boolean
- default: false
- required: false
- description: >
- Indicates that all ports (ranges) listed in the dockerfile
- using the EXPOSE keyword should be published.
- publish_ports:
- type: list
- entry_schema:
- type: tosca.datatypes.network.PortSpec
- required: false
- description: >
- List of ports mappings from source (Docker container)
- to target (host) ports to publish.
- expose_ports:
- type: list
- entry_schema:
- type: tosca.datatypes.network.PortSpec
- required: false
- description: >
- List of ports mappings from source (Docker container) to expose
- to other Docker containers (not accessible outside host).
- volumes:
- type: list
- entry_schema:
- type: string
- required: false
- description: >
- The dockerfile VOLUME command which is used to enable access
- from the Docker container to a directory on the host machine.
- host_id:
- type: string
- required: false
- description: >
- The optional identifier of an existing host resource
- that should be used to run this container on.
- volume_id:
- type: string
- required: false
- description: >
- The optional identifier of an existing storage volume (resource)
- that should be used to create the container's mount point(s) on.
-
-##########################################################################
- # Interfaces Type.
- # The Interfaces element describes a list of one or more interface
- # definitions for a modelable entity (e.g., a Node or Relationship Type)
- # as defined within the TOSCA Simple Profile specification.
-##########################################################################
-interface_types:
- tosca.interfaces.node.lifecycle.Standard:
- create:
- description: Standard lifecycle create operation.
- configure:
- description: Standard lifecycle configure operation.
- start:
- description: Standard lifecycle start operation.
- stop:
- description: Standard lifecycle stop operation.
- delete:
- description: Standard lifecycle delete operation.
-
- tosca.interfaces.relationship.Configure:
- pre_configure_source:
- description: Operation to pre-configure the source endpoint.
- pre_configure_target:
- description: Operation to pre-configure the target endpoint.
- post_configure_source:
- description: Operation to post-configure the source endpoint.
- post_configure_target:
- description: Operation to post-configure the target endpoint.
- add_target:
- description: Operation to add a target node.
- remove_target:
- description: Operation to remove a target node.
- add_source:
- description: >
- Operation to notify the target node of a source node which
- is now available via a relationship.
- target_changed:
- description: >
- Operation to notify the source that some property or attribute of the
- target changed.
-
-##########################################################################
- # Data Type.
- # A Datatype is a complex data type declaration which contains other
- # complex or simple data types.
-##########################################################################
-data_types:
- tosca.datatypes.Root:
- description: >
- The TOSCA root Data Type all other TOSCA base Data Types derive from
-
- tosca.datatypes.network.NetworkInfo:
- derived_from: tosca.datatypes.Root
- properties:
- network_name:
- type: string
- network_id:
- type: string
- addresses:
- type: list
- entry_schema:
- type: string
-
- tosca.datatypes.network.PortInfo:
- derived_from: tosca.datatypes.Root
- properties:
- port_name:
- type: string
- port_id:
- type: string
- network_id:
- type: string
- mac_address:
- type: string
- addresses:
- type: list
- entry_schema:
- type: string
-
- tosca.datatypes.network.PortDef:
- derived_from: tosca.datatypes.Root
- type: integer
- constraints:
- - in_range: [ 1, 65535 ]
-
- tosca.datatypes.network.PortSpec:
- derived_from: tosca.datatypes.Root
- properties:
- protocol:
- type: string
- required: true
- default: tcp
- constraints:
- - valid_values: [ udp, tcp, igmp ]
- target:
- type: tosca.datatypes.network.PortDef
- required: false
- target_range:
- type: range
- required: false
- constraints:
- - in_range: [ 1, 65535 ]
- source:
- type: tosca.datatypes.network.PortDef
- required: false
- source_range:
- type: range
- required: false
- constraints:
- - in_range: [ 1, 65535 ]
-
- tosca.datatypes.Credential:
- derived_from: tosca.datatypes.Root
- properties:
- protocol:
- type: string
- required: false
- token_type:
- type: string
- default: password
- required: true
- token:
- type: string
- required: true
- keys:
- type: map
- entry_schema:
- type: string
- required: false
- user:
- type: string
- required: false
-
-##########################################################################
- # Artifact Type.
- # An Artifact Type is a reusable entity that defines the type of one or more
- # files which Node Types or Node Templates can have dependent relationships
- # and used during operations such as during installation or deployment.
-##########################################################################
-artifact_types:
- tosca.artifacts.Root:
- description: >
- The TOSCA Artifact Type all other TOSCA Artifact Types derive from
- properties:
- version: version
-
- tosca.artifacts.File:
- derived_from: tosca.artifacts.Root
-
- tosca.artifacts.Deployment:
- derived_from: tosca.artifacts.Root
- description: TOSCA base type for deployment artifacts
-
- tosca.artifacts.Deployment.Image:
- derived_from: tosca.artifacts.Deployment
-
- tosca.artifacts.Deployment.Image.VM:
- derived_from: tosca.artifacts.Deployment.Image
-
- tosca.artifacts.Implementation:
- derived_from: tosca.artifacts.Root
- description: TOSCA base type for implementation artifacts
-
- tosca.artifacts.Implementation.Bash:
- derived_from: tosca.artifacts.Implementation
- description: Script artifact for the Unix Bash shell
- mime_type: application/x-sh
- file_ext: [ sh ]
-
- tosca.artifacts.Implementation.Python:
- derived_from: tosca.artifacts.Implementation
- description: Artifact for the interpreted Python language
- mime_type: application/x-python
- file_ext: [ py ]
-
- tosca.artifacts.Deployment.Image.Container.Docker:
- derived_from: tosca.artifacts.Deployment.Image
- description: Docker container image
-
- tosca.artifacts.Deployment.Image.VM.ISO:
- derived_from: tosca.artifacts.Deployment.Image
- description: Virtual Machine (VM) image in ISO disk format
- mime_type: application/octet-stream
- file_ext: [ iso ]
-
- tosca.artifacts.Deployment.Image.VM.QCOW2:
- derived_from: tosca.artifacts.Deployment.Image
- description: Virtual Machine (VM) image in QCOW v2 standard disk format
- mime_type: application/octet-stream
- file_ext: [ qcow2 ]
-
-##########################################################################
- # Policy Type.
- # TOSCA Policy Types represent logical grouping of TOSCA nodes that have
- # an implied relationship and need to be orchestrated or managed together
- # to achieve some result.
-##########################################################################
-policy_types:
- tosca.policies.Root:
- description: The TOSCA Policy Type all other TOSCA Policy Types derive from.
-
- tosca.policies.Placement:
- derived_from: tosca.policies.Root
- description: The TOSCA Policy Type definition that is used to govern
- placement of TOSCA nodes or groups of nodes.
-
- tosca.policies.Scaling:
- derived_from: tosca.policies.Root
- description: The TOSCA Policy Type definition that is used to govern
- scaling of TOSCA nodes or groups of nodes.
-
- tosca.policies.Monitoring:
- derived_from: tosca.policies.Root
- description: The TOSCA Policy Type definition that is used to govern
- monitoring of TOSCA nodes or groups of nodes.
-
- tosca.policies.Update:
- derived_from: tosca.policies.Root
- description: The TOSCA Policy Type definition that is used to govern
- update of TOSCA nodes or groups of nodes.
-
- tosca.policies.Performance:
- derived_from: tosca.policies.Root
- description: The TOSCA Policy Type definition that is used to declare
- performance requirements for TOSCA nodes or groups of nodes.
-
-##########################################################################
- # Group Type.
- # Group Type represents logical grouping of TOSCA nodes that have an
- # implied membership relationship and may need to be orchestrated or
- # managed together to achieve some result.
-##########################################################################
-group_types:
- tosca.groups.Root:
- description: The TOSCA Group Type all other TOSCA Group Types derive from
- interfaces:
- Standard:
- type: tosca.interfaces.node.lifecycle.Standard
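A quick way to sanity-check a simple-profile definitions file like the one removed above is to load it and list its top-level type sections. The sketch below assumes SnakeYAML is on the classpath; the class name and file path are illustrative and not part of the jtosca code base.

    import java.io.FileInputStream;
    import java.io.InputStream;
    import java.util.Map;
    import org.yaml.snakeyaml.Yaml;

    public class DefinitionPeek {
        @SuppressWarnings("unchecked")
        public static void main(String[] args) throws Exception {
            // Point this at any TOSCA simple-profile definitions file (path is illustrative).
            try (InputStream in = new FileInputStream("TOSCA_definition_1_0.yaml")) {
                Map<String, Object> defs = (Map<String, Object>) new Yaml().load(in);
                System.out.println(defs.get("tosca_definitions_version"));
                Map<String, Object> nodeTypes = (Map<String, Object>) defs.get("node_types");
                nodeTypes.keySet().forEach(System.out::println);   // e.g. tosca.nodes.Compute
            }
        }
    }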
diff --git a/jtosca/src/main/resources/extensions/nfv/TOSCA_nfv_definition_1_0.yaml b/jtosca/src/main/resources/extensions/nfv/TOSCA_nfv_definition_1_0.yaml
deleted file mode 100644
index 365d70e..0000000
--- a/jtosca/src/main/resources/extensions/nfv/TOSCA_nfv_definition_1_0.yaml
+++ /dev/null
@@ -1,240 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-##########################################################################
-# The content of this file reflects TOSCA NFV Profile in YAML version
-# 1.0.0. It describes the definition for TOSCA NFV types including Node Type,
-# Relationship Type, Capability Type and Interfaces.
-##########################################################################
-tosca_definitions_version: tosca_simple_profile_for_nfv_1_0_0
-
-##########################################################################
-# Node Type.
-# A Node Type is a reusable entity that defines the type of one or more
-# Node Templates.
-##########################################################################
-node_types:
- tosca.nodes.nfv.VNF:
- derived_from: tosca.nodes.Root # Or should this be its own top-level type?
- properties:
- id:
- type: string
- description: ID of this VNF
- vendor:
- type: string
- description: name of the vendor who generated this VNF
- version:
- type: version
- description: version of the software for this VNF
- requirements:
- - virtualLink:
- capability: tosca.capabilities.nfv.VirtualLinkable
- relationship: tosca.relationships.nfv.VirtualLinksTo
- node: tosca.nodes.nfv.VL
-
- tosca.nodes.nfv.VDU:
- derived_from: tosca.nodes.Compute
- capabilities:
- high_availability:
- type: tosca.capabilities.nfv.HA
- virtualbinding:
- type: tosca.capabilities.nfv.VirtualBindable
- monitoring_parameter:
- type: tosca.capabilities.nfv.Metric
- requirements:
- - high_availability:
- capability: tosca.capabilities.nfv.HA
- relationship: tosca.relationships.nfv.HA
- node: tosca.nodes.nfv.VDU
- occurrences: [ 0, 1 ]
-
- tosca.nodes.nfv.CP:
- derived_from: tosca.nodes.network.Port
- properties:
- type:
- type: string
- required: false
- requirements:
- - virtualLink:
- capability: tosca.capabilities.nfv.VirtualLinkable
- relationship: tosca.relationships.nfv.VirtualLinksTo
- node: tosca.nodes.nfv.VL
- - virtualBinding:
- capability: tosca.capabilities.nfv.VirtualBindable
- relationship: tosca.relationships.nfv.VirtualBindsTo
- node: tosca.nodes.nfv.VDU
- attributes:
- address:
- type: string
-
- tosca.nodes.nfv.VL:
- derived_from: tosca.nodes.network.Network
- properties:
- vendor:
- type: string
- required: true
- description: name of the vendor who generated this VL
- capabilities:
- virtual_linkable:
- type: tosca.capabilities.nfv.VirtualLinkable
-
- tosca.nodes.nfv.VL.ELine:
- derived_from: tosca.nodes.nfv.VL
- capabilities:
- virtual_linkable:
- occurrences: 2
-
- tosca.nodes.nfv.VL.ELAN:
- derived_from: tosca.nodes.nfv.VL
-
- tosca.nodes.nfv.VL.ETree:
- derived_from: tosca.nodes.nfv.VL
-
- tosca.nodes.nfv.FP:
- derived_from: tosca.nodes.Root
- properties:
- policy:
- type: string
- required: false
- description: policy to apply to this forwarding path
- requirements:
- - forwarder:
- capability: tosca.capabilities.nfv.Forwarder
- relationship: tosca.relationships.nfv.ForwardsTo
-
-##########################################################################
-# Relationship Type.
-# A Relationship Type is a reusable entity that defines the type of one
-# or more relationships between Node Types or Node Templates.
-##########################################################################
-
-relationship_types:
- tosca.relationships.nfv.VirtualLinksTo:
- derived_from: tosca.relationships.network.LinksTo
- valid_target_types: [ tosca.capabilities.nfv.VirtualLinkable ]
-
- tosca.relationships.nfv.VirtualBindsTo:
- derived_from: tosca.relationships.network.BindsTo
- valid_target_types: [ tosca.capabilities.nfv.VirtualBindable ]
-
- tosca.relationships.nfv.HA:
- derived_from: tosca.relationships.Root
- valid_target_types: [ tosca.capabilities.nfv.HA ]
-
- tosca.relationships.nfv.Monitor:
- derived_from: tosca.relationships.ConnectsTo
- valid_target_types: [ tosca.capabilities.nfv.Metric ]
-
- tosca.relationships.nfv.ForwardsTo:
- derived_from: tosca.relationships.Root
- valid_target_types: [ tosca.capabilities.nfv.Forwarder ]
-
-##########################################################################
-# Capability Type.
-# A Capability Type is a reusable entity that describes a kind of
-# capability that a Node Type can declare to expose.
-##########################################################################
-
-capability_types:
- tosca.capabilities.nfv.VirtualLinkable:
- derived_from: tosca.capabilities.network.Linkable
-
- tosca.capabilities.nfv.VirtualBindable:
- derived_from: tosca.capabilities.network.Bindable
-
- tosca.capabilities.nfv.HA:
- derived_from: tosca.capabilities.Root
- valid_source_types: [ tosca.nodes.nfv.VDU ]
-
- tosca.capabilities.nfv.HA.ActiveActive:
- derived_from: tosca.capabilities.nfv.HA
-
- tosca.capabilities.nfv.HA.ActivePassive:
- derived_from: tosca.capabilities.nfv.HA
-
- tosca.capabilities.nfv.Metric:
- derived_from: tosca.capabilities.Root
-
- tosca.capabilities.nfv.Forwarder:
- derived_from: tosca.capabilities.Root
-
-##########################################################################
- # Interfaces Type.
- # The Interfaces element describes a list of one or more interface
- # definitions for a modelable entity (e.g., a Node or Relationship Type)
- # as defined within the TOSCA Simple Profile specification.
-##########################################################################
-
-##########################################################################
- # Data Type.
- # A Datatype is a complex data type declaration which contains other
- # complex or simple data types.
-##########################################################################
-
-##########################################################################
- # Artifact Type.
- # An Artifact Type is a reusable entity that defines the type of one or more
- # files which Node Types or Node Templates can have dependent relationships
- # and used during operations such as during installation or deployment.
-##########################################################################
-
-##########################################################################
- # Policy Type.
- # TOSCA Policy Types represent logical grouping of TOSCA nodes that have
- # an implied relationship and need to be orchestrated or managed together
- # to achieve some result.
-##########################################################################
-
-##########################################################################
- # Group Type
- #
-##########################################################################
-group_types:
- tosca.groups.nfv.VNFFG:
- derived_from: tosca.groups.Root
-
- properties:
- vendor:
- type: string
- required: true
- description: name of the vendor who generated this VNFFG
-
- version:
- type: string
- required: true
- description: version of this VNFFG
-
- number_of_endpoints:
- type: integer
- required: true
- description: count of the external endpoints included in this VNFFG
-
- dependent_virtual_link:
- type: list
- entry_schema:
- type: string
- required: true
- description: Reference to a VLD used in this Forwarding Graph
-
- connection_point:
- type: list
- entry_schema:
- type: string
- required: true
- description: Reference to Connection Points forming the VNFFG
-
- constituent_vnfs:
- type: list
- entry_schema:
- type: string
- required: true
- description: Reference to a list of VNFD used in this VNF Forwarding Graph
diff --git a/jtosca/src/main/resources/extensions/nfv/nfv.py b/jtosca/src/main/resources/extensions/nfv/nfv.py
deleted file mode 100644
index 0c7c2b9..0000000
--- a/jtosca/src/main/resources/extensions/nfv/nfv.py
+++ /dev/null
@@ -1,19 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-# VERSION and DEFS_FILE are required for all extensions
-
-VERSION = 'tosca_simple_profile_for_nfv_1_0_0'
-
-DEFS_FILE = "TOSCA_nfv_definition_1_0.yaml"
-
-SECTIONS = ('metadata',)
diff --git a/jtosca/src/test/java/org.openecomp.sdc.toscaparser/JToscaMetadataParse.java b/jtosca/src/test/java/org.openecomp.sdc.toscaparser/JToscaMetadataParse.java
deleted file mode 100644
index 584a0fd..0000000
--- a/jtosca/src/test/java/org.openecomp.sdc.toscaparser/JToscaMetadataParse.java
+++ /dev/null
@@ -1,26 +0,0 @@
-package org.openecomp.sdc.toscaparser;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-
-import java.io.File;
-import java.util.LinkedHashMap;
-
-import org.junit.Test;
-import org.openecomp.sdc.toscaparser.api.ToscaTemplate;
-import org.openecomp.sdc.toscaparser.api.common.JToscaException;
-
-public class JToscaMetadataParse {
-
- @Test
- public void testMetadataParsedCorrectly() throws JToscaException {
- String fileStr = JToscaMetadataParse.class.getClassLoader().getResource("csars/csar_hello_world.csar").getFile();
- File file = new File(fileStr);
- ToscaTemplate toscaTemplate = new ToscaTemplate(file.getAbsolutePath(), null, true, null);
- LinkedHashMap<String, Object> metadataProperties = toscaTemplate.getMetaProperties("TOSCA.meta");
- assertNotNull(metadataProperties);
- Object entryDefinition = metadataProperties.get("Entry-Definitions");
- assertNotNull(entryDefinition);
- assertEquals("tosca_helloworld.yaml", entryDefinition);
- }
-}
diff --git a/jtosca/src/test/resources/csars/csar_hello_world.csar b/jtosca/src/test/resources/csars/csar_hello_world.csar
deleted file mode 100644
index 43ffbbc..0000000
--- a/jtosca/src/test/resources/csars/csar_hello_world.csar
+++ /dev/null
Binary files differ
diff --git a/jtosca/src/test/resources/csars/service-ServiceFdnt-csar.csar b/jtosca/src/test/resources/csars/service-ServiceFdnt-csar.csar
deleted file mode 100644
index 983dc9b..0000000
--- a/jtosca/src/test/resources/csars/service-ServiceFdnt-csar.csar
+++ /dev/null
Binary files differ