aboutsummaryrefslogtreecommitdiffstats
path: root/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements
diff options
context:
space:
mode:
Diffstat (limited to 'jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements')
-rw-r--r--jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/ArtifactTypeDef.java105
-rw-r--r--jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/AttributeDef.java40
-rw-r--r--jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/CapabilityTypeDef.java222
-rw-r--r--jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/DataType.java116
-rw-r--r--jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/EntityType.java419
-rw-r--r--jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/GroupType.java214
-rw-r--r--jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/InterfacesDef.java227
-rw-r--r--jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/Metadata.java35
-rw-r--r--jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/NodeType.java519
-rw-r--r--jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/PolicyType.java289
-rw-r--r--jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/PortSpec.java159
-rw-r--r--jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/PropertyDef.java230
-rw-r--r--jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/RelationshipType.java102
-rw-r--r--jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/ScalarUnit.java261
-rw-r--r--jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/ScalarUnitFrequency.java14
-rw-r--r--jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/ScalarUnitSize.java19
-rw-r--r--jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/ScalarUnitTime.java17
-rw-r--r--jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/StatefulEntityType.java220
-rw-r--r--jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/TypeValidation.java147
-rw-r--r--jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/constraints/Constraint.java237
-rw-r--r--jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/constraints/Equal.java61
-rw-r--r--jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/constraints/GreaterOrEqual.java112
-rw-r--r--jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/constraints/GreaterThan.java101
-rw-r--r--jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/constraints/InRange.java169
-rw-r--r--jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/constraints/Length.java78
-rw-r--r--jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/constraints/LessOrEqual.java105
-rw-r--r--jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/constraints/LessThan.java103
-rw-r--r--jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/constraints/MaxLength.java89
-rw-r--r--jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/constraints/MinLength.java89
-rw-r--r--jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/constraints/Pattern.java95
-rw-r--r--jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/constraints/Schema.java276
-rw-r--r--jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/constraints/ValidValues.java84
32 files changed, 4954 insertions, 0 deletions
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/ArtifactTypeDef.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/ArtifactTypeDef.java
new file mode 100644
index 0000000..fda1a3e
--- /dev/null
+++ b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/ArtifactTypeDef.java
@@ -0,0 +1,105 @@
+package org.openecomp.sdc.toscaparser.elements;
+
+import java.util.LinkedHashMap;
+
+public class ArtifactTypeDef extends StatefulEntityType {
+
+ private String type;
+ private LinkedHashMap<String,Object> customDef;
+ private LinkedHashMap<String,Object> properties;
+ private LinkedHashMap<String,Object> parentArtifacts;
+
+
+
+ public ArtifactTypeDef(String atype,LinkedHashMap<String,Object> _customDef) {
+ super(atype,ARTIFACT_PREFIX,_customDef);
+
+ type = atype;
+ customDef = _customDef;
+ properties = null;
+ if(defs != null) {
+ properties = (LinkedHashMap<String,Object>)defs.get(PROPERTIES);
+ }
+ parentArtifacts = _getParentArtifacts();
+ }
+
+ private LinkedHashMap<String,Object> _getParentArtifacts() {
+ LinkedHashMap<String,Object> artifacts = new LinkedHashMap<>();
+ String parentArtif = null;
+ if(getParentType() != null) {
+ parentArtif = getParentType().getType();
+ }
+ if(parentArtif != null && !parentArtif.isEmpty()) {
+ while(!parentArtif.equals("tosca.artifacts.Root")) {
+ Object ob = TOSCA_DEF.get(parentArtif);
+ artifacts.put(parentArtif,ob);
+ parentArtif =
+ (String)((LinkedHashMap<String,Object>)ob).get("derived_from");
+ }
+ }
+ return artifacts;
+ }
+
+ public ArtifactTypeDef getParentType() {
+ // Return a artifact entity from which this entity is derived
+ if(defs == null) {
+ return null;
+ }
+ String partifactEntity = derivedFrom(defs);
+ if(partifactEntity != null) {
+ return new ArtifactTypeDef(partifactEntity,customDef);
+ }
+ return null;
+ }
+
+ public Object getArtifact(String name) {
+ // Return the definition of an artifact field by name
+ if(defs != null) {
+ return defs.get(name);
+ }
+ return null;
+ }
+
+ public String getType() {
+ return type;
+ }
+
+}
+
+/*python
+class ArtifactTypeDef(StatefulEntityType):
+ '''TOSCA built-in artifacts type.'''
+
+ def __init__(self, atype, custom_def=None):
+ super(ArtifactTypeDef, self).__init__(atype, self.ARTIFACT_PREFIX,
+ custom_def)
+ self.type = atype
+ self.custom_def = custom_def
+ self.properties = None
+ if self.PROPERTIES in self.defs:
+ self.properties = self.defs[self.PROPERTIES]
+ self.parent_artifacts = self._get_parent_artifacts()
+
+ def _get_parent_artifacts(self):
+ artifacts = {}
+ parent_artif = self.parent_type.type if self.parent_type else None
+ if parent_artif:
+ while parent_artif != 'tosca.artifacts.Root':
+ artifacts[parent_artif] = self.TOSCA_DEF[parent_artif]
+ parent_artif = artifacts[parent_artif]['derived_from']
+ return artifacts
+
+ @property
+ def parent_type(self):
+ '''Return a artifact entity from which this entity is derived.'''
+ if not hasattr(self, 'defs'):
+ return None
+ partifact_entity = self.derived_from(self.defs)
+ if partifact_entity:
+ return ArtifactTypeDef(partifact_entity, self.custom_def)
+
+ def get_artifact(self, name):
+ '''Return the definition of an artifact field by name.'''
+ if name in self.defs:
+ return self.defs[name]
+*/ \ No newline at end of file
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/AttributeDef.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/AttributeDef.java
new file mode 100644
index 0000000..63c1d63
--- /dev/null
+++ b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/AttributeDef.java
@@ -0,0 +1,40 @@
+package org.openecomp.sdc.toscaparser.elements;
+
+import java.util.LinkedHashMap;
+
public class AttributeDef {
	// TOSCA built-in Attribute type.
	// Immutable holder for an attribute definition: its name, its (optional)
	// current value and its (optional) schema map.

	private final String attrName;
	private final Object attrValue;
	private final LinkedHashMap<String,Object> attrSchema;

	public AttributeDef(String adName, Object adValue, LinkedHashMap<String,Object> adSchema) {
		this.attrName = adName;
		this.attrValue = adValue;
		this.attrSchema = adSchema;
	}

	// Attribute name as declared in the type definition
	public String getName() {
		return attrName;
	}

	// Attribute value; may be null when no value was supplied
	public Object getValue() {
		return attrValue;
	}

	// Raw schema map of the attribute; may be null
	public LinkedHashMap<String,Object> getSchema() {
		return attrSchema;
	}
}
+
+/*python
+
+class AttributeDef(object):
+ '''TOSCA built-in Attribute type.'''
+
+ def __init__(self, name, value=None, schema=None):
+ self.name = name
+ self.value = value
+ self.schema = schema
+*/ \ No newline at end of file
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/CapabilityTypeDef.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/CapabilityTypeDef.java
new file mode 100644
index 0000000..7ab3a9c
--- /dev/null
+++ b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/CapabilityTypeDef.java
@@ -0,0 +1,222 @@
+package org.openecomp.sdc.toscaparser.elements;
+
+import java.util.ArrayList;
+import java.util.LinkedHashMap;
+import java.util.Map;
+
+import org.openecomp.sdc.toscaparser.elements.PropertyDef;
+
+public class CapabilityTypeDef extends StatefulEntityType {
+ // TOSCA built-in capabilities type
+
+ private static final String TOSCA_TYPEURI_CAPABILITY_ROOT = "tosca.capabilities.Root";
+
+ private String name;
+ private String nodetype;
+ private LinkedHashMap<String,Object> customDef;
+ private LinkedHashMap<String,Object> properties;
+ private LinkedHashMap<String,Object> parentCapabilities;
+
+ @SuppressWarnings("unchecked")
+ public CapabilityTypeDef(String cname,String ctype,String ntype,LinkedHashMap<String,Object> ccustomDef) {
+ super(ctype,CAPABILITY_PREFIX,ccustomDef);
+
+ name = cname;
+ nodetype = ntype;
+ properties = null;
+ customDef = ccustomDef;
+ if(defs != null) {
+ properties = (LinkedHashMap<String,Object>)defs.get(PROPERTIES);
+ }
+ parentCapabilities = _getParentCapabilities(customDef);
+ }
+
+ @SuppressWarnings("unchecked")
+ public ArrayList<PropertyDef> getPropertiesDefObjects () {
+ // Return a list of property definition objects
+ ArrayList<PropertyDef> propsdefs = new ArrayList<>();
+ LinkedHashMap<String,Object> parentProperties = new LinkedHashMap<>();
+ if(parentCapabilities != null) {
+ for(Map.Entry<String,Object> me: parentCapabilities.entrySet()) {
+ parentProperties.put(me.getKey(),((LinkedHashMap<String,Object>)me.getValue()).get("properties"));
+ }
+ }
+ if(properties != null) {
+ for(Map.Entry<String,Object> me: properties.entrySet()) {
+ propsdefs.add(new PropertyDef(me.getKey(),null,(LinkedHashMap<String,Object>)me.getValue()));
+ }
+ }
+ if(parentProperties != null) {
+ for(Map.Entry<String,Object> me: parentProperties.entrySet()) {
+ LinkedHashMap<String,Object> props = (LinkedHashMap<String,Object>)me.getValue();
+ for(Map.Entry<String,Object> pe: props.entrySet()) {
+ String prop = pe.getKey();
+ LinkedHashMap<String,Object> schema = (LinkedHashMap<String,Object>)pe.getValue();
+ // add parent property if not overridden by children type
+ if(properties == null || properties.get(prop) == null) {
+ propsdefs.add(new PropertyDef(prop, null, schema));
+ }
+ }
+ }
+ }
+ return propsdefs;
+ }
+
+ public LinkedHashMap<String,PropertyDef> getPropertiesDef() {
+ LinkedHashMap<String,PropertyDef> pds = new LinkedHashMap<>();
+ for(PropertyDef pd: getPropertiesDefObjects()) {
+ pds.put(pd.getName(),pd);
+ }
+ return pds;
+ }
+
+ public PropertyDef getPropertyDefValue(String pdname) {
+ // Return the definition of a given property name
+ LinkedHashMap<String,PropertyDef> propsDef = getPropertiesDef();
+ if(propsDef != null && propsDef.get(pdname) != null) {
+ return (PropertyDef)propsDef.get(pdname).getPDValue();
+ }
+ return null;
+ }
+
+ @SuppressWarnings("unchecked")
+ private LinkedHashMap<String,Object> _getParentCapabilities(LinkedHashMap<String,Object> customDef) {
+ LinkedHashMap<String,Object> capabilities = new LinkedHashMap<>();
+ CapabilityTypeDef parentCap = getParentType();
+ if(parentCap != null) {
+ String sParentCap = parentCap.getType();
+ while(!sParentCap.equals(TOSCA_TYPEURI_CAPABILITY_ROOT)) {
+ if(TOSCA_DEF.get(sParentCap) != null) {
+ capabilities.put(sParentCap,TOSCA_DEF.get(sParentCap));
+ }
+ else if(customDef != null && customDef.get(sParentCap) != null) {
+ capabilities.put(sParentCap,customDef.get(sParentCap));
+ }
+ sParentCap = (String)((LinkedHashMap<String,Object>)capabilities.get(sParentCap)).get("derived_from");
+ }
+ }
+ return capabilities;
+ }
+
+ public CapabilityTypeDef getParentType() {
+ // Return a capability this capability is derived from
+ if(defs == null) {
+ return null;
+ }
+ String pnode = derivedFrom(defs);
+ if(pnode != null && !pnode.isEmpty()) {
+ return new CapabilityTypeDef(name, pnode, nodetype, customDef);
+ }
+ return null;
+ }
+
+ public boolean inheritsFrom(ArrayList<String> typeNames) {
+ // Check this capability is in type_names
+
+ // Check if this capability or some of its parent types
+ // are in the list of types: type_names
+ if(typeNames.contains(getType())) {
+ return true;
+ }
+ else if(getParentType() != null) {
+ return getParentType().inheritsFrom(typeNames);
+ }
+ return false;
+ }
+
+ // getters/setters
+
+ public LinkedHashMap<String,Object> getProperties() {
+ return properties;
+ }
+
+ public String getName() {
+ return name;
+ }
+}
+
+/*python
+from toscaparser.elements.property_definition import PropertyDef
+from toscaparser.elements.statefulentitytype import StatefulEntityType
+
+
+class CapabilityTypeDef(StatefulEntityType):
+ '''TOSCA built-in capabilities type.'''
+ TOSCA_TYPEURI_CAPABILITY_ROOT = 'tosca.capabilities.Root'
+
+ def __init__(self, name, ctype, ntype, custom_def=None):
+ self.name = name
+ super(CapabilityTypeDef, self).__init__(ctype, self.CAPABILITY_PREFIX,
+ custom_def)
+ self.nodetype = ntype
+ self.properties = None
+ self.custom_def = custom_def
+ if self.PROPERTIES in self.defs:
+ self.properties = self.defs[self.PROPERTIES]
+ self.parent_capabilities = self._get_parent_capabilities(custom_def)
+
+ def get_properties_def_objects(self):
+ '''Return a list of property definition objects.'''
+ properties = []
+ parent_properties = {}
+ if self.parent_capabilities:
+ for type, value in self.parent_capabilities.items():
+ parent_properties[type] = value.get('properties')
+ if self.properties:
+ for prop, schema in self.properties.items():
+ properties.append(PropertyDef(prop, None, schema))
+ if parent_properties:
+ for parent, props in parent_properties.items():
+ for prop, schema in props.items():
+ # add parent property if not overridden by children type
+ if not self.properties or \
+ prop not in self.properties.keys():
+ properties.append(PropertyDef(prop, None, schema))
+ return properties
+
+ def get_properties_def(self):
+ '''Return a dictionary of property definition name-object pairs.'''
+ return {prop.name: prop
+ for prop in self.get_properties_def_objects()}
+
+ def get_property_def_value(self, name):
+ '''Return the definition of a given property name.'''
+ props_def = self.get_properties_def()
+ if props_def and name in props_def:
+ return props_def[name].value
+
+ def _get_parent_capabilities(self, custom_def=None):
+ capabilities = {}
+ parent_cap = self.parent_type
+ if parent_cap:
+ parent_cap = parent_cap.type
+ while parent_cap != self.TOSCA_TYPEURI_CAPABILITY_ROOT:
+ if parent_cap in self.TOSCA_DEF.keys():
+ capabilities[parent_cap] = self.TOSCA_DEF[parent_cap]
+ elif custom_def and parent_cap in custom_def.keys():
+ capabilities[parent_cap] = custom_def[parent_cap]
+ parent_cap = capabilities[parent_cap]['derived_from']
+ return capabilities
+
+ @property
+ def parent_type(self):
+ '''Return a capability this capability is derived from.'''
+ if not hasattr(self, 'defs'):
+ return None
+ pnode = self.derived_from(self.defs)
+ if pnode:
+ return CapabilityTypeDef(self.name, pnode,
+ self.nodetype, self.custom_def)
+
+ def inherits_from(self, type_names):
+ '''Check this capability is in type_names
+
+ Check if this capability or some of its parent types
+ are in the list of types: type_names
+ '''
+ if self.type in type_names:
+ return True
+ elif self.parent_type:
+ return self.parent_type.inherits_from(type_names)
+ else:
+ return False*/
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/DataType.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/DataType.java
new file mode 100644
index 0000000..d8f8952
--- /dev/null
+++ b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/DataType.java
@@ -0,0 +1,116 @@
+package org.openecomp.sdc.toscaparser.elements;
+
+import java.util.ArrayList;
+import java.util.LinkedHashMap;
+
+public class DataType extends StatefulEntityType {
+
+ LinkedHashMap<String,Object> customDef;
+
+ public DataType(String _dataTypeName,LinkedHashMap<String,Object> _customDef) {
+ super(_dataTypeName,DATATYPE_NETWORK_PREFIX,_customDef);
+
+ customDef = _customDef;
+ }
+
+ public DataType getParentType() {
+ // Return a datatype this datatype is derived from
+ if(defs != null) {
+ String ptype = derivedFrom(defs);
+ if(ptype != null) {
+ return new DataType(ptype,customDef);
+ }
+ }
+ return null;
+ }
+
+ public String getValueType() {
+ // Return 'type' section in the datatype schema
+ if(defs != null) {
+ return (String)entityValue(defs,"type");
+ }
+ return null;
+ }
+
+ public ArrayList<PropertyDef> getAllPropertiesObjects() {
+ //Return all properties objects defined in type and parent type
+ ArrayList<PropertyDef> propsDef = getPropertiesDefObjects();
+ DataType ptype = getParentType();
+ while(ptype != null) {
+ propsDef.addAll(ptype.getPropertiesDefObjects());
+ ptype = ptype.getParentType();
+ }
+ return propsDef;
+ }
+
+ public LinkedHashMap<String,PropertyDef> getAllProperties() {
+ // Return a dictionary of all property definition name-object pairs
+ LinkedHashMap<String,PropertyDef> pno = new LinkedHashMap<>();
+ for(PropertyDef pd: getAllPropertiesObjects()) {
+ pno.put(pd.getName(),pd);
+ }
+ return pno;
+ }
+
+ public Object getAllPropertyValue(String name) {
+ // Return the value of a given property name
+ LinkedHashMap<String,PropertyDef> propsDef = getAllProperties();
+ if(propsDef != null && propsDef.get(name) != null) {
+ return propsDef.get(name).getPDValue();
+ }
+ return null;
+ }
+
+ public LinkedHashMap<String,Object> getDefs() {
+ return defs;
+ }
+
+}
+
+/*python
+
+from toscaparser.elements.statefulentitytype import StatefulEntityType
+
+
+class DataType(StatefulEntityType):
+ '''TOSCA built-in and user defined complex data type.'''
+
+ def __init__(self, datatypename, custom_def=None):
+ super(DataType, self).__init__(datatypename,
+ self.DATATYPE_NETWORK_PREFIX,
+ custom_def)
+ self.custom_def = custom_def
+
+ @property
+ def parent_type(self):
+ '''Return a datatype this datatype is derived from.'''
+ ptype = self.derived_from(self.defs)
+ if ptype:
+ return DataType(ptype, self.custom_def)
+ return None
+
+ @property
+ def value_type(self):
+ '''Return 'type' section in the datatype schema.'''
+ return self.entity_value(self.defs, 'type')
+
+ def get_all_properties_objects(self):
+ '''Return all properties objects defined in type and parent type.'''
+ props_def = self.get_properties_def_objects()
+ ptype = self.parent_type
+ while ptype:
+ props_def.extend(ptype.get_properties_def_objects())
+ ptype = ptype.parent_type
+ return props_def
+
+ def get_all_properties(self):
+ '''Return a dictionary of all property definition name-object pairs.'''
+ return {prop.name: prop
+ for prop in self.get_all_properties_objects()}
+
+ def get_all_property_value(self, name):
+ '''Return the value of a given property name.'''
+ props_def = self.get_all_properties()
+ if props_def and name in props_def.key():
+ return props_def[name].value
+*/ \ No newline at end of file
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/EntityType.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/EntityType.java
new file mode 100644
index 0000000..1ea5b42
--- /dev/null
+++ b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/EntityType.java
@@ -0,0 +1,419 @@
+package org.openecomp.sdc.toscaparser.elements;
+
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.Map;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.yaml.snakeyaml.Yaml;

import org.openecomp.sdc.toscaparser.extensions.ExtTools;
import org.openecomp.sdc.toscaparser.utils.CopyUtils;
+
+public class EntityType {
+
+ private static Logger log = LoggerFactory.getLogger(EntityType.class.getName());
+
+ private static final String TOSCA_DEFINITION_1_0_YAML = "TOSCA_definition_1_0.yaml";
+ protected static final String DERIVED_FROM = "derived_from";
+ protected static final String PROPERTIES = "properties";
+ protected static final String ATTRIBUTES = "attributes";
+ protected static final String REQUIREMENTS = "requirements";
+ protected static final String INTERFACES = "interfaces";
+ protected static final String CAPABILITIES = "capabilities";
+ protected static final String TYPE = "type";
+ protected static final String ARTIFACTS = "artifacts";
+
+ @SuppressWarnings("unused")
+ private static final String SECTIONS[] = {
+ DERIVED_FROM, PROPERTIES, ATTRIBUTES, REQUIREMENTS,
+ INTERFACES, CAPABILITIES, TYPE, ARTIFACTS
+ };
+
+ public static final String TOSCA_DEF_SECTIONS[] = {
+ "node_types", "data_types", "artifact_types",
+ "group_types", "relationship_types",
+ "capability_types", "interface_types",
+ "policy_types"};
+
+
+ // TOSCA definition file
+ //private final static String path = EntityType.class.getProtectionDomain().getCodeSource().getLocation().getPath();
+
+ //private final static String path = EntityType.class.getClassLoader().getResource("TOSCA_definition_1_0.yaml").getFile();
+ //private final static String TOSCA_DEF_FILE = EntityType.class.getClassLoader().getResourceAsStream("TOSCA_definition_1_0.yaml");
+
+ private static LinkedHashMap<String,Object> TOSCA_DEF_LOAD_AS_IS = loadTdf();
+
+ //EntityType.class.getClassLoader().getResourceAsStream("TOSCA_definition_1_0.yaml");
+
+ @SuppressWarnings("unchecked")
+ private static LinkedHashMap<String,Object> loadTdf() {
+ String toscaDefLocation = EntityType.class.getClassLoader().getResource(TOSCA_DEFINITION_1_0_YAML).getFile();
+ InputStream input = EntityType.class.getClassLoader().getResourceAsStream(TOSCA_DEFINITION_1_0_YAML);
+ if (input == null){
+ log.error("EntityType - loadTdf - Couldn't load TOSCA_DEF_FILE {}", toscaDefLocation);
+ }
+ Yaml yaml = new Yaml();
+ Object loaded = yaml.load(input);
+ //@SuppressWarnings("unchecked")
+ return (LinkedHashMap<String,Object>) loaded;
+ }
+
+ // Map of definition with pre-loaded values of TOSCA_DEF_FILE_SECTIONS
+ public static LinkedHashMap<String,Object> TOSCA_DEF;
+ static {
+ TOSCA_DEF = new LinkedHashMap<String,Object>();
+ for(String section: TOSCA_DEF_SECTIONS) {
+ @SuppressWarnings("unchecked")
+ LinkedHashMap<String,Object> value = (LinkedHashMap<String,Object>)TOSCA_DEF_LOAD_AS_IS.get(section);
+ if(value != null) {
+ for(String key: value.keySet()) {
+ TOSCA_DEF.put(key, value.get(key));
+ }
+ }
+ }
+ }
+
+ public static final String DEPENDSON = "tosca.relationships.DependsOn";
+ public static final String HOSTEDON = "tosca.relationships.HostedOn";
+ public static final String CONNECTSTO = "tosca.relationships.ConnectsTo";
+ public static final String ATTACHESTO = "tosca.relationships.AttachesTo";
+ public static final String LINKSTO = "tosca.relationships.network.LinksTo";
+ public static final String BINDSTO = "tosca.relationships.network.BindsTo";
+
+ public static final String RELATIONSHIP_TYPE[] = {
+ "tosca.relationships.DependsOn",
+ "tosca.relationships.HostedOn",
+ "tosca.relationships.ConnectsTo",
+ "tosca.relationships.AttachesTo",
+ "tosca.relationships.network.LinksTo",
+ "tosca.relationships.network.BindsTo"};
+
+ public static final String NODE_PREFIX = "tosca.nodes.";
+ public static final String RELATIONSHIP_PREFIX = "tosca.relationships.";
+ public static final String CAPABILITY_PREFIX = "tosca.capabilities.";
+ public static final String INTERFACE_PREFIX = "tosca.interfaces.";
+ public static final String ARTIFACT_PREFIX = "tosca.artifacts.";
+ public static final String POLICY_PREFIX = "tosca.policies.";
+ public static final String GROUP_PREFIX = "tosca.groups.";
+ //currently the data types are defined only for network
+ // but may have changes in the future.
+ public static final String DATATYPE_PREFIX = "tosca.datatypes.";
+ public static final String DATATYPE_NETWORK_PREFIX = DATATYPE_PREFIX + "network.";
+ public static final String TOSCA = "tosca";
+
+ protected String type;
+ protected LinkedHashMap<String,Object> defs = null;
+ public Object getParentType() { return null; }
+
+ public String derivedFrom(LinkedHashMap<String,Object> defs) {
+ // Return a type this type is derived from
+ return (String)entityValue(defs, "derived_from");
+ }
+
+ public boolean isDerivedFrom(String type_str) {
+ // Check if object inherits from the given type
+ // Returns true if this object is derived from 'type_str'
+ // False otherwise.
+ if(type == null || this.type.isEmpty()) {
+ return false;
+ }
+ else if(type == type_str) {
+ return true;
+ }
+ else if(getParentType() != null) {
+ return ((EntityType)getParentType()).isDerivedFrom(type_str);
+ }
+ else {
+ return false;
+ }
+ }
+
+ public Object entityValue(LinkedHashMap<String,Object> defs, String key) {
+ if(defs != null) {
+ return defs.get(key);
+ }
+ return null;
+ }
+
+ @SuppressWarnings("unchecked")
+ public Object getValue(String ndtype, LinkedHashMap<String,Object> _defs, boolean parent) {
+ Object value = null;
+ if(_defs == null) {
+ if(defs == null) {
+ return null;
+ }
+ _defs = this.defs;
+ }
+ Object defndt = _defs.get(ndtype);
+ if(defndt != null) {
+ // copy the value to avoid that next operations add items in the
+ // item definitions
+ //value = copy.copy(defs[ndtype])
+ value = CopyUtils.copyLhmOrAl(defndt);
+ }
+
+ if(parent) {
+ EntityType p = this;
+ if(p != null) {
+ while(p != null) {
+ if(p.defs != null && p.defs.get(ndtype) != null) {
+ // get the parent value
+ Object parentValue = p.defs.get(ndtype);
+ if(value != null) {
+ if(value instanceof LinkedHashMap) {
+ for(Map.Entry<String,Object> me: ((LinkedHashMap<String,Object>)parentValue).entrySet()) {
+ String k = me.getKey();
+ if(((LinkedHashMap<String,Object>)value).get(k) == null) {
+ ((LinkedHashMap<String,Object>)value).put(k,me.getValue());
+ }
+ }
+ }
+ if(value instanceof ArrayList) {
+ for(Object pValue: (ArrayList<Object>)parentValue) {
+ if(!((ArrayList<Object>)value).contains(pValue)) {
+ ((ArrayList<Object>)value).add(pValue);
+ }
+ }
+ }
+ }
+ else {
+ // value = copy.copy(parent_value)
+ value = CopyUtils.copyLhmOrAl(parentValue);
+ }
+ }
+ p = (EntityType)p.getParentType();
+ }
+ }
+ }
+
+ return value;
+ }
+
+ @SuppressWarnings("unchecked")
+ public Object getDefinition(String ndtype) {
+ Object value = null;
+ LinkedHashMap<String,Object> _defs;
+ // no point in hasattr, because we have it, and it
+ // doesn't do anything except emit an exception anyway
+ //if not hasattr(self, 'defs'):
+ // defs = None
+ // ExceptionCollector.appendException(
+ // ValidationError(message="defs is " + str(defs)))
+ //else:
+ // defs = self.defs
+ _defs = this.defs;
+
+
+ if(_defs != null && _defs.get(ndtype) != null) {
+ value = _defs.get(ndtype);
+ }
+
+ Object p = getParentType();
+ if(p != null) {
+ Object inherited = ((EntityType)p).getDefinition(ndtype);
+ if(inherited != null) {
+ // inherited = dict(inherited) WTF?!?
+ if(value == null) {
+ value = inherited;
+ }
+ else {
+ //?????
+ //inherited.update(value)
+ //value.update(inherited)
+ for(Map.Entry<String,Object> me: ((LinkedHashMap<String,Object>)inherited).entrySet()) {
+ ((LinkedHashMap<String,Object>)value).put(me.getKey(),me.getValue());
+ }
+ }
+ }
+ }
+ return value;
+ }
+
+ public static void updateDefinitions(String version) {
+ ExtTools exttools = new ExtTools();
+ String extensionDefsFile = exttools.getDefsFile(version);
+
+ InputStream input = null;
+ try {
+ input = new FileInputStream(new File(extensionDefsFile));
+ }
+ catch (FileNotFoundException e) {
+ log.error("EntityType - updateDefinitions - Failed to open extension defs file ", extensionDefsFile);
+ return;
+ }
+ Yaml yaml = new Yaml();
+ LinkedHashMap<String,Object> nfvDefFile = (LinkedHashMap<String,Object>)yaml.load(input);
+ LinkedHashMap<String,Object> nfvDef = new LinkedHashMap<>();
+ for(String section: TOSCA_DEF_SECTIONS) {
+ if(nfvDefFile.get(section) != null) {
+ LinkedHashMap<String,Object> value =
+ (LinkedHashMap<String,Object>)nfvDefFile.get(section);
+ for(String key: value.keySet()) {
+ nfvDef.put(key, value.get(key));
+ }
+ }
+ }
+ TOSCA_DEF.putAll(nfvDef);
+ }
+
+}
+
+/*python
+
+from toscaparser.common.exception import ExceptionCollector
+from toscaparser.common.exception import ValidationError
+from toscaparser.extensions.exttools import ExtTools
+import org.openecomp.sdc.toscaparser.utils.yamlparser
+
+log = logging.getLogger('tosca')
+
+
+class EntityType(object):
+ '''Base class for TOSCA elements.'''
+
+ SECTIONS = (DERIVED_FROM, PROPERTIES, ATTRIBUTES, REQUIREMENTS,
+ INTERFACES, CAPABILITIES, TYPE, ARTIFACTS) = \
+ ('derived_from', 'properties', 'attributes', 'requirements',
+ 'interfaces', 'capabilities', 'type', 'artifacts')
+
+ TOSCA_DEF_SECTIONS = ['node_types', 'data_types', 'artifact_types',
+ 'group_types', 'relationship_types',
+ 'capability_types', 'interface_types',
+ 'policy_types']
+
+ '''TOSCA definition file.'''
+ TOSCA_DEF_FILE = os.path.join(
+ os.path.dirname(os.path.abspath(__file__)),
+ "TOSCA_definition_1_0.yaml")
+
+ loader = toscaparser.utils.yamlparser.load_yaml
+
+ TOSCA_DEF_LOAD_AS_IS = loader(TOSCA_DEF_FILE)
+
+ # Map of definition with pre-loaded values of TOSCA_DEF_FILE_SECTIONS
+ TOSCA_DEF = {}
+ for section in TOSCA_DEF_SECTIONS:
+ if section in TOSCA_DEF_LOAD_AS_IS.keys():
+ value = TOSCA_DEF_LOAD_AS_IS[section]
+ for key in value.keys():
+ TOSCA_DEF[key] = value[key]
+
+ RELATIONSHIP_TYPE = (DEPENDSON, HOSTEDON, CONNECTSTO, ATTACHESTO,
+ LINKSTO, BINDSTO) = \
+ ('tosca.relationships.DependsOn',
+ 'tosca.relationships.HostedOn',
+ 'tosca.relationships.ConnectsTo',
+ 'tosca.relationships.AttachesTo',
+ 'tosca.relationships.network.LinksTo',
+ 'tosca.relationships.network.BindsTo')
+
+ NODE_PREFIX = 'tosca.nodes.'
+ RELATIONSHIP_PREFIX = 'tosca.relationships.'
+ CAPABILITY_PREFIX = 'tosca.capabilities.'
+ INTERFACE_PREFIX = 'tosca.interfaces.'
+ ARTIFACT_PREFIX = 'tosca.artifacts.'
+ POLICY_PREFIX = 'tosca.policies.'
+ GROUP_PREFIX = 'tosca.groups.'
+ # currently the data types are defined only for network
+ # but may have changes in the future.
+ DATATYPE_PREFIX = 'tosca.datatypes.'
+ DATATYPE_NETWORK_PREFIX = DATATYPE_PREFIX + 'network.'
+ TOSCA = 'tosca'
+
+ def derived_from(self, defs):
+ '''Return a type this type is derived from.'''
+ return self.entity_value(defs, 'derived_from')
+
+ def is_derived_from(self, type_str):
+ '''Check if object inherits from the given type.
+
+ Returns true if this object is derived from 'type_str'.
+ False otherwise.
+ '''
+ if not self.type:
+ return False
+ elif self.type == type_str:
+ return True
+ elif self.parent_type:
+ return self.parent_type.is_derived_from(type_str)
+ else:
+ return False
+
+ def entity_value(self, defs, key):
+ if key in defs:
+ return defs[key]
+
+ def get_value(self, ndtype, defs=None, parent=None):
+ value = None
+ if defs is None:
+ if not hasattr(self, 'defs'):
+ return None
+ defs = self.defs
+ if ndtype in defs:
+ # copy the value to avoid that next operations add items in the
+ # item definitions
+ value = copy.copy(defs[ndtype])
+ if parent:
+ p = self
+ if p:
+ while p:
+ if ndtype in p.defs:
+ # get the parent value
+ parent_value = p.defs[ndtype]
+ if value:
+ if isinstance(value, dict):
+ for k, v in parent_value.items():
+ if k not in value.keys():
+ value[k] = v
+ if isinstance(value, list):
+ for p_value in parent_value:
+ if p_value not in value:
+ value.append(p_value)
+ else:
+ value = copy.copy(parent_value)
+ p = p.parent_type
+ return value
+
+ def get_definition(self, ndtype):
+ value = None
+ if not hasattr(self, 'defs'):
+ defs = None
+ ExceptionCollector.appendException(
+ ValidationError(message="defs is " + str(defs)))
+ else:
+ defs = self.defs
+ if defs is not None and ndtype in defs:
+ value = defs[ndtype]
+ p = self.parent_type
+ if p:
+ inherited = p.get_definition(ndtype)
+ if inherited:
+ inherited = dict(inherited)
+ if not value:
+ value = inherited
+ else:
+ inherited.update(value)
+ value.update(inherited)
+ return value
+
+
+def update_definitions(version):
+ exttools = ExtTools()
+ extension_defs_file = exttools.get_defs_file(version)
+ loader = toscaparser.utils.yamlparser.load_yaml
+ nfv_def_file = loader(extension_defs_file)
+ nfv_def = {}
+ for section in EntityType.TOSCA_DEF_SECTIONS:
+ if section in nfv_def_file.keys():
+ value = nfv_def_file[section]
+ for key in value.keys():
+ nfv_def[key] = value[key]
+ EntityType.TOSCA_DEF.update(nfv_def)
+*/
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/GroupType.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/GroupType.java
new file mode 100644
index 0000000..41dcd34
--- /dev/null
+++ b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/GroupType.java
@@ -0,0 +1,214 @@
+package org.openecomp.sdc.toscaparser.elements;
+
+import java.util.LinkedHashMap;
+
+import org.openecomp.sdc.toscaparser.common.ExceptionCollector;
+
+public class GroupType extends StatefulEntityType {
+
+ private static final String DERIVED_FROM = "derived_from";
+ private static final String VERSION = "version";
+ private static final String METADATA = "metadata";
+ private static final String DESCRIPTION = "description";
+ private static final String PROPERTIES = "properties";
+ private static final String MEMBERS = "members";
+ private static final String INTERFACES = "interfaces";
+
+ private static final String SECTIONS[] = {
+ DERIVED_FROM, VERSION, METADATA, DESCRIPTION, PROPERTIES, MEMBERS, INTERFACES};
+
+ private String groupType;
+ private LinkedHashMap<String,Object> customDef;
+ private String groupDescription;
+ private String groupVersion;
+ //private LinkedHashMap<String,Object> groupProperties;
+ //private ArrayList<String> groupMembers;
+ private LinkedHashMap<String,Object> metaData;
+
+ @SuppressWarnings("unchecked")
+ public GroupType(String _grouptype,LinkedHashMap<String,Object> _customDef) {
+ super(_grouptype,GROUP_PREFIX,_customDef);
+
+ groupType = _grouptype;
+ customDef = _customDef;
+ _validateFields();
+ if(defs != null) {
+ groupDescription = (String)defs.get(DESCRIPTION);
+ groupVersion = (String)defs.get(VERSION);
+ //groupProperties = (LinkedHashMap<String,Object>)defs.get(PROPERTIES);
+ //groupMembers = (ArrayList<String>)defs.get(MEMBERS);
+ Object mdo = defs.get(METADATA);
+ if(mdo instanceof LinkedHashMap) {
+ metaData = (LinkedHashMap<String,Object>)mdo;
+ }
+ else {
+ metaData = null;
+ }
+
+ if(metaData != null) {
+ _validateMetadata(metaData);
+ }
+ }
+ }
+
+ public GroupType getParentType() {
+ // Return a group statefulentity of this entity is derived from.
+ if(defs == null) {
+ return null;
+ }
+ String pgroupEntity = derivedFrom(defs);
+ if(pgroupEntity != null) {
+ return new GroupType(pgroupEntity,customDef);
+ }
+ return null;
+ }
+
+ public String getDescription() {
+ return groupDescription;
+ }
+
+ public String getVersion() {
+ return groupVersion;
+ }
+
+ @SuppressWarnings("unchecked")
+ public LinkedHashMap<String,Object> getInterfaces() {
+ Object ifo = getValue(INTERFACES,null,false);
+ if(ifo instanceof LinkedHashMap) {
+ return (LinkedHashMap<String, Object>)ifo;
+ }
+ return new LinkedHashMap<String,Object>();
+ }
+
+ private void _validateFields() {
+ if(defs != null) {
+ for(String name: defs.keySet()) {
+ boolean bFound = false;
+ for(String sect: SECTIONS) {
+ if(name.equals(sect)) {
+ bFound = true;
+ break;
+ }
+ }
+ if(!bFound) {
+ ExceptionCollector.appendException(String.format(
+ "UnknownFieldError: Group Type \"%s\" contains unknown field \"%s\"",
+ groupType,name));
+ }
+ }
+ }
+ }
+
+ @SuppressWarnings("unchecked")
+ private void _validateMetadata(LinkedHashMap<String,Object> metadata) {
+ String mtt = (String) metadata.get("type");
+ if(mtt != null && !mtt.equals("map") && !mtt.equals("tosca:map")) {
+ ExceptionCollector.appendException(String.format(
+ "InvalidTypeError: \"%s\" defined in group for metadata is invalid",
+ mtt));
+ }
+ for(String entrySchema: metadata.keySet()) {
+ Object estob = metadata.get(entrySchema);
+ if(estob instanceof LinkedHashMap) {
+ String est = (String)((LinkedHashMap<String,Object>)estob).get("type");
+ if(!est.equals("string")) {
+ ExceptionCollector.appendException(String.format(
+ "InvalidTypeError: \"%s\" defined in group for metadata \"%s\" is invalid",
+ est,entrySchema));
+ }
+ }
+ }
+ }
+
+ public String getType() {
+ return groupType;
+ }
+
+
+}
+
+/*python
+
+from toscaparser.common.exception import ExceptionCollector
+from toscaparser.common.exception import InvalidTypeError
+from toscaparser.common.exception import UnknownFieldError
+from toscaparser.elements.statefulentitytype import StatefulEntityType
+
+
+class GroupType(StatefulEntityType):
+ '''TOSCA built-in group type.'''
+
+ SECTIONS = (DERIVED_FROM, VERSION, METADATA, DESCRIPTION, PROPERTIES,
+ MEMBERS, INTERFACES) = \
+ ("derived_from", "version", "metadata", "description",
+ "properties", "members", "interfaces")
+
+ def __init__(self, grouptype, custom_def=None):
+ super(GroupType, self).__init__(grouptype, self.GROUP_PREFIX,
+ custom_def)
+ self.custom_def = custom_def
+ self.grouptype = grouptype
+ self._validate_fields()
+ self.group_description = None
+ if self.DESCRIPTION in self.defs:
+ self.group_description = self.defs[self.DESCRIPTION]
+
+ self.group_version = None
+ if self.VERSION in self.defs:
+ self.group_version = self.defs[self.VERSION]
+
+ self.group_properties = None
+ if self.PROPERTIES in self.defs:
+ self.group_properties = self.defs[self.PROPERTIES]
+
+ self.group_members = None
+ if self.MEMBERS in self.defs:
+ self.group_members = self.defs[self.MEMBERS]
+
+ if self.METADATA in self.defs:
+ self.meta_data = self.defs[self.METADATA]
+ self._validate_metadata(self.meta_data)
+
+ @property
+ def parent_type(self):
+ '''Return a group statefulentity of this entity is derived from.'''
+ if not hasattr(self, 'defs'):
+ return None
+ pgroup_entity = self.derived_from(self.defs)
+ if pgroup_entity:
+ return GroupType(pgroup_entity, self.custom_def)
+
+ @property
+ def description(self):
+ return self.group_description
+
+ @property
+ def version(self):
+ return self.group_version
+
+ @property
+ def interfaces(self):
+ return self.get_value(self.INTERFACES)
+
+ def _validate_fields(self):
+ if self.defs:
+ for name in self.defs.keys():
+ if name not in self.SECTIONS:
+ ExceptionCollector.appendException(
+ UnknownFieldError(what='Group Type %s'
+ % self.grouptype, field=name))
+
+ def _validate_metadata(self, meta_data):
+ if not meta_data.get('type') in ['map', 'tosca:map']:
+ ExceptionCollector.appendException(
+ InvalidTypeError(what='"%s" defined in group for '
+ 'metadata' % (meta_data.get('type'))))
+ for entry_schema, entry_schema_type in meta_data.items():
+ if isinstance(entry_schema_type, dict) and not \
+ entry_schema_type.get('type') == 'string':
+ ExceptionCollector.appendException(
+ InvalidTypeError(what='"%s" defined in group for '
+ 'metadata "%s"'
+ % (entry_schema_type.get('type'),
+ entry_schema)))
+*/ \ No newline at end of file
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/InterfacesDef.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/InterfacesDef.java
new file mode 100644
index 0000000..c34bff8
--- /dev/null
+++ b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/InterfacesDef.java
@@ -0,0 +1,227 @@
+package org.openecomp.sdc.toscaparser.elements;
+
+import java.util.ArrayList;
+import java.util.LinkedHashMap;
+import java.util.Map;
+
+import org.openecomp.sdc.toscaparser.EntityTemplate;
+import org.openecomp.sdc.toscaparser.common.ExceptionCollector;
+
+public class InterfacesDef extends StatefulEntityType {
+
+ public static final String LIFECYCLE = "tosca.interfaces.node.lifecycle.Standard";
+ public static final String CONFIGURE = "tosca.interfaces.relationship.Configure";
+ public static final String LIFECYCLE_SHORTNAME = "Standard";
+ public static final String CONFIGURE_SHORTNAME = "Configure";
+
+ public static final String SECTIONS[] = {
+ LIFECYCLE, CONFIGURE, LIFECYCLE_SHORTNAME,CONFIGURE_SHORTNAME
+ };
+
+ public static final String IMPLEMENTATION = "implementation";
+ public static final String INPUTS = "inputs";
+
+ public static final String INTERFACEVALUE[] = {IMPLEMENTATION, INPUTS};
+
+ public static final String INTERFACE_DEF_RESERVED_WORDS[] = {
+ "type", "inputs", "derived_from", "version", "description"};
+
+ private EntityType ntype;
+ private EntityTemplate nodeTemplate;
+ private String name;
+ private Object value;
+ private String implementation;
+ private LinkedHashMap<String,Object> inputs;
+
+
+ @SuppressWarnings("unchecked")
+ public InterfacesDef(EntityType inodeType,
+ String interfaceType,
+ EntityTemplate inodeTemplate,
+ String iname,
+ Object ivalue) {
+ // void
+ super();
+
+ ntype = inodeType;
+ nodeTemplate = inodeTemplate;
+ type = interfaceType;
+ name = iname;
+ value = ivalue;
+ implementation = null;
+ inputs = null;
+ defs = new LinkedHashMap<String,Object>();
+
+ if(interfaceType.equals(LIFECYCLE_SHORTNAME)) {
+ interfaceType = LIFECYCLE;
+ }
+ if(interfaceType.equals(CONFIGURE_SHORTNAME)) {
+ interfaceType = CONFIGURE;
+ }
+
+ // only NodeType has getInterfaces "hasattr(ntype,interfaces)"
+ // while RelationshipType does not
+ if(ntype instanceof NodeType) {
+ if(((NodeType)ntype).getInterfaces() != null &&
+ ((NodeType)ntype).getInterfaces().values().contains(interfaceType)) {
+ LinkedHashMap<String,Object> nii = (LinkedHashMap<String,Object>)
+ ((NodeType)ntype).getInterfaces().get(interfaceType);
+ interfaceType = (String)nii.get("type");
+ }
+ }
+ if(inodeType != null) {
+ if(nodeTemplate != null && nodeTemplate.getCustomDef() != null &&
+ nodeTemplate.getCustomDef().values().contains(interfaceType)) {
+ defs = (LinkedHashMap<String,Object>)
+ nodeTemplate.getCustomDef().get(interfaceType);
+ }
+ else {
+ defs = (LinkedHashMap<String,Object>)TOSCA_DEF.get(interfaceType);
+ }
+ }
+
+ if(ivalue != null) {
+ if(ivalue instanceof LinkedHashMap) {
+ for(Map.Entry<String,Object> me: ((LinkedHashMap<String,Object>)ivalue).entrySet()) {
+ if(me.getKey().equals("implementation")) {
+ implementation = (String)me.getValue();
+ }
+ else if(me.getKey().equals("inputs")) {
+ inputs = (LinkedHashMap<String,Object>)me.getValue();
+ }
+ else {
+ ExceptionCollector.appendException(String.format(
+ "UnknownFieldError: \"interfaces\" of template \"%s\" contain unknown field \"%s\"",
+ nodeTemplate.getName(),me.getKey()));
+ }
+ }
+ }
+ else {
+ implementation = (String)ivalue;
+ }
+ }
+ }
+
+ public ArrayList<String> getLifecycleOps() {
+ if(defs != null) {
+ if(type.equals(LIFECYCLE)) {
+ return _ops();
+ }
+ }
+ return null;
+ }
+
+ public ArrayList<String> getConfigureOps() {
+ if(defs != null) {
+ if(type.equals(CONFIGURE)) {
+ return _ops();
+ }
+ }
+ return null;
+ }
+
+ private ArrayList<String> _ops() {
+ return new ArrayList<String>(defs.keySet());
+ }
+
+ // getters/setters
+
+ public LinkedHashMap<String,Object> getInputs() {
+ return inputs;
+ }
+
+ public void setInput(String name,Object value) {
+ inputs.put(name, value);
+ }
+}
+
+/*python
+
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from toscaparser.common.exception import ExceptionCollector
+from toscaparser.common.exception import UnknownFieldError
+from toscaparser.elements.statefulentitytype import StatefulEntityType
+
+SECTIONS = (LIFECYCLE, CONFIGURE, LIFECYCLE_SHORTNAME,
+ CONFIGURE_SHORTNAME) = \
+ ('tosca.interfaces.node.lifecycle.Standard',
+ 'tosca.interfaces.relationship.Configure',
+ 'Standard', 'Configure')
+
+INTERFACEVALUE = (IMPLEMENTATION, INPUTS) = ('implementation', 'inputs')
+
+INTERFACE_DEF_RESERVED_WORDS = ['type', 'inputs', 'derived_from', 'version',
+ 'description']
+
+
+class InterfacesDef(StatefulEntityType):
+ '''TOSCA built-in interfaces type.'''
+
+ def __init__(self, node_type, interfacetype,
+ node_template=None, name=None, value=None):
+ self.ntype = node_type
+ self.node_template = node_template
+ self.type = interfacetype
+ self.name = name
+ self.value = value
+ self.implementation = None
+ self.inputs = None
+ self.defs = {}
+ if interfacetype == LIFECYCLE_SHORTNAME:
+ interfacetype = LIFECYCLE
+ if interfacetype == CONFIGURE_SHORTNAME:
+ interfacetype = CONFIGURE
+ if hasattr(self.ntype, 'interfaces') \
+ and self.ntype.interfaces \
+ and interfacetype in self.ntype.interfaces:
+ interfacetype = self.ntype.interfaces[interfacetype]['type']
+ if node_type:
+ if self.node_template and self.node_template.custom_def \
+ and interfacetype in self.node_template.custom_def:
+ self.defs = self.node_template.custom_def[interfacetype]
+ else:
+ self.defs = self.TOSCA_DEF[interfacetype]
+ if value:
+ if isinstance(self.value, dict):
+ for i, j in self.value.items():
+ if i == IMPLEMENTATION:
+ self.implementation = j
+ elif i == INPUTS:
+ self.inputs = j
+ else:
+ what = ('"interfaces" of template "%s"' %
+ self.node_template.name)
+ ExceptionCollector.appendException(
+ UnknownFieldError(what=what, field=i))
+ else:
+ self.implementation = value
+
+ @property
+ def lifecycle_ops(self):
+ if self.defs:
+ if self.type == LIFECYCLE:
+ return self._ops()
+
+ @property
+ def configure_ops(self):
+ if self.defs:
+ if self.type == CONFIGURE:
+ return self._ops()
+
+ def _ops(self):
+ ops = []
+ for name in list(self.defs.keys()):
+ ops.append(name)
+ return ops
+*/ \ No newline at end of file
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/Metadata.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/Metadata.java
new file mode 100644
index 0000000..8ffd019
--- /dev/null
+++ b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/Metadata.java
@@ -0,0 +1,35 @@
+package org.openecomp.sdc.toscaparser.elements;
+
+import java.util.Map;
+
/**
 * Thin read/write wrapper around a template's metadata map.
 *
 * <p>The wrapped map may be {@code null}; all accessors treat a null or empty
 * map as "no metadata" and degrade to no-ops / null results.
 */
public class Metadata {

    private final Map<String, Object> metadataMap;

    public Metadata(Map<String, Object> metadataMap) {
        this.metadataMap = metadataMap;
    }

    /**
     * Returns the value for {@code key} rendered as a String, or {@code null}
     * when no metadata is present at all.
     *
     * <p>NOTE(review): a present-but-missing key yields the literal string
     * "null" (String.valueOf of a null lookup) — callers appear to rely on the
     * historical behavior, so it is preserved here.
     */
    public String getValue(String key) {
        if (isEmpty()) {
            return null;
        }
        return String.valueOf(metadataMap.get(key));
    }

    /**
     * Stores {@code value} under {@code key}. Silently does nothing when the
     * map is null or empty (historical behavior, kept intact).
     */
    public void setValue(String key, Object value) {
        if (isEmpty()) {
            return;
        }
        metadataMap.put(key, value);
    }


    // True when there is no usable metadata map to read from or write to.
    private boolean isEmpty() {
        return metadataMap == null || metadataMap.isEmpty();
    }

    @Override
    public String toString() {
        return "Metadata{" + "metadataMap=" + metadataMap + '}';
    }

}
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/NodeType.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/NodeType.java
new file mode 100644
index 0000000..7b103db
--- /dev/null
+++ b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/NodeType.java
@@ -0,0 +1,519 @@
+package org.openecomp.sdc.toscaparser.elements;
+
+import java.util.ArrayList;
+import java.util.LinkedHashMap;
+import java.util.Map;
+
+import org.openecomp.sdc.toscaparser.common.ExceptionCollector;
+import org.openecomp.sdc.toscaparser.elements.InterfacesDef;
+
+public class NodeType extends StatefulEntityType {
+ // TOSCA built-in node type
+
+ private static final String DERIVED_FROM = "derived_from";
+ private static final String METADATA = "metadata";
+ private static final String PROPERTIES = "properties";
+ private static final String VERSION = "version";
+ private static final String DESCRIPTION = "description";
+ private static final String ATTRIBUTES = "attributes";
+ private static final String REQUIREMENTS = "requirements";
+ private static final String CAPABILITIES = "capabilities";
+ private static final String INTERFACES = "interfaces";
+ private static final String ARTIFACTS = "artifacts";
+
+ private static final String SECTIONS[] = {
+ DERIVED_FROM, METADATA, PROPERTIES, VERSION, DESCRIPTION, ATTRIBUTES, REQUIREMENTS, CAPABILITIES, INTERFACES, ARTIFACTS
+ };
+
+ private String ntype;
+ public LinkedHashMap<String,Object> customDef;
+
+ public NodeType(String nttype,LinkedHashMap<String,Object> ntcustomDef) {
+ super(nttype,NODE_PREFIX, ntcustomDef);
+ ntype = nttype;
+ customDef = ntcustomDef;
+ _validateKeys();
+ }
+
+ public Object getParentType() {
+ // Return a node this node is derived from
+ if(defs == null) {
+ return null;
+ }
+ String pnode = derivedFrom(defs);
+ if(pnode != null && !pnode.isEmpty()) {
+ return new NodeType(pnode,customDef);
+ }
+ return null;
+ }
+
+ @SuppressWarnings("unchecked")
+ public LinkedHashMap<RelationshipType,NodeType> getRelationship() {
+ // Return a dictionary of relationships to other node types
+
+ // This method returns a dictionary of named relationships that nodes
+ // of the current node type (self) can have to other nodes (of specific
+ // types) in a TOSCA template.
+
+ LinkedHashMap<RelationshipType,NodeType> relationship = new LinkedHashMap<>();
+ ArrayList<LinkedHashMap<String,Object>> requires;
+ Object treq = getAllRequirements();
+ if(treq != null) {
+ // NOTE(sdmonov): Check if requires is a dict.
+ // If it is a dict convert it to a list of dicts.
+ // This is needed because currently the code below supports only
+ // lists as requirements definition. The following check will
+ // make sure if a map (dict) was provided it will be converted to
+ // a list before proceeding to the parsing.
+ if(treq instanceof LinkedHashMap) {
+ requires = new ArrayList<>();
+ for(Map.Entry<String,Object> me: ((LinkedHashMap<String,Object>)treq).entrySet()) {
+ LinkedHashMap<String,Object> tl = new LinkedHashMap<>();
+ tl.put(me.getKey(),me.getValue());
+ requires.add(tl);
+ }
+ }
+ else {
+ requires = (ArrayList<LinkedHashMap<String,Object>>)treq;
+ }
+
+ String keyword = null;
+ String nodeType = null;
+ for(LinkedHashMap<String,Object> require: requires) {
+ String relation = null;
+ for(Map.Entry<String,Object> re: require.entrySet()) {
+ String key = re.getKey();
+ LinkedHashMap<String,Object> req = (LinkedHashMap<String,Object>)re.getValue();
+ if(req.get("relationship") != null) {
+ Object trelation = req.get("relationship");
+ // trelation is a string or a dict with "type" mapped to the string we want
+ if(trelation instanceof String) {
+ relation = (String)trelation;
+ }
+ else {
+ if(((LinkedHashMap<String,Object>)trelation).get("type") != null) {
+ relation = (String)((LinkedHashMap<String,Object>)trelation).get("type");
+ }
+ }
+ nodeType = (String)req.get("node");
+ //BUG meaningless?? LinkedHashMap<String,Object> value = req;
+ if(nodeType != null) {
+ keyword = "node";
+ }
+ else {
+ // If value is a dict and has a type key
+ // we need to lookup the node type using
+ // the capability type
+ String captype = (String)req.get("capability");
+ String value = _getNodeTypeByCap(captype);
+ relation = _getRelation(key,value);
+ keyword = key;
+ nodeType = value;
+ }
+ }
+
+ }
+ RelationshipType rtype = new RelationshipType(relation, keyword, customDef);
+ NodeType relatednode = new NodeType(nodeType, customDef);
+ relationship.put(rtype, relatednode);
+ }
+ }
+ return relationship;
+
+ }
+
+ @SuppressWarnings("unchecked")
+ private String _getNodeTypeByCap(String cap) {
+ // Find the node type that has the provided capability
+
+ // This method will lookup all node types if they have the
+ // provided capability.
+
+ // Filter the node types
+ ArrayList<String> nodeTypes = new ArrayList<>();
+ for(String nt: TOSCA_DEF.keySet()) {
+ if(nt.startsWith(NODE_PREFIX) && !nt.equals("tosca.nodes.Root")) {
+ nodeTypes.add(nt);
+ }
+ }
+ for(String nt: nodeTypes) {
+ LinkedHashMap<String,Object> nodeDef = (LinkedHashMap<String,Object>)TOSCA_DEF.get(nt);
+ if(nodeDef instanceof LinkedHashMap && nodeDef.get("capabilities") != null) {
+ LinkedHashMap<String,Object> nodeCaps = (LinkedHashMap<String,Object>)nodeDef.get("capabilities");
+ if(nodeCaps != null) {
+ for(Object val: nodeCaps.values()) {
+ if(val instanceof LinkedHashMap) {
+ String tp = (String)((LinkedHashMap<String,Object>)val).get("type");
+ if(tp != null && tp.equals(cap)) {
+ return nt;
+ }
+ }
+ }
+ }
+ }
+ }
+ return null;
+ }
+
+ @SuppressWarnings("unchecked")
+ private String _getRelation(String key,String ndtype) {
+ String relation = null;
+ NodeType ntype = new NodeType(ndtype,null);
+ LinkedHashMap<String,CapabilityTypeDef> caps = ntype.getCapabilities();
+ if(caps != null && caps.get(key) != null) {
+ CapabilityTypeDef c = caps.get(key);
+ for(int i=0; i< RELATIONSHIP_TYPE.length; i++) {
+ String r = RELATIONSHIP_TYPE[i];
+ LinkedHashMap<String,Object> rtypedef = (LinkedHashMap<String,Object>)TOSCA_DEF.get(r);
+ for(Object o: rtypedef.values()) {
+ LinkedHashMap<String,Object> properties = (LinkedHashMap<String,Object>)o;
+ if(properties.get(c.getType()) != null) {
+ relation = r;
+ break;
+ }
+ }
+ if(relation != null) {
+ break;
+ }
+ else {
+ for(Object o: rtypedef.values()) {
+ LinkedHashMap<String,Object> properties = (LinkedHashMap<String,Object>)o;
+ if(properties.get(c.getParentType()) != null) {
+ relation = r;
+ break;
+ }
+ }
+ }
+ }
+ }
+ return relation;
+ }
+
+ @SuppressWarnings("unchecked")
+ public ArrayList<CapabilityTypeDef> getCapabilitiesObjects() {
+ // Return a list of capability objects
+ ArrayList<CapabilityTypeDef> typecapabilities = new ArrayList<>();
+ LinkedHashMap<String,Object> caps = (LinkedHashMap<String,Object>)getValue(CAPABILITIES, null, true);
+ if(caps != null) {
+ // 'cname' is symbolic name of the capability
+ // 'cvalue' is a dict { 'type': <capability type name> }
+ for(Map.Entry<String,Object> me: caps.entrySet()) {
+ String cname = me.getKey();
+ LinkedHashMap<String,String> cvalue = (LinkedHashMap<String,String>)me.getValue();
+ String ctype = cvalue.get("type");
+ CapabilityTypeDef cap = new CapabilityTypeDef(cname,ctype,type,customDef);
+ typecapabilities.add(cap);
+ }
+ }
+ return typecapabilities;
+ }
+
+ public LinkedHashMap<String,CapabilityTypeDef> getCapabilities() {
+ // Return a dictionary of capability name-objects pairs
+ LinkedHashMap<String,CapabilityTypeDef> caps = new LinkedHashMap<>();
+ for(CapabilityTypeDef ctd: getCapabilitiesObjects()) {
+ caps.put(ctd.getName(),ctd);
+ }
+ return caps;
+ }
+
+ @SuppressWarnings("unchecked")
+ public ArrayList<Object> getRequirements() {
+ return (ArrayList<Object>)getValue(REQUIREMENTS,null,true);
+ }
+
+ public ArrayList<Object> getAllRequirements() {
+ return getRequirements();
+ }
+
+ @SuppressWarnings("unchecked")
+ public LinkedHashMap<String,Object> getInterfaces() {
+ return (LinkedHashMap<String,Object>)getValue(INTERFACES,null,false);
+ }
+
+
+ @SuppressWarnings("unchecked")
+ public ArrayList<String> getLifecycleInputs()
+ {
+ // Return inputs to life cycle operations if found
+ ArrayList<String> inputs = new ArrayList<>();
+ LinkedHashMap<String,Object> interfaces = getInterfaces();
+ if(interfaces != null) {
+ for(Map.Entry<String,Object> me: interfaces.entrySet()) {
+ String iname = me.getKey();
+ LinkedHashMap<String,Object> ivalue = (LinkedHashMap<String,Object>)me.getValue();
+ if(iname.equals(InterfacesDef.LIFECYCLE)) {
+ for(Map.Entry<String,Object> ie: ivalue.entrySet()) {
+ if(ie.getKey().equals("input")) {
+ LinkedHashMap<String,Object> y = (LinkedHashMap<String,Object>)ie.getValue();
+ for(String i: y.keySet()) {
+ inputs.add(i);
+ }
+ }
+ }
+ }
+ }
+ }
+ return inputs;
+ }
+
+ public ArrayList<String> getLifecycleOperations() {
+ // Return available life cycle operations if found
+ ArrayList<String> ops = null;
+ LinkedHashMap<String,Object> interfaces = getInterfaces();
+ if(interfaces != null) {
+ InterfacesDef i = new InterfacesDef(this,InterfacesDef.LIFECYCLE,null,null,null);
+ ops = i.getLifecycleOps();
+ }
+ return ops;
+ }
+
+ public CapabilityTypeDef getCapability(String name) {
+ //BUG?? the python code has to be wrong
+ // it refers to a bad attribute 'value'...
+ LinkedHashMap<String,CapabilityTypeDef> caps = getCapabilities();
+ if(caps != null) {
+ return caps.get(name);
+ }
+ return null;
+ /*
+ def get_capability(self, name):
+ caps = self.get_capabilities()
+ if caps and name in caps.keys():
+ return caps[name].value
+ */
+ }
+
+ public String getCapabilityType(String name) {
+ //BUG?? the python code has to be wrong
+ // it refers to a bad attribute 'value'...
+ CapabilityTypeDef captype = getCapability(name);
+ if(captype != null) {
+ return captype.getType();
+ }
+ return null;
+ /*
+ def get_capability_type(self, name):
+ captype = self.get_capability(name)
+ if captype and name in captype.keys():
+ return captype[name].value
+ */
+ }
+
+ private void _validateKeys() {
+ if(defs != null) {
+ for(String key: defs.keySet()) {
+ boolean bFound = false;
+ for(int i=0; i< SECTIONS.length; i++) {
+ if(key.equals(SECTIONS[i])) {
+ bFound = true;
+ break;
+ }
+ }
+ if(!bFound) {
+ ExceptionCollector.appendException(String.format(
+ "UnknownFieldError: Nodetype \"%s\" has unknown field \"%s\"",ntype,key));
+ }
+ }
+ }
+ }
+
+}
+
+/*python
+
+from toscaparser.common.exception import ExceptionCollector
+from toscaparser.common.exception import UnknownFieldError
+from toscaparser.elements.capabilitytype import CapabilityTypeDef
+import org.openecomp.sdc.toscaparser.elements.interfaces as ifaces
+from toscaparser.elements.interfaces import InterfacesDef
+from toscaparser.elements.relationshiptype import RelationshipType
+from toscaparser.elements.statefulentitytype import StatefulEntityType
+
+
+class NodeType(StatefulEntityType):
+ '''TOSCA built-in node type.'''
+ SECTIONS = (DERIVED_FROM, METADATA, PROPERTIES, VERSION, DESCRIPTION, ATTRIBUTES, REQUIREMENTS, CAPABILITIES, INTERFACES, ARTIFACTS) = \
+ ('derived_from', 'metadata', 'properties', 'version',
+ 'description', 'attributes', 'requirements', 'capabilities',
+ 'interfaces', 'artifacts')
+
+ def __init__(self, ntype, custom_def=None):
+ super(NodeType, self).__init__(ntype, self.NODE_PREFIX, custom_def)
+ self.ntype = ntype
+ self.custom_def = custom_def
+ self._validate_keys()
+
+ @property
+ def parent_type(self):
+ '''Return a node this node is derived from.'''
+ if not hasattr(self, 'defs'):
+ return None
+ pnode = self.derived_from(self.defs)
+ if pnode:
+ return NodeType(pnode, self.custom_def)
+
+ @property
+ def relationship(self):
+ '''Return a dictionary of relationships to other node types.
+
+ This method returns a dictionary of named relationships that nodes
+ of the current node type (self) can have to other nodes (of specific
+ types) in a TOSCA template.
+
+ '''
+ relationship = {}
+ requires = self.get_all_requirements()
+ if requires:
+ # NOTE(sdmonov): Check if requires is a dict.
+ # If it is a dict convert it to a list of dicts.
+ # This is needed because currently the code below supports only
+ # lists as requirements definition. The following check will
+ # make sure if a map (dict) was provided it will be converted to
+ # a list before proceeding to the parsing.
+ if isinstance(requires, dict):
+ requires = [{key: value} for key, value in requires.items()]
+
+ keyword = None
+ node_type = None
+ for require in requires:
+ for key, req in require.items():
+ if 'relationship' in req:
+ relation = req.get('relationship')
+ if 'type' in relation:
+ relation = relation.get('type')
+ node_type = req.get('node')
+ value = req
+ if node_type:
+ keyword = 'node'
+ else:
+ # If value is a dict and has a type key
+ # we need to lookup the node type using
+ # the capability type
+ value = req
+ if isinstance(value, dict):
+ captype = value['capability']
+ value = (self.
+ _get_node_type_by_cap(key, captype))
+ relation = self._get_relation(key, value)
+ keyword = key
+ node_type = value
+ rtype = RelationshipType(relation, keyword, self.custom_def)
+ relatednode = NodeType(node_type, self.custom_def)
+ relationship[rtype] = relatednode
+ return relationship
+
+ def _get_node_type_by_cap(self, key, cap):
+ '''Find the node type that has the provided capability
+
+ This method will lookup all node types if they have the
+ provided capability.
+ '''
+
+ # Filter the node types
+ node_types = [node_type for node_type in self.TOSCA_DEF.keys()
+ if node_type.startswith(self.NODE_PREFIX) and
+ node_type != 'tosca.nodes.Root']
+
+ for node_type in node_types:
+ node_def = self.TOSCA_DEF[node_type]
+ if isinstance(node_def, dict) and 'capabilities' in node_def:
+ node_caps = node_def['capabilities']
+ for value in node_caps.values():
+ if isinstance(value, dict) and \
+ 'type' in value and value['type'] == cap:
+ return node_type
+
+ def _get_relation(self, key, ndtype):
+ relation = None
+ ntype = NodeType(ndtype)
+ caps = ntype.get_capabilities()
+ if caps and key in caps.keys():
+ c = caps[key]
+ for r in self.RELATIONSHIP_TYPE:
+ rtypedef = ntype.TOSCA_DEF[r]
+ for properties in rtypedef.values():
+ if c.type in properties:
+ relation = r
+ break
+ if relation:
+ break
+ else:
+ for properties in rtypedef.values():
+ if c.parent_type in properties:
+ relation = r
+ break
+ return relation
+
+ def get_capabilities_objects(self):
+ '''Return a list of capability objects.'''
+ typecapabilities = []
+ caps = self.get_value(self.CAPABILITIES, None, True)
+ if caps:
+ # 'name' is symbolic name of the capability
+ # 'value' is a dict { 'type': <capability type name> }
+ for name, value in caps.items():
+ ctype = value.get('type')
+ cap = CapabilityTypeDef(name, ctype, self.type,
+ self.custom_def)
+ typecapabilities.append(cap)
+ return typecapabilities
+
+ def get_capabilities(self):
+ '''Return a dictionary of capability name-objects pairs.'''
+ return {cap.name: cap
+ for cap in self.get_capabilities_objects()}
+
+ @property
+ def requirements(self):
+ return self.get_value(self.REQUIREMENTS, None, True)
+
+ def get_all_requirements(self):
+ return self.requirements
+
+ @property
+ def interfaces(self):
+ return self.get_value(self.INTERFACES)
+
+ @property
+ def lifecycle_inputs(self):
+ '''Return inputs to life cycle operations if found.'''
+ inputs = []
+ interfaces = self.interfaces
+ if interfaces:
+ for name, value in interfaces.items():
+ if name == ifaces.LIFECYCLE:
+ for x, y in value.items():
+ if x == 'inputs':
+ for i in y.iterkeys():
+ inputs.append(i)
+ return inputs
+
+ @property
+ def lifecycle_operations(self):
+ '''Return available life cycle operations if found.'''
+ ops = None
+ interfaces = self.interfaces
+ if interfaces:
+ i = InterfacesDef(self.type, ifaces.LIFECYCLE)
+ ops = i.lifecycle_ops
+ return ops
+
+ def get_capability(self, name):
+ caps = self.get_capabilities()
+ if caps and name in caps.keys():
+ return caps[name].value
+
+ def get_capability_type(self, name):
+ captype = self.get_capability(name)
+ if captype and name in captype.keys():
+ return captype[name].value
+
+ def _validate_keys(self):
+ if self.defs:
+ for key in self.defs.keys():
+ if key not in self.SECTIONS:
+ ExceptionCollector.appendException(
+ UnknownFieldError(what='Nodetype"%s"' % self.ntype,
+ field=key))
+*/ \ No newline at end of file
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/PolicyType.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/PolicyType.java
new file mode 100644
index 0000000..942c021
--- /dev/null
+++ b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/PolicyType.java
@@ -0,0 +1,289 @@
+package org.openecomp.sdc.toscaparser.elements;
+
+import java.util.ArrayList;
+import java.util.LinkedHashMap;
+
+import org.openecomp.sdc.toscaparser.common.ExceptionCollector;
+import org.openecomp.sdc.toscaparser.utils.TOSCAVersionProperty;
+
+public class PolicyType extends StatefulEntityType {
+
+ private static final String DERIVED_FROM = "derived_from";
+ private static final String METADATA = "metadata";
+ private static final String PROPERTIES = "properties";
+ private static final String VERSION = "version";
+ private static final String DESCRIPTION = "description";
+ private static final String TARGETS = "targets";
+ private static final String TRIGGERS = "triggers";
+ private static final String TYPE = "type";
+
+ private static final String SECTIONS[] = {
+ DERIVED_FROM, METADATA, PROPERTIES, VERSION, DESCRIPTION, TARGETS, TRIGGERS, TYPE
+ };
+
+ private LinkedHashMap<String,Object> customDef;
+ private String policyDescription;
+ private Object policyVersion;
+ private LinkedHashMap<String,Object> properties;
+ private LinkedHashMap<String,Object> parentPolicies;
+ private LinkedHashMap<String,Object> metaData;
+ private ArrayList<String> targetsList;
+
+
+ public PolicyType(String _type, LinkedHashMap<String,Object> _customDef) {
+ super(_type,POLICY_PREFIX,_customDef);
+
+ type = _type;
+ customDef = _customDef;
+ _validateKeys();
+
+ metaData = null;
+ if(defs != null && defs.get(METADATA) != null) {
+ metaData = (LinkedHashMap<String,Object>)defs.get(METADATA);
+ _validateMetadata(metaData);
+ }
+
+ properties = null;
+ if(defs != null && defs.get(PROPERTIES) != null) {
+ properties = (LinkedHashMap<String,Object>)defs.get(PROPERTIES);
+ }
+ parentPolicies = _getParentPolicies();
+
+ policyVersion = null;
+ if(defs != null && defs.get(VERSION) != null) {
+ policyVersion = (new TOSCAVersionProperty(
+ defs.get(VERSION))).getVersion();
+ }
+
+ policyDescription = null;
+ if(defs != null && defs.get(DESCRIPTION) != null) {
+ policyDescription = (String)defs.get(DESCRIPTION);
+ }
+
+ targetsList = null;
+ if(defs != null && defs.get(TARGETS) != null) {
+ targetsList = (ArrayList<String>)defs.get(TARGETS);
+ _validateTargets(targetsList,customDef);
+ }
+
+ }
+
+ private LinkedHashMap<String,Object> _getParentPolicies() {
+ LinkedHashMap<String,Object> policies = new LinkedHashMap<>();
+ String parentPolicy;
+ if(getParentType() != null) {
+ parentPolicy = getParentType().getType();
+ }
+ else {
+ parentPolicy = null;
+ }
+ if(parentPolicy != null) {
+ while(parentPolicy != null && !parentPolicy.equals("tosca.policies.Root")) {
+ policies.put(parentPolicy, TOSCA_DEF.get(parentPolicy));
+ parentPolicy = (String)
+ ((LinkedHashMap<String,Object>)policies.get(parentPolicy)).get("derived_from);");
+ }
+ }
+ return policies;
+ }
+
+ public String getType() {
+ return type;
+ }
+
+ public PolicyType getParentType() {
+ // Return a policy statefulentity of this node is derived from
+ if(defs == null) {
+ return null;
+ }
+ String ppolicyEntity = derivedFrom(defs);
+ if(ppolicyEntity != null) {
+ return new PolicyType(ppolicyEntity,customDef);
+ }
+ return null;
+ }
+
+ public Object getPolicy(String name) {
+ // Return the definition of a policy field by name
+ if(defs != null && defs.get(name) != null) {
+ return defs.get(name);
+ }
+ return null;
+ }
+
+ public ArrayList<String> getTargets() {
+ // Return targets
+ return targetsList;
+ }
+
+ public String getDescription() {
+ return policyDescription;
+ }
+
+ public Object getVersion() {
+ return policyVersion;
+ }
+
+ private void _validateKeys() {
+ for(String key: defs.keySet()) {
+ boolean bFound = false;
+ for(String sect: SECTIONS) {
+ if(key.equals(sect)) {
+ bFound = true;
+ break;
+ }
+ }
+ if(!bFound) {
+ ExceptionCollector.appendException(String.format(
+ "UnknownFieldError: Policy \"%s\" contains unknown field \"%s\"",
+ type,key));
+ }
+ }
+ }
+
+ private void _validateTargets(ArrayList<String> _targetsList,
+ LinkedHashMap<String,Object> _customDef) {
+ for(String nodetype: _targetsList) {
+ if(_customDef.get(nodetype) == null) {
+ ExceptionCollector.appendException(String.format(
+ "InvalidTypeError: \"%s\" defined in targets for policy \"%s\"",
+ nodetype,type));
+
+ }
+ }
+ }
+
+ private void _validateMetadata(LinkedHashMap<String,Object> _metaData) {
+ String mtype = (String)_metaData.get("type");
+ if(mtype != null && !mtype.equals("map") && !mtype.equals("tosca:map")) {
+ ExceptionCollector.appendException(String.format(
+ "InvalidTypeError: \"%s\" defined in policy for metadata",
+ mtype));
+ }
+ for(String entrySchema: metaData.keySet()) {
+ Object estob = metaData.get(entrySchema);
+ if(estob instanceof LinkedHashMap) {
+ String est = (String)
+ ((LinkedHashMap<String,Object>)estob).get("type");
+ if(!est.equals("string")) {
+ ExceptionCollector.appendException(String.format(
+ "InvalidTypeError: \"%s\" defined in policy for metadata \"%s\"",
+ est,entrySchema));
+ }
+ }
+ }
+ }
+
+}
+
+/*python
+
+from toscaparser.common.exception import ExceptionCollector
+from toscaparser.common.exception import InvalidTypeError
+from toscaparser.common.exception import UnknownFieldError
+from toscaparser.elements.statefulentitytype import StatefulEntityType
+from toscaparser.utils.validateutils import TOSCAVersionProperty
+
+
+class PolicyType(StatefulEntityType):
+
+ '''TOSCA built-in policies type.'''
+ SECTIONS = (DERIVED_FROM, METADATA, PROPERTIES, VERSION, DESCRIPTION, TARGETS) = \
+ ('derived_from', 'metadata', 'properties', 'version',
+ 'description', 'targets')
+
+ def __init__(self, ptype, custom_def=None):
+ super(PolicyType, self).__init__(ptype, self.POLICY_PREFIX,
+ custom_def)
+ self.type = ptype
+ self.custom_def = custom_def
+ self._validate_keys()
+
+ self.meta_data = None
+ if self.METADATA in self.defs:
+ self.meta_data = self.defs[self.METADATA]
+ self._validate_metadata(self.meta_data)
+
+ self.properties = None
+ if self.PROPERTIES in self.defs:
+ self.properties = self.defs[self.PROPERTIES]
+ self.parent_policies = self._get_parent_policies()
+
+ self.policy_version = None
+ if self.VERSION in self.defs:
+ self.policy_version = TOSCAVersionProperty(
+ self.defs[self.VERSION]).get_version()
+
+ self.policy_description = self.defs[self.DESCRIPTION] \
+ if self.DESCRIPTION in self.defs else None
+
+ self.targets_list = None
+ if self.TARGETS in self.defs:
+ self.targets_list = self.defs[self.TARGETS]
+ self._validate_targets(self.targets_list, custom_def)
+
+ def _get_parent_policies(self):
+ policies = {}
+ parent_policy = self.parent_type.type if self.parent_type else None
+ if parent_policy:
+ while parent_policy != 'tosca.policies.Root':
+ policies[parent_policy] = self.TOSCA_DEF[parent_policy]
+ parent_policy = policies[parent_policy]['derived_from']
+ return policies
+
+ @property
+ def parent_type(self):
+ '''Return a policy statefulentity of this node is derived from.'''
+ if not hasattr(self, 'defs'):
+ return None
+ ppolicy_entity = self.derived_from(self.defs)
+ if ppolicy_entity:
+ return PolicyType(ppolicy_entity, self.custom_def)
+
+ def get_policy(self, name):
+ '''Return the definition of a policy field by name.'''
+ if name in self.defs:
+ return self.defs[name]
+
+ @property
+ def targets(self):
+ '''Return targets.'''
+ return self.targets_list
+
+ @property
+ def description(self):
+ return self.policy_description
+
+ @property
+ def version(self):
+ return self.policy_version
+
+ def _validate_keys(self):
+ for key in self.defs.keys():
+ if key not in self.SECTIONS:
+ ExceptionCollector.appendException(
+ UnknownFieldError(what='Policy "%s"' % self.type,
+ field=key))
+
+ def _validate_targets(self, targets_list, custom_def):
+ for nodetype in targets_list:
+ if nodetype not in custom_def:
+ ExceptionCollector.appendException(
+ InvalidTypeError(what='"%s" defined in targets for '
+ 'policy "%s"' % (nodetype, self.type)))
+
+ def _validate_metadata(self, meta_data):
+ if not meta_data.get('type') in ['map', 'tosca:map']:
+ ExceptionCollector.appendException(
+ InvalidTypeError(what='"%s" defined in policy for '
+ 'metadata' % (meta_data.get('type'))))
+
+ for entry_schema, entry_schema_type in meta_data.items():
+ if isinstance(entry_schema_type, dict) and not \
+ entry_schema_type.get('type') == 'string':
+ ExceptionCollector.appendException(
+ InvalidTypeError(what='"%s" defined in policy for '
+ 'metadata "%s"'
+ % (entry_schema_type.get('type'),
+ entry_schema)))
+*/ \ No newline at end of file
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/PortSpec.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/PortSpec.java
new file mode 100644
index 0000000..bae9488
--- /dev/null
+++ b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/PortSpec.java
@@ -0,0 +1,159 @@
+package org.openecomp.sdc.toscaparser.elements;
+
+import java.util.LinkedHashMap;
+
+import org.openecomp.sdc.toscaparser.DataEntity;
+import org.openecomp.sdc.toscaparser.common.ExceptionCollector;
+import org.openecomp.sdc.toscaparser.utils.ValidateUtils;
+
+public class PortSpec {
+ // Parent class for tosca.datatypes.network.PortSpec type
+
+ private static final String SHORTNAME = "PortSpec";
+ private static final String TYPE_URI = "tosca.datatypes.network." + SHORTNAME;
+
+ private static final String PROTOCOL = "protocol";
+ private static final String SOURCE = "source";
+ private static final String SOURCE_RANGE = "source_range";
+ private static final String TARGET = "target";
+ private static final String TARGET_RANGE = "target_range";
+
+ private static final String PROPERTY_NAMES[] = {
+ PROTOCOL, SOURCE, SOURCE_RANGE,
+ TARGET, TARGET_RANGE
+ };
+
+ // todo(TBD) May want to make this a subclass of DataType
+ // and change init method to set PortSpec's properties
+ public PortSpec() {
+
+ }
+
+ // The following additional requirements MUST be tested:
+ // 1) A valid PortSpec MUST have at least one of the following properties:
+ // target, target_range, source or source_range.
+ // 2) A valid PortSpec MUST have a value for the source property that
+ // is within the numeric range specified by the property source_range
+ // when source_range is specified.
+ // 3) A valid PortSpec MUST have a value for the target property that is
+ // within the numeric range specified by the property target_range
+ // when target_range is specified.
+ public static void validateAdditionalReq(Object _properties,
+ String propName,
+ LinkedHashMap<String,Object> custom_def) {
+
+ try {
+ LinkedHashMap<String,Object> properties = (LinkedHashMap<String,Object>)_properties;
+ Object source = properties.get(PortSpec.SOURCE);
+ Object sourceRange = properties.get(PortSpec.SOURCE_RANGE);
+ Object target = properties.get(PortSpec.TARGET);
+ Object targetRange = properties.get(PortSpec.TARGET_RANGE);
+
+ // verify one of the specified values is set
+ if(source == null && sourceRange == null &&
+ target == null && targetRange == null) {
+ ExceptionCollector.appendException(String.format(
+ "InvalidTypeAdditionalRequirementsError: Additional requirements for type \"%s\" not met",
+ TYPE_URI));
+ }
+ // Validate source value is in specified range
+ if(source != null && sourceRange != null) {
+ ValidateUtils.validateValueInRange(source,sourceRange,SOURCE);
+ }
+ else {
+ DataEntity portdef = new DataEntity("PortDef", source, null, SOURCE);
+ portdef.validate();
+ }
+ // Validate target value is in specified range
+ if(target != null && targetRange != null) {
+ ValidateUtils.validateValueInRange(target,targetRange,SOURCE);
+ }
+ else {
+ DataEntity portdef = new DataEntity("PortDef", source, null, TARGET);
+ portdef.validate();
+ }
+ }
+ catch(Exception e) {
+ ExceptionCollector.appendException(String.format(
+ "ValueError: \"%s\" do not meet requirements for type \"%s\"",
+ _properties.toString(),SHORTNAME));
+ }
+ }
+
+}
+
+/*python
+
+from toscaparser.common.exception import ExceptionCollector
+from toscaparser.common.exception import InvalidTypeAdditionalRequirementsError
+from toscaparser.utils.gettextutils import _
+import org.openecomp.sdc.toscaparser.utils.validateutils as validateutils
+
+log = logging.getLogger('tosca')
+
+
+class PortSpec(object):
+ '''Parent class for tosca.datatypes.network.PortSpec type.'''
+
+ SHORTNAME = 'PortSpec'
+ TYPE_URI = 'tosca.datatypes.network.' + SHORTNAME
+
+ PROPERTY_NAMES = (
+ PROTOCOL, SOURCE, SOURCE_RANGE,
+ TARGET, TARGET_RANGE
+ ) = (
+ 'protocol', 'source', 'source_range',
+ 'target', 'target_range'
+ )
+
+ # TODO(TBD) May want to make this a subclass of DataType
+ # and change init method to set PortSpec's properties
+ def __init__(self):
+ pass
+
+ # The following additional requirements MUST be tested:
+ # 1) A valid PortSpec MUST have at least one of the following properties:
+ # target, target_range, source or source_range.
+ # 2) A valid PortSpec MUST have a value for the source property that
+ # is within the numeric range specified by the property source_range
+ # when source_range is specified.
+ # 3) A valid PortSpec MUST have a value for the target property that is
+ # within the numeric range specified by the property target_range
+ # when target_range is specified.
+ @staticmethod
+ def validate_additional_req(properties, prop_name, custom_def=None, ):
+ try:
+ source = properties.get(PortSpec.SOURCE)
+ source_range = properties.get(PortSpec.SOURCE_RANGE)
+ target = properties.get(PortSpec.TARGET)
+ target_range = properties.get(PortSpec.TARGET_RANGE)
+
+ # verify one of the specified values is set
+ if source is None and source_range is None and \
+ target is None and target_range is None:
+ ExceptionCollector.appendException(
+ InvalidTypeAdditionalRequirementsError(
+ type=PortSpec.TYPE_URI))
+ # Validate source value is in specified range
+ if source and source_range:
+ validateutils.validate_value_in_range(source, source_range,
+ PortSpec.SOURCE)
+ else:
+ from toscaparser.dataentity import DataEntity
+ portdef = DataEntity('PortDef', source, None, PortSpec.SOURCE)
+ portdef.validate()
+ # Validate target value is in specified range
+ if target and target_range:
+ validateutils.validate_value_in_range(target, target_range,
+ PortSpec.TARGET)
+ else:
+ from toscaparser.dataentity import DataEntity
+ portdef = DataEntity('PortDef', source, None, PortSpec.TARGET)
+ portdef.validate()
+ except Exception:
+ msg = _('"%(value)s" do not meet requirements '
+ 'for type "%(type)s".') \
+ % {'value': properties, 'type': PortSpec.SHORTNAME}
+ ExceptionCollector.appendException(
+ ValueError(msg))
+*/ \ No newline at end of file
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/PropertyDef.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/PropertyDef.java
new file mode 100644
index 0000000..15e554c
--- /dev/null
+++ b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/PropertyDef.java
@@ -0,0 +1,230 @@
+package org.openecomp.sdc.toscaparser.elements;
+
+import java.util.LinkedHashMap;
+import java.util.Map;
+
+import org.openecomp.sdc.toscaparser.common.ExceptionCollector;
+
+public class PropertyDef {
+
+ private static final String PROPERTY_KEYNAME_DEFAULT = "default";
+ private static final String PROPERTY_KEYNAME_REQUIRED = "required";
+ private static final String PROPERTY_KEYNAME_STATUS = "status";
+ private static final String VALID_PROPERTY_KEYNAMES[] = {
+ PROPERTY_KEYNAME_DEFAULT,
+ PROPERTY_KEYNAME_REQUIRED,
+ PROPERTY_KEYNAME_STATUS};
+
+ private static final boolean PROPERTY_REQUIRED_DEFAULT = true;
+
+ private static final String VALID_REQUIRED_VALUES[] = {"true", "false"};
+
+ private static final String PROPERTY_STATUS_SUPPORTED = "supported";
+ private static final String PROPERTY_STATUS_EXPERIMENTAL = "experimental";
+ private static final String VALID_STATUS_VALUES[] = {
+ PROPERTY_STATUS_SUPPORTED, PROPERTY_STATUS_EXPERIMENTAL};
+
+ private static final String PROPERTY_STATUS_DEFAULT = PROPERTY_STATUS_SUPPORTED;
+
+ private String name;
+ private Object value;
+ private LinkedHashMap<String,Object> schema;
+ private String _status;
+ private boolean _required;
+
+ public PropertyDef(String pdName, Object pdValue,
+ LinkedHashMap<String,Object> pdSchema) {
+ name = pdName;
+ value = pdValue;
+ schema = pdSchema;
+ _status = PROPERTY_STATUS_DEFAULT;
+ _required = PROPERTY_REQUIRED_DEFAULT;
+
+ if(schema != null) {
+ // Validate required 'type' property exists
+ if(schema.get("type") == null) {
+ //msg = (_('Schema definition of "%(pname)s" must have a "type" '
+ // 'attribute.') % dict(pname=self.name))
+ ExceptionCollector.appendException(String.format(
+ "InvalidSchemaError: Schema definition of \"%s\" must have a \"type\" attribute",name));
+ }
+ _loadRequiredAttrFromSchema();
+ _loadStatusAttrFromSchema();
+ }
+ }
+
+ public Object getDefault() {
+ if(schema != null) {
+ for(Map.Entry<String,Object> me: schema.entrySet()) {
+ if(me.getKey().equals(PROPERTY_KEYNAME_DEFAULT)) {
+ return me.getValue();
+ }
+ }
+ }
+ return null;
+ }
+
+ public boolean isRequired() {
+ return _required;
+ }
+
+ private void _loadRequiredAttrFromSchema() {
+ // IF 'required' keyname exists verify it's a boolean,
+ // if so override default
+ Object val = schema.get(PROPERTY_KEYNAME_REQUIRED);
+ if(val != null) {
+ if(val instanceof Boolean) {
+ _required = (boolean)val;
+ }
+ else {
+ //valid_values = ', '.join(self.VALID_REQUIRED_VALUES)
+ //attr = self.PROPERTY_KEYNAME_REQUIRED
+ //TOSCAException.generate_inv_schema_property_error(self,
+ // attr,
+ // value,
+ // valid_values)
+ ExceptionCollector.appendException(String.format(
+ "Schema definition of \"%s\" has \"required\" attribute with an invalid value",
+ name));
+ }
+ }
+ }
+
+ public String getStatus() {
+ return _status;
+ }
+
+ private void _loadStatusAttrFromSchema() {
+ // IF 'status' keyname exists verify it's a boolean,
+ // if so override default
+ String sts = (String)schema.get(PROPERTY_KEYNAME_STATUS);
+ if(sts != null) {
+ boolean bFound = false;
+ for(String vsv: VALID_STATUS_VALUES) {
+ if(vsv.equals(sts)) {
+ bFound = true;
+ break;
+ }
+ }
+ if(bFound) {
+ _status = sts;
+ }
+ else {
+ //valid_values = ', '.join(self.VALID_STATUS_VALUES)
+ //attr = self.PROPERTY_KEYNAME_STATUS
+ //TOSCAException.generate_inv_schema_property_error(self,
+ // attr,
+ // value,
+ // valid_values)
+ ExceptionCollector.appendException(String.format(
+ "Schema definition of \"%s\" has \"status\" attribute with an invalid value",
+ name));
+ }
+ }
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public LinkedHashMap<String,Object> getSchema() {
+ return schema;
+ }
+
+ public Object getPDValue() {
+ // there's getValue in EntityType...
+ return value;
+ }
+
+}
+/*python
+
+from toscaparser.common.exception import ExceptionCollector
+from toscaparser.common.exception import InvalidSchemaError
+from toscaparser.common.exception import TOSCAException
+from toscaparser.utils.gettextutils import _
+
+
+class PropertyDef(object):
+ '''TOSCA built-in Property type.'''
+
+ VALID_PROPERTY_KEYNAMES = (PROPERTY_KEYNAME_DEFAULT,
+ PROPERTY_KEYNAME_REQUIRED,
+ PROPERTY_KEYNAME_STATUS) = \
+ ('default', 'required', 'status')
+
+ PROPERTY_REQUIRED_DEFAULT = True
+
+ VALID_REQUIRED_VALUES = ['true', 'false']
+ VALID_STATUS_VALUES = (PROPERTY_STATUS_SUPPORTED,
+ PROPERTY_STATUS_EXPERIMENTAL) = \
+ ('supported', 'experimental')
+
+ PROPERTY_STATUS_DEFAULT = PROPERTY_STATUS_SUPPORTED
+
+ def __init__(self, name, value=None, schema=None):
+ self.name = name
+ self.value = value
+ self.schema = schema
+ self._status = self.PROPERTY_STATUS_DEFAULT
+ self._required = self.PROPERTY_REQUIRED_DEFAULT
+
+ # Validate required 'type' property exists
+ try:
+ self.schema['type']
+ except KeyError:
+ msg = (_('Schema definition of "%(pname)s" must have a "type" '
+ 'attribute.') % dict(pname=self.name))
+ ExceptionCollector.appendException(
+ InvalidSchemaError(message=msg))
+
+ if self.schema:
+ self._load_required_attr_from_schema()
+ self._load_status_attr_from_schema()
+
+ @property
+ def default(self):
+ if self.schema:
+ for prop_key, prop_value in self.schema.items():
+ if prop_key == self.PROPERTY_KEYNAME_DEFAULT:
+ return prop_value
+ return None
+
+ @property
+ def required(self):
+ return self._required
+
+ def _load_required_attr_from_schema(self):
+ # IF 'required' keyname exists verify it's a boolean,
+ # if so override default
+ if self.PROPERTY_KEYNAME_REQUIRED in self.schema:
+ value = self.schema[self.PROPERTY_KEYNAME_REQUIRED]
+ if isinstance(value, bool):
+ self._required = value
+ else:
+ valid_values = ', '.join(self.VALID_REQUIRED_VALUES)
+ attr = self.PROPERTY_KEYNAME_REQUIRED
+ TOSCAException.generate_inv_schema_property_error(self,
+ attr,
+ value,
+ valid_values)
+
+ @property
+ def status(self):
+ return self._status
+
+ def _load_status_attr_from_schema(self):
+ # IF 'status' keyname exists verify it's a valid value,
+ # if so override default
+ if self.PROPERTY_KEYNAME_STATUS in self.schema:
+ value = self.schema[self.PROPERTY_KEYNAME_STATUS]
+ if value in self.VALID_STATUS_VALUES:
+ self._status = value
+ else:
+ valid_values = ', '.join(self.VALID_STATUS_VALUES)
+ attr = self.PROPERTY_KEYNAME_STATUS
+ TOSCAException.generate_inv_schema_property_error(self,
+ attr,
+ value,
+ valid_values)
+*/
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/RelationshipType.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/RelationshipType.java
new file mode 100644
index 0000000..2398ca3
--- /dev/null
+++ b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/RelationshipType.java
@@ -0,0 +1,102 @@
+package org.openecomp.sdc.toscaparser.elements;
+
+import java.util.LinkedHashMap;
+
+import org.openecomp.sdc.toscaparser.common.ExceptionCollector;
+import org.openecomp.sdc.toscaparser.elements.EntityType;
+import org.openecomp.sdc.toscaparser.elements.StatefulEntityType;
+
+public class RelationshipType extends StatefulEntityType {
+
+ private static final String DERIVED_FROM = "derived_from";
+ private static final String VALID_TARGET_TYPES = "valid_target_types";
+ private static final String INTERFACES = "interfaces";
+ private static final String ATTRIBUTES = "attributes";
+ private static final String PROPERTIES = "properties";
+ private static final String DESCRIPTION = "description";
+ private static final String VERSION = "version";
+ private static final String CREDENTIAL = "credential";
+
+ private static final String SECTIONS[] = {
+ DERIVED_FROM, VALID_TARGET_TYPES, INTERFACES,
+ ATTRIBUTES, PROPERTIES, DESCRIPTION, VERSION, CREDENTIAL};
+
+ private String capabilityName;
+ private LinkedHashMap<String,Object> customDef;
+
+ public RelationshipType(String _type, String _capabilityName, LinkedHashMap<String,Object> _customDef) {
+ super(_type,RELATIONSHIP_PREFIX,_customDef);
+ capabilityName = _capabilityName;
+ customDef = _customDef;
+ }
+
+ public RelationshipType getParentType() {
+ // Return a relationship this reletionship is derived from.'''
+ String prel = derivedFrom(defs);
+ if(prel != null) {
+ return new RelationshipType(prel,null,customDef);
+ }
+ return null;
+ }
+
+ public Object getValidTargetTypes() {
+ return entityValue(defs,"valid_target_types");
+ }
+
+ private void _validateKeys() {
+ for(String key: defs.keySet()) {
+ boolean bFound = false;
+ for(int i=0; i< SECTIONS.length; i++) {
+ if(key.equals(SECTIONS[i])) {
+ bFound = true;
+ break;
+ }
+ }
+ if(!bFound) {
+ ExceptionCollector.appendException(String.format(
+ "UnknownFieldError: Relationshiptype \"%s\" has unknown field \"%s\"",type,key));
+ }
+ }
+ }
+}
+
+/*python
+
+from toscaparser.common.exception import ExceptionCollector
+from toscaparser.common.exception import UnknownFieldError
+from toscaparser.elements.statefulentitytype import StatefulEntityType
+
+
+class RelationshipType(StatefulEntityType):
+ '''TOSCA built-in relationship type.'''
+ SECTIONS = (DERIVED_FROM, VALID_TARGET_TYPES, INTERFACES,
+ ATTRIBUTES, PROPERTIES, DESCRIPTION, VERSION,
+ CREDENTIAL) = ('derived_from', 'valid_target_types',
+ 'interfaces', 'attributes', 'properties',
+ 'description', 'version', 'credential')
+
+ def __init__(self, type, capability_name=None, custom_def=None):
+ super(RelationshipType, self).__init__(type, self.RELATIONSHIP_PREFIX,
+ custom_def)
+ self.capability_name = capability_name
+ self.custom_def = custom_def
+ self._validate_keys()
+
+ @property
+ def parent_type(self):
+ '''Return a relationship this reletionship is derived from.'''
+ prel = self.derived_from(self.defs)
+ if prel:
+ return RelationshipType(prel, self.custom_def)
+
+ @property
+ def valid_target_types(self):
+ return self.entity_value(self.defs, 'valid_target_types')
+
+ def _validate_keys(self):
+ for key in self.defs.keys():
+ if key not in self.SECTIONS:
+ ExceptionCollector.appendException(
+ UnknownFieldError(what='Relationshiptype "%s"' % self.type,
+ field=key))
+*/ \ No newline at end of file
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/ScalarUnit.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/ScalarUnit.java
new file mode 100644
index 0000000..b8e27e4
--- /dev/null
+++ b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/ScalarUnit.java
@@ -0,0 +1,261 @@
+package org.openecomp.sdc.toscaparser.elements;
+
+import java.util.HashMap;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.openecomp.sdc.toscaparser.common.ExceptionCollector;
+import org.openecomp.sdc.toscaparser.utils.ValidateUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public abstract class ScalarUnit {
+
+ private static Logger log = LoggerFactory.getLogger(ScalarUnit.class.getName());
+
+ private static final String SCALAR_UNIT_SIZE = "scalar-unit.size";
+ private static final String SCALAR_UNIT_FREQUENCY = "scalar-unit.frequency";
+ private static final String SCALAR_UNIT_TIME = "scalar-unit.time";
+
+ public static final String SCALAR_UNIT_TYPES[] = {
+ SCALAR_UNIT_SIZE, SCALAR_UNIT_FREQUENCY, SCALAR_UNIT_TIME
+ };
+
+ private Object value;
+ protected HashMap<String,Object> SCALAR_UNIT_DICT;
+ protected String SCALAR_UNIT_DEFAULT;
+
+ public ScalarUnit(Object _value) {
+ value = _value;
+ SCALAR_UNIT_DICT = new HashMap<>();
+ SCALAR_UNIT_DEFAULT = "";
+ }
+
+
+ private String _checkUnitInScalarStandardUnits(String inputUnit) {
+ // Check whether the input unit is following specified standard
+
+ // If unit is not following specified standard, convert it to standard
+ // unit after displaying a warning message.
+
+ if(SCALAR_UNIT_DICT.get(inputUnit) != null) {
+ return inputUnit;
+ }
+ else {
+ for(String key: SCALAR_UNIT_DICT.keySet()) {
+ if(key.toUpperCase().equals(inputUnit.toUpperCase())) {
+ log.debug("ScalarUnit - _checkUnitInScalarStandardUnits - \n" +
+ "The unit {} does not follow scalar unit standards\n" +
+ "using {} instead",
+ inputUnit, key);
+ return key;
+ }
+ }
+ ExceptionCollector.appendException(String.format(
+ "'The unit \"%s\" is not valid. Valid units are \n%s",
+ inputUnit,SCALAR_UNIT_DICT.keySet().toString()));
+ return inputUnit;
+ }
+ }
+
+ public Object validateScalarUnit() {
+ Pattern pattern = Pattern.compile("([0-9.]+)\\s*(\\w+)");
+ Matcher matcher = pattern.matcher(value.toString());
+ if(matcher.find()) {
+ ValidateUtils.strToNum(matcher.group(1));
+ String scalarUnit = _checkUnitInScalarStandardUnits(matcher.group(2));
+ value = matcher.group(1) + " " + scalarUnit;
+ }
+ else {
+ ExceptionCollector.appendException(String.format(
+ "ValueError: \"%s\" is not a valid scalar-unit",value.toString()));
+ }
+ return value;
+ }
+
+ public double getNumFromScalarUnit(String unit) {
+ if(unit != null) {
+ unit = _checkUnitInScalarStandardUnits(unit);
+ }
+ else {
+ unit = SCALAR_UNIT_DEFAULT;
+ }
+ Pattern pattern = Pattern.compile("([0-9.]+)\\s*(\\w+)");
+ Matcher matcher = pattern.matcher(value.toString());
+ if(matcher.find()) {
+ ValidateUtils.strToNum(matcher.group(1));
+ String scalarUnit = _checkUnitInScalarStandardUnits(matcher.group(2));
+ value = matcher.group(1) + " " + scalarUnit;
+ Object on1 = ValidateUtils.strToNum(matcher.group(1));
+ Object on2 = SCALAR_UNIT_DICT.get(matcher.group(2));
+ Object on3 = SCALAR_UNIT_DICT.get(unit);
+
+ Double n1 = new Double(on1.toString());
+ Double n2 = new Double(on2.toString());
+ Double n3 = new Double(on3.toString());
+ double converted = n1 * n2 / n3;
+ if(Math.abs(converted - Math.round(converted)) < 0.0000000000001 ) {
+ converted = Math.round(converted);
+ }
+ return converted;
+ }
+ return 0l; //???
+ }
+
+ protected static HashMap<String,String> scalarunitMapping = _getScalarunitMappings();
+
+ private static HashMap<String,String> _getScalarunitMappings() {
+ HashMap<String,String> map = new HashMap<>();
+ map.put(SCALAR_UNIT_FREQUENCY,"ScalarUnitFrequency");
+ map.put(SCALAR_UNIT_SIZE, "ScalarUnitSize");
+ map.put(SCALAR_UNIT_TIME, "ScalarUnit_Time");
+ return map;
+ }
+
+ public static ScalarUnit getScalarunitClass(String type,Object val) {
+ if(type.equals(SCALAR_UNIT_SIZE)) {
+ return new ScalarUnitSize(val);
+ }
+ else if(type.equals(SCALAR_UNIT_TIME)) {
+ return new ScalarUnitTime(val);
+ }
+ else if(type.equals(SCALAR_UNIT_FREQUENCY)) {
+ return new ScalarUnitFrequency(val);
+ }
+ return null;
+ }
+
+ public static double getScalarunitValue(String type, Object value, String unit) {
+ if(type.equals(SCALAR_UNIT_SIZE)) {
+ return (new ScalarUnitSize(value)).getNumFromScalarUnit(unit);
+ }
+ if(type.equals(SCALAR_UNIT_TIME)) {
+ return (new ScalarUnitTime(value)).getNumFromScalarUnit(unit);
+ }
+ if(type.equals(SCALAR_UNIT_FREQUENCY)) {
+ return (new ScalarUnitFrequency(value)).getNumFromScalarUnit(unit);
+ }
+ ExceptionCollector.appendException(String.format(
+ "TypeError: \"%s\" is not a valid scalar-unit type",type));
+ return 0.0;
+ }
+
+}
+
+/*python
+
+from toscaparser.common.exception import ExceptionCollector
+from toscaparser.utils.gettextutils import _
+from toscaparser.utils import validateutils
+
+log = logging.getLogger('tosca')
+
+
+class ScalarUnit(object):
+ '''Parent class for scalar-unit type.'''
+
+ SCALAR_UNIT_TYPES = (
+ SCALAR_UNIT_SIZE, SCALAR_UNIT_FREQUENCY, SCALAR_UNIT_TIME
+ ) = (
+ 'scalar-unit.size', 'scalar-unit.frequency', 'scalar-unit.time'
+ )
+
+ def __init__(self, value):
+ self.value = value
+
+ def _check_unit_in_scalar_standard_units(self, input_unit):
+ """Check whether the input unit is following specified standard
+
+ If unit is not following specified standard, convert it to standard
+ unit after displaying a warning message.
+ """
+ if input_unit in self.SCALAR_UNIT_DICT.keys():
+ return input_unit
+ else:
+ for key in self.SCALAR_UNIT_DICT.keys():
+ if key.upper() == input_unit.upper():
+ log.warning(_('The unit "%(unit)s" does not follow '
+ 'scalar unit standards; using "%(key)s" '
+ 'instead.') % {'unit': input_unit,
+ 'key': key})
+ return key
+ msg = (_('The unit "%(unit)s" is not valid. Valid units are '
+ '"%(valid_units)s".') %
+ {'unit': input_unit,
+ 'valid_units': sorted(self.SCALAR_UNIT_DICT.keys())})
+ ExceptionCollector.appendException(ValueError(msg))
+
+ def validate_scalar_unit(self):
+ regex = re.compile('([0-9.]+)\s*(\w+)')
+ try:
+ result = regex.match(str(self.value)).groups()
+ validateutils.str_to_num(result[0])
+ scalar_unit = self._check_unit_in_scalar_standard_units(result[1])
+ self.value = ' '.join([result[0], scalar_unit])
+ return self.value
+
+ except Exception:
+ ExceptionCollector.appendException(
+ ValueError(_('"%s" is not a valid scalar-unit.')
+ % self.value))
+
+ def get_num_from_scalar_unit(self, unit=None):
+ if unit:
+ unit = self._check_unit_in_scalar_standard_units(unit)
+ else:
+ unit = self.SCALAR_UNIT_DEFAULT
+ self.validate_scalar_unit()
+
+ regex = re.compile('([0-9.]+)\s*(\w+)')
+ result = regex.match(str(self.value)).groups()
+ converted = (float(validateutils.str_to_num(result[0]))
+ * self.SCALAR_UNIT_DICT[result[1]]
+ / self.SCALAR_UNIT_DICT[unit])
+ if converted - int(converted) < 0.0000000000001:
+ converted = int(converted)
+ return converted
+
+
+class ScalarUnit_Size(ScalarUnit):
+
+ SCALAR_UNIT_DEFAULT = 'B'
+ SCALAR_UNIT_DICT = {'B': 1, 'kB': 1000, 'KiB': 1024, 'MB': 1000000,
+ 'MiB': 1048576, 'GB': 1000000000,
+ 'GiB': 1073741824, 'TB': 1000000000000,
+ 'TiB': 1099511627776}
+
+
+class ScalarUnit_Time(ScalarUnit):
+
+ SCALAR_UNIT_DEFAULT = 'ms'
+ SCALAR_UNIT_DICT = {'d': 86400, 'h': 3600, 'm': 60, 's': 1,
+ 'ms': 0.001, 'us': 0.000001, 'ns': 0.000000001}
+
+
+class ScalarUnit_Frequency(ScalarUnit):
+
+ SCALAR_UNIT_DEFAULT = 'GHz'
+ SCALAR_UNIT_DICT = {'Hz': 1, 'kHz': 1000,
+ 'MHz': 1000000, 'GHz': 1000000000}
+
+
+scalarunit_mapping = {
+ ScalarUnit.SCALAR_UNIT_FREQUENCY: ScalarUnit_Frequency,
+ ScalarUnit.SCALAR_UNIT_SIZE: ScalarUnit_Size,
+ ScalarUnit.SCALAR_UNIT_TIME: ScalarUnit_Time,
+ }
+
+
+def get_scalarunit_class(type):
+ return scalarunit_mapping.get(type)
+
+
+def get_scalarunit_value(type, value, unit=None):
+ if type in ScalarUnit.SCALAR_UNIT_TYPES:
+ ScalarUnit_Class = get_scalarunit_class(type)
+ return (ScalarUnit_Class(value).
+ get_num_from_scalar_unit(unit))
+ else:
+ ExceptionCollector.appendException(
+ TypeError(_('"%s" is not a valid scalar-unit type.') % type))
+*/ \ No newline at end of file
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/ScalarUnitFrequency.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/ScalarUnitFrequency.java
new file mode 100644
index 0000000..63d3f94
--- /dev/null
+++ b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/ScalarUnitFrequency.java
@@ -0,0 +1,14 @@
+package org.openecomp.sdc.toscaparser.elements;
+
+public class ScalarUnitFrequency extends ScalarUnit {
+
+ public ScalarUnitFrequency(Object value) {
+ super(value);
+ SCALAR_UNIT_DEFAULT = "GHz";
+ SCALAR_UNIT_DICT.put("Hz",1L);
+ SCALAR_UNIT_DICT.put("kHz",1000L);
+ SCALAR_UNIT_DICT.put("MHz",1000000L);
+ SCALAR_UNIT_DICT.put("GHz",1000000000L);
+ }
+
+}
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/ScalarUnitSize.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/ScalarUnitSize.java
new file mode 100644
index 0000000..e3028dc
--- /dev/null
+++ b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/ScalarUnitSize.java
@@ -0,0 +1,19 @@
+package org.openecomp.sdc.toscaparser.elements;
+
+public class ScalarUnitSize extends ScalarUnit {
+
+ public ScalarUnitSize(Object value) {
+ super(value);
+
+ SCALAR_UNIT_DEFAULT = "B";
+ SCALAR_UNIT_DICT.put("B",1L);
+ SCALAR_UNIT_DICT.put("kB",1000L);
+ SCALAR_UNIT_DICT.put("kiB",1024L);
+ SCALAR_UNIT_DICT.put("MB",1000000L);
+ SCALAR_UNIT_DICT.put("MiB",1048576L);
+ SCALAR_UNIT_DICT.put("GB",1000000000L);
+ SCALAR_UNIT_DICT.put("GiB",1073741824L);
+ SCALAR_UNIT_DICT.put("TB",1000000000000L);
+ SCALAR_UNIT_DICT.put("TiB",1099511627776L);
+ }
+}
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/ScalarUnitTime.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/ScalarUnitTime.java
new file mode 100644
index 0000000..b8c0ad0
--- /dev/null
+++ b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/ScalarUnitTime.java
@@ -0,0 +1,17 @@
+package org.openecomp.sdc.toscaparser.elements;
+
public class ScalarUnitTime extends ScalarUnit {

    // Scalar unit handler for TOSCA "scalar-unit.time" values.
    // Conversion factors are relative to one second; the sub-second units
    // (ms/us/ns) therefore use fractional (Double) factors while the larger
    // units are whole Longs.
    public ScalarUnitTime(Object value) {
        super(value);
        // Default unit applied when a conversion is requested without an explicit target
        SCALAR_UNIT_DEFAULT = "ms";
        SCALAR_UNIT_DICT.put("d",86400L);
        SCALAR_UNIT_DICT.put("h",3600L);
        SCALAR_UNIT_DICT.put("m",60L);
        SCALAR_UNIT_DICT.put("s",1L);
        SCALAR_UNIT_DICT.put("ms",0.001);
        SCALAR_UNIT_DICT.put("us",0.000001);
        SCALAR_UNIT_DICT.put("ns",0.000000001);
    }

}
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/StatefulEntityType.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/StatefulEntityType.java
new file mode 100644
index 0000000..2ac42a1
--- /dev/null
+++ b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/StatefulEntityType.java
@@ -0,0 +1,220 @@
+package org.openecomp.sdc.toscaparser.elements;
+
+import org.openecomp.sdc.toscaparser.common.ExceptionCollector;
+
+import java.util.ArrayList;
+import java.util.LinkedHashMap;
+import java.util.Map;
+
+import org.openecomp.sdc.toscaparser.UnsupportedType;
+import org.openecomp.sdc.toscaparser.elements.EntityType;
+import org.openecomp.sdc.toscaparser.elements.PropertyDef;
+import org.openecomp.sdc.toscaparser.elements.AttributeDef;
+
+
+public class StatefulEntityType extends EntityType {
+ // Class representing TOSCA states
+
+ public static final String interfacesNodeLifecycleOperations[] = {
+ "create", "configure", "start", "stop", "delete"};
+
+ public static final String interfacesRelationshipConfigureOperations[] = {
+ "post_configure_source", "post_configure_target", "add_target", "remove_target"};
+
+ public StatefulEntityType() {
+ // void constructor for subclasses that don't want super
+ }
+
+ @SuppressWarnings("unchecked")
+ public StatefulEntityType(String entityType, String prefix, LinkedHashMap<String,Object> customDef) {
+
+ String entireEntityType = entityType;
+ if(UnsupportedType.validateType(entireEntityType)) {
+ defs = null;
+ }
+ else {
+ if(entityType.startsWith(TOSCA + ":")) {
+ entityType = entityType.substring(TOSCA.length()+1);
+ entireEntityType = prefix + entityType;
+ }
+ if(!entityType.startsWith(TOSCA)) {
+ entireEntityType = prefix + entityType;
+ }
+ if(TOSCA_DEF.get(entireEntityType) != null) {
+ defs = (LinkedHashMap<String,Object> )TOSCA_DEF.get(entireEntityType);
+ entityType = entireEntityType;
+ }
+ else if(customDef != null && customDef.get(entityType) != null) {
+ defs = (LinkedHashMap<String,Object> )customDef.get(entityType);
+ }
+ else{
+ defs = null;
+ ExceptionCollector.appendException(String.format(
+ "InvalidTypeError: \"%s\" is not a valid type",entityType));
+ }
+ }
+ type = entityType;
+ }
+
+ @SuppressWarnings("unchecked")
+ public ArrayList<PropertyDef> getPropertiesDefObjects() {
+ // Return a list of property definition objects
+ ArrayList<PropertyDef> properties = new ArrayList<PropertyDef>();
+ LinkedHashMap<String,Object> props = (LinkedHashMap<String,Object>)getDefinition(PROPERTIES);
+ if(props != null) {
+ for(Map.Entry<String,Object> me: props.entrySet()) {
+ String pdname = me.getKey();
+ Object to = me.getValue();
+ if(to == null || !(to instanceof LinkedHashMap)) {
+ String s = to == null ? "null" : to.getClass().getSimpleName();
+ ExceptionCollector.appendException(String.format(
+ "Unexpected type error: property \"%s\" has type \"%s\" (expected dict)",pdname,s));
+ continue;
+ }
+ LinkedHashMap<String,Object> pdschema = (LinkedHashMap<String,Object>)to;
+ properties.add(new PropertyDef(pdname,null,pdschema));
+ }
+ }
+ return properties;
+ }
+
+ public LinkedHashMap<String,PropertyDef> getPropertiesDef() {
+ LinkedHashMap<String,PropertyDef> pds = new LinkedHashMap<String,PropertyDef>();
+ for(PropertyDef pd: getPropertiesDefObjects()) {
+ pds.put(pd.getName(),pd);
+ }
+ return pds;
+ }
+
+ public PropertyDef getPropertyDefValue(String name) {
+ // Return the property definition associated with a given name
+ PropertyDef pd = null;
+ LinkedHashMap<String,PropertyDef> propsDef = getPropertiesDef();
+ if(propsDef != null) {
+ pd = propsDef.get(name);
+ }
+ return pd;
+ }
+
+ public ArrayList<AttributeDef> getAttributesDefObjects() {
+ // Return a list of attribute definition objects
+ @SuppressWarnings("unchecked")
+ LinkedHashMap<String,Object> attrs = (LinkedHashMap<String,Object>)getValue(ATTRIBUTES,null,true);
+ ArrayList<AttributeDef> ads = new ArrayList<>();
+ if(attrs != null) {
+ for(Map.Entry<String,Object> me: attrs.entrySet()) {
+ String attr = me.getKey();
+ @SuppressWarnings("unchecked")
+ LinkedHashMap<String,Object> adschema = (LinkedHashMap<String,Object>)me.getValue();
+ ads.add(new AttributeDef(attr,null,adschema));
+ }
+ }
+ return ads;
+ }
+
+ public LinkedHashMap<String,AttributeDef> getAttributesDef() {
+ // Return a dictionary of attribute definition name-object pairs
+
+ LinkedHashMap<String,AttributeDef> ads = new LinkedHashMap<>();
+ for(AttributeDef ado: getAttributesDefObjects()) {
+ ads.put(((AttributeDef)ado).getName(),ado);
+ }
+ return ads;
+ }
+
+ public AttributeDef getAttributeDefValue(String name) {
+ // Return the attribute definition associated with a given name
+ AttributeDef ad = null;
+ LinkedHashMap<String,AttributeDef> attrsDef = getAttributesDef();
+ if(attrsDef != null) {
+ ad = attrsDef.get(name);
+ }
+ return ad;
+ }
+
+ public String getType() {
+ return type;
+ }
+ }
+
+/*python
+
+from toscaparser.common.exception import InvalidTypeError
+from toscaparser.elements.attribute_definition import AttributeDef
+from toscaparser.elements.entity_type import EntityType
+from toscaparser.elements.property_definition import PropertyDef
+from toscaparser.unsupportedtype import UnsupportedType
+
+
+class StatefulEntityType(EntityType):
+ '''Class representing TOSCA states.'''
+
+ interfaces_node_lifecycle_operations = ['create',
+ 'configure', 'start',
+ 'stop', 'delete']
+
+ interfaces_relationship_configure_operations = ['post_configure_source',
+ 'post_configure_target',
+ 'add_target',
+ 'remove_target']
+
+ def __init__(self, entitytype, prefix, custom_def=None):
+ entire_entitytype = entitytype
+ if UnsupportedType.validate_type(entire_entitytype):
+ self.defs = None
+ else:
+ if entitytype.startswith(self.TOSCA + ":"):
+ entitytype = entitytype[(len(self.TOSCA) + 1):]
+ entire_entitytype = prefix + entitytype
+ if not entitytype.startswith(self.TOSCA):
+ entire_entitytype = prefix + entitytype
+ if entire_entitytype in list(self.TOSCA_DEF.keys()):
+ self.defs = self.TOSCA_DEF[entire_entitytype]
+ entitytype = entire_entitytype
+ elif custom_def and entitytype in list(custom_def.keys()):
+ self.defs = custom_def[entitytype]
+ else:
+ self.defs = None
+ ExceptionCollector.appendException(
+ InvalidTypeError(what=entitytype))
+ self.type = entitytype
+
+ def get_properties_def_objects(self):
+ '''Return a list of property definition objects.'''
+ properties = []
+ props = self.get_definition(self.PROPERTIES)
+ if props:
+ for prop, schema in props.items():
+ properties.append(PropertyDef(prop, None, schema))
+ return properties
+
+ def get_properties_def(self):
+ '''Return a dictionary of property definition name-object pairs.'''
+ return {prop.name: prop
+ for prop in self.get_properties_def_objects()}
+
+ def get_property_def_value(self, name):
+ '''Return the property definition associated with a given name.'''
+ props_def = self.get_properties_def()
+ if props_def and name in props_def.keys():
+ return props_def[name].value
+
+ def get_attributes_def_objects(self):
+ '''Return a list of attribute definition objects.'''
+ attrs = self.get_value(self.ATTRIBUTES, parent=True)
+ if attrs:
+ return [AttributeDef(attr, None, schema)
+ for attr, schema in attrs.items()]
+ return []
+
+ def get_attributes_def(self):
+ '''Return a dictionary of attribute definition name-object pairs.'''
+ return {attr.name: attr
+ for attr in self.get_attributes_def_objects()}
+
+ def get_attribute_def_value(self, name):
+ '''Return the attribute definition associated with a given name.'''
+ attrs_def = self.get_attributes_def()
+ if attrs_def and name in attrs_def.keys():
+ return attrs_def[name].value
+*/ \ No newline at end of file
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/TypeValidation.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/TypeValidation.java
new file mode 100644
index 0000000..b29248d
--- /dev/null
+++ b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/TypeValidation.java
@@ -0,0 +1,147 @@
+package org.openecomp.sdc.toscaparser.elements;
+
+import java.util.ArrayList;
+import java.util.LinkedHashMap;
+
+import org.openecomp.sdc.toscaparser.common.ExceptionCollector;
+import org.openecomp.sdc.toscaparser.extensions.ExtTools;
+
+public class TypeValidation {
+
+ private static final String DEFINITION_VERSION = "tosca_definitions_version";
+ private static final String DESCRIPTION = "description";
+ private static final String IMPORTS = "imports";
+ private static final String DSL_DEFINITIONS = "dsl_definitions";
+ private static final String NODE_TYPES = "node_types";
+ private static final String REPOSITORIES = "repositories";
+ private static final String DATA_TYPES = "data_types";
+ private static final String ARTIFACT_TYPES = "artifact_types";
+ private static final String GROUP_TYPES = "group_types";
+ private static final String RELATIONSHIP_TYPES = "relationship_types";
+ private static final String CAPABILITY_TYPES = "capability_types";
+ private static final String INTERFACE_TYPES = "interface_types";
+ private static final String POLICY_TYPES = "policy_types";
+ private static final String TOPOLOGY_TEMPLATE = "topology_template";
+ private String ALLOWED_TYPE_SECTIONS[] = {
+ DEFINITION_VERSION, DESCRIPTION, IMPORTS,
+ DSL_DEFINITIONS, NODE_TYPES, REPOSITORIES,
+ DATA_TYPES, ARTIFACT_TYPES, GROUP_TYPES,
+ RELATIONSHIP_TYPES, CAPABILITY_TYPES,
+ INTERFACE_TYPES, POLICY_TYPES,
+ TOPOLOGY_TEMPLATE
+ };
+
+ private static ArrayList<String> VALID_TEMPLATE_VERSIONS = _getVTV();
+
+ private static ArrayList<String> _getVTV() {
+ ArrayList<String> vtv = new ArrayList<>();
+ vtv.add("tosca_simple_yaml_1_0");
+ ExtTools exttools = new ExtTools();
+ vtv.addAll(exttools.getVersions());
+ return vtv;
+ }
+
+ //private LinkedHashMap<String,Object> customTypes;
+ private Object importDef;
+ //private String version;
+
+ public TypeValidation(LinkedHashMap<String,Object> _customTypes,
+ Object _importDef) {
+ importDef = _importDef;
+ _validateTypeKeys(_customTypes);
+ }
+
+ private void _validateTypeKeys(LinkedHashMap<String,Object> customTypes) {
+
+ String sVersion = (String)customTypes.get(DEFINITION_VERSION);
+ if(sVersion != null) {
+ _validateTypeVersion(sVersion);
+ //version = sVersion;
+ }
+ for(String name: customTypes.keySet()) {
+ boolean bFound = false;
+ for(String ats: ALLOWED_TYPE_SECTIONS) {
+ if(name.equals(ats)) {
+ bFound = true;
+ break;
+ }
+ }
+ if(!bFound) {
+ ExceptionCollector.appendException(String.format(
+ "UnknownFieldError: Template \"%s\" contains unknown field \"%s\"",
+ importDef.toString(),name));
+ }
+ }
+ }
+
+ private void _validateTypeVersion(String sVersion) {
+ boolean bFound = false;
+ String allowed = "";
+ for(String atv: VALID_TEMPLATE_VERSIONS) {
+ allowed += "\"" + atv + "\" ";
+ if(sVersion.equals(atv)) {
+ bFound = true;
+ break;
+ }
+ }
+ if(!bFound) {
+ ExceptionCollector.appendException(String.format(
+ "InvalidTemplateVersion: version \"%s\" in \"%s\" is not supported\n" +
+ "Allowed versions: [%s]",
+ sVersion,importDef.toString(),allowed));
+ }
+ }
+}
+
+/*python
+
+from toscaparser.common.exception import ExceptionCollector
+from toscaparser.common.exception import InvalidTemplateVersion
+from toscaparser.common.exception import UnknownFieldError
+from toscaparser.extensions.exttools import ExtTools
+
+
+class TypeValidation(object):
+
+ ALLOWED_TYPE_SECTIONS = (DEFINITION_VERSION, DESCRIPTION, IMPORTS,
+ DSL_DEFINITIONS, NODE_TYPES, REPOSITORIES,
+ DATA_TYPES, ARTIFACT_TYPES, GROUP_TYPES,
+ RELATIONSHIP_TYPES, CAPABILITY_TYPES,
+ INTERFACE_TYPES, POLICY_TYPES,
+ TOPOLOGY_TEMPLATE) = \
+ ('tosca_definitions_version', 'description', 'imports',
+ 'dsl_definitions', 'node_types', 'repositories',
+ 'data_types', 'artifact_types', 'group_types',
+ 'relationship_types', 'capability_types',
+ 'interface_types', 'policy_types', 'topology_template')
+ VALID_TEMPLATE_VERSIONS = ['tosca_simple_yaml_1_0']
+ exttools = ExtTools()
+ VALID_TEMPLATE_VERSIONS.extend(exttools.get_versions())
+
+ def __init__(self, custom_types, import_def):
+ self.import_def = import_def
+ self._validate_type_keys(custom_types)
+
+ def _validate_type_keys(self, custom_type):
+ version = custom_type[self.DEFINITION_VERSION] \
+ if self.DEFINITION_VERSION in custom_type \
+ else None
+ if version:
+ self._validate_type_version(version)
+ self.version = version
+
+ for name in custom_type:
+ if name not in self.ALLOWED_TYPE_SECTIONS:
+ ExceptionCollector.appendException(
+# UnknownFieldError(what='Template ' + (self.import_def),
+ UnknownFieldError(what= (self.import_def),
+ field=name))
+
+ def _validate_type_version(self, version):
+ if version not in self.VALID_TEMPLATE_VERSIONS:
+ ExceptionCollector.appendException(
+ InvalidTemplateVersion(
+# what=version + ' in ' + self.import_def,
+ what=self.import_def,
+ valid_versions=', '. join(self.VALID_TEMPLATE_VERSIONS)))
+*/
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/constraints/Constraint.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/constraints/Constraint.java
new file mode 100644
index 0000000..43737bf
--- /dev/null
+++ b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/constraints/Constraint.java
@@ -0,0 +1,237 @@
+package org.openecomp.sdc.toscaparser.elements.constraints;
+
+import java.util.ArrayList;
+import java.util.LinkedHashMap;
+
+import org.openecomp.sdc.toscaparser.elements.ScalarUnit;
+
+import org.openecomp.sdc.toscaparser.common.ExceptionCollector;
+
+public abstract class Constraint {
+
+ // Parent class for constraints for a Property or Input
+
+ protected static final String EQUAL = "equal";
+ protected static final String GREATER_THAN = "greater_than";
+ protected static final String GREATER_OR_EQUAL = "greater_or_equal";
+ protected static final String LESS_THAN = "less_than";
+ protected static final String LESS_OR_EQUAL = "less_or_equal";
+ protected static final String IN_RANGE = "in_range";
+ protected static final String VALID_VALUES = "valid_values";
+ protected static final String LENGTH = "length";
+ protected static final String MIN_LENGTH = "min_length";
+ protected static final String MAX_LENGTH = "max_length";
+ protected static final String PATTERN = "pattern";
+
+ protected static final String CONSTRAINTS[] = {
+ EQUAL, GREATER_THAN,GREATER_OR_EQUAL, LESS_THAN, LESS_OR_EQUAL,
+ IN_RANGE, VALID_VALUES, LENGTH, MIN_LENGTH, MAX_LENGTH, PATTERN};
+
+ @SuppressWarnings("unchecked")
+ public static Constraint factory(String constraintClass,String propname,String proptype,Object constraint) {
+
+ // a factory for the different Constraint classes
+ // replaces Python's __new__() usage
+
+ if(!(constraint instanceof LinkedHashMap) ||
+ ((LinkedHashMap<String,Object>)constraint).size() != 1) {
+ ExceptionCollector.appendException(
+ "InvalidSchemaError: Invalid constraint schema " + constraint.toString());
+ }
+
+ if(constraintClass.equals(EQUAL)) {
+ return new Equal(propname,proptype,constraint);
+ }
+ else if(constraintClass.equals(GREATER_THAN)) {
+ return new GreaterThan(propname,proptype,constraint);
+ }
+ else if(constraintClass.equals(GREATER_OR_EQUAL)) {
+ return new GreaterOrEqual(propname,proptype,constraint);
+ }
+ else if(constraintClass.equals(LESS_THAN)) {
+ return new LessThan(propname,proptype,constraint);
+ }
+ else if(constraintClass.equals(LESS_OR_EQUAL)) {
+ return new LessOrEqual(propname,proptype,constraint);
+ }
+ else if(constraintClass.equals(IN_RANGE)) {
+ return new InRange(propname,proptype,constraint);
+ }
+ else if(constraintClass.equals(VALID_VALUES)) {
+ return new ValidValues(propname,proptype,constraint);
+ }
+ else if(constraintClass.equals(LENGTH)) {
+ return new Length(propname,proptype,constraint);
+ }
+ else if(constraintClass.equals(MIN_LENGTH)) {
+ return new MinLength(propname,proptype,constraint);
+ }
+ else if(constraintClass.equals(MAX_LENGTH)) {
+ return new MaxLength(propname,proptype,constraint);
+ }
+ else if(constraintClass.equals(PATTERN)) {
+ return new Pattern(propname,proptype,constraint);
+ }
+ else {
+ ExceptionCollector.appendException(String.format(
+ "InvalidSchemaError: Invalid property \"%s\"",constraintClass));
+ return null;
+ }
+ }
+
+ protected String constraintKey = "TBD";
+ protected ArrayList<String> validTypes = new ArrayList<>();
+ protected ArrayList<String> validPropTypes = new ArrayList<>();
+
+ protected String propertyName;
+ protected String propertyType;
+ protected Object constraintValue;
+ protected Object constraintValueMsg;
+ protected Object valueMsg;
+
+ @SuppressWarnings("unchecked")
+ public Constraint(String propname,String proptype,Object constraint) {
+
+ _setValues();
+
+ propertyName = propname;
+ propertyType = proptype;
+ constraintValue = ((LinkedHashMap<String,Object>)constraint).get(constraintKey);
+ constraintValueMsg = constraintValue;
+ boolean bFound = false;
+ for(String s: ScalarUnit.SCALAR_UNIT_TYPES) {
+ if(s.equals(propertyType)) {
+ bFound = true;
+ break;
+ }
+ }
+ if(bFound) {
+ constraintValue = _getScalarUnitConstraintValue();
+ }
+ // check if constraint is valid for property type
+ bFound = false;
+ for(String s: validPropTypes) {
+ if(s.equals(propertyType)) {
+ bFound = true;
+ break;
+ }
+ }
+ if(!bFound) {
+ ExceptionCollector.appendException(String.format(
+ "InvalidSchemaError: Property \"%s\" is not valid for data type \"%s\"",
+ constraintKey,propertyType));
+ }
+ }
+
+ @SuppressWarnings("unchecked")
+ private Object _getScalarUnitConstraintValue() {
+ // code differs from Python because of class creation
+ if(constraintValue instanceof ArrayList) {
+ ArrayList<Object> ret = new ArrayList<>();
+ for(Object v: (ArrayList<Object>)constraintValue) {
+ ScalarUnit su = ScalarUnit.getScalarunitClass(propertyType,v);
+ ret.add(su.getNumFromScalarUnit(null));
+ }
+ return ret;
+ }
+ else {
+ ScalarUnit su = ScalarUnit.getScalarunitClass(propertyType,constraintValue);
+ return su.getNumFromScalarUnit(null);
+ }
+ }
+
+ public void validate(Object value) {
+ valueMsg = value;
+ boolean bFound = false;
+ for(String s: ScalarUnit.SCALAR_UNIT_TYPES) {
+ if(s.equals(propertyType)) {
+ bFound = true;
+ break;
+ }
+ }
+ if(bFound) {
+ value = ScalarUnit.getScalarunitValue(propertyType,value,null);
+ }
+ if(!_isValid(value)) {
+ ExceptionCollector.appendException("ValidationError: " + _errMsg(value));
+ }
+ }
+
+ protected abstract boolean _isValid(Object value);
+
+ protected abstract void _setValues();
+
+ protected abstract String _errMsg(Object value);
+
+}
+
+/*python
+
+class Constraint(object):
+ '''Parent class for constraints for a Property or Input.'''
+
+ CONSTRAINTS = (EQUAL, GREATER_THAN,
+ GREATER_OR_EQUAL, LESS_THAN, LESS_OR_EQUAL, IN_RANGE,
+ VALID_VALUES, LENGTH, MIN_LENGTH, MAX_LENGTH, PATTERN) = \
+ ('equal', 'greater_than', 'greater_or_equal', 'less_than',
+ 'less_or_equal', 'in_range', 'valid_values', 'length',
+ 'min_length', 'max_length', 'pattern')
+
+ def __new__(cls, property_name, property_type, constraint):
+ if cls is not Constraint:
+ return super(Constraint, cls).__new__(cls)
+
+ if(not isinstance(constraint, collections.Mapping) or
+ len(constraint) != 1):
+ ExceptionCollector.appendException(
+ InvalidSchemaError(message=_('Invalid constraint schema.')))
+
+ for type in constraint.keys():
+ ConstraintClass = get_constraint_class(type)
+ if not ConstraintClass:
+ msg = _('Invalid property "%s".') % type
+ ExceptionCollector.appendException(
+ InvalidSchemaError(message=msg))
+
+ return ConstraintClass(property_name, property_type, constraint)
+
+ def __init__(self, property_name, property_type, constraint):
+ self.property_name = property_name
+ self.property_type = property_type
+ self.constraint_value = constraint[self.constraint_key]
+ self.constraint_value_msg = self.constraint_value
+ if self.property_type in scalarunit.ScalarUnit.SCALAR_UNIT_TYPES:
+ self.constraint_value = self._get_scalarunit_constraint_value()
+ # check if constraint is valid for property type
+ if property_type not in self.valid_prop_types:
+ msg = _('Property "%(ctype)s" is not valid for data type '
+ '"%(dtype)s".') % dict(
+ ctype=self.constraint_key,
+ dtype=property_type)
+ ExceptionCollector.appendException(InvalidSchemaError(message=msg))
+
+ def _get_scalarunit_constraint_value(self):
+ if self.property_type in scalarunit.ScalarUnit.SCALAR_UNIT_TYPES:
+ ScalarUnit_Class = (scalarunit.
+ get_scalarunit_class(self.property_type))
+ if isinstance(self.constraint_value, list):
+ return [ScalarUnit_Class(v).get_num_from_scalar_unit()
+ for v in self.constraint_value]
+ else:
+ return (ScalarUnit_Class(self.constraint_value).
+ get_num_from_scalar_unit())
+
+ def _err_msg(self, value):
+ return _('Property "%s" could not be validated.') % self.property_name
+
+ def validate(self, value):
+ self.value_msg = value
+ if self.property_type in scalarunit.ScalarUnit.SCALAR_UNIT_TYPES:
+ value = scalarunit.get_scalarunit_value(self.property_type, value)
+ if not self._is_valid(value):
+ err_msg = self._err_msg(value)
+ ExceptionCollector.appendException(
+ ValidationError(message=err_msg))
+
+
+*/
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/constraints/Equal.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/constraints/Equal.java
new file mode 100644
index 0000000..dd88f02
--- /dev/null
+++ b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/constraints/Equal.java
@@ -0,0 +1,61 @@
+package org.openecomp.sdc.toscaparser.elements.constraints;
+
+public class Equal extends Constraint {
+
+ protected void _setValues() {
+
+ constraintKey = EQUAL;
+
+ for(String s: Schema.PROPERTY_TYPES) {
+ validPropTypes.add(s);
+ }
+
+ }
+
+ public Equal(String name,String type,Object c) {
+ super(name,type,c);
+
+ }
+
+ protected boolean _isValid(Object val) {
+ // equality of objects is tricky so we're comparing
+ // the toString() representation
+ if(val.toString().equals(constraintValue.toString())) {
+ return true;
+ }
+ return false;
+ }
+
+ protected String _errMsg(Object value) {
+ return String.format("The value \"%s\" of property \"%s\" is not equal to \"%s\"",
+ valueMsg,propertyName,constraintValueMsg);
+ }
+
+}
+
+/*python
+
+class Equal(Constraint):
+"""Constraint class for "equal"
+
+Constrains a property or parameter to a value equal to ('=')
+the value declared.
+"""
+
+constraint_key = Constraint.EQUAL
+
+valid_prop_types = Schema.PROPERTY_TYPES
+
+def _is_valid(self, value):
+ if value == self.constraint_value:
+ return True
+
+ return False
+
+def _err_msg(self, value):
+ return (_('The value "%(pvalue)s" of property "%(pname)s" is not '
+ 'equal to "%(cvalue)s".') %
+ dict(pname=self.property_name,
+ pvalue=self.value_msg,
+ cvalue=self.constraint_value_msg))
+*/ \ No newline at end of file
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/constraints/GreaterOrEqual.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/constraints/GreaterOrEqual.java
new file mode 100644
index 0000000..feb8fc0
--- /dev/null
+++ b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/constraints/GreaterOrEqual.java
@@ -0,0 +1,112 @@
+package org.openecomp.sdc.toscaparser.elements.constraints;
+
+import java.util.Date;
+
+import org.openecomp.sdc.toscaparser.common.ExceptionCollector;
+import org.openecomp.sdc.toscaparser.functions.Function;
+
+public class GreaterOrEqual extends Constraint {
+ // Constraint class for "greater_or_equal"
+
+ // Constrains a property or parameter to a value greater than or equal
+ // to ('>=') the value declared.
+
+ protected void _setValues() {
+
+ constraintKey = GREATER_OR_EQUAL;
+
+ validTypes.add("Integer");
+ validTypes.add("Double");
+ validTypes.add("Float");
+ // timestamps are loaded as Date objects
+ validTypes.add("Date");
+ //validTypes.add("datetime.date");
+ //validTypes.add("datetime.time");
+ //validTypes.add("datetime.datetime");
+
+ validPropTypes.add(Schema.INTEGER);
+ validPropTypes.add(Schema.FLOAT);
+ validPropTypes.add(Schema.TIMESTAMP);
+ validPropTypes.add(Schema.SCALAR_UNIT_SIZE);
+ validPropTypes.add(Schema.SCALAR_UNIT_FREQUENCY);
+ validPropTypes.add(Schema.SCALAR_UNIT_TIME);
+
+ }
+
+ public GreaterOrEqual(String name,String type,Object c) {
+ super(name,type,c);
+
+ if(!validTypes.contains(constraintValue.getClass().getSimpleName())) {
+ ExceptionCollector.appendException("InvalidSchemaError: The property \"greater_or_equal\" expects comparable values");
+ }
+ }
+
+
+
+ @Override
+ protected boolean _isValid(Object value) {
+ if(Function.isFunction(value)) {
+ return true;
+ }
+
+ // timestamps
+ if(value instanceof Date) {
+ if(constraintValue instanceof Date) {
+ return !((Date)value).before((Date)constraintValue);
+ }
+ return false;
+ }
+ // all others
+ Double n1 = new Double(value.toString());
+ Double n2 = new Double(constraintValue.toString());
+ return n1 >= n2;
+ }
+
+ protected String _errMsg(Object value) {
+ return String.format("The value \"%s\" of property \"%s\" must be greater or equal to \"%s\"",
+ valueMsg,propertyName,constraintValueMsg);
+ }
+}
+
+/*python
+
+class GreaterOrEqual(Constraint):
+"""Constraint class for "greater_or_equal"
+
+Constrains a property or parameter to a value greater than or equal
+to ('>=') the value declared.
+"""
+
+constraint_key = Constraint.GREATER_OR_EQUAL
+
+valid_types = (int, float, datetime.date,
+ datetime.time, datetime.datetime)
+
+valid_prop_types = (Schema.INTEGER, Schema.FLOAT, Schema.TIMESTAMP,
+ Schema.SCALAR_UNIT_SIZE, Schema.SCALAR_UNIT_FREQUENCY,
+ Schema.SCALAR_UNIT_TIME)
+
+def __init__(self, property_name, property_type, constraint):
+ super(GreaterOrEqual, self).__init__(property_name, property_type,
+ constraint)
+ if not isinstance(self.constraint_value, self.valid_types):
+ ExceptionCollector.appendException(
+ InvalidSchemaError(message=_('The property '
+ '"greater_or_equal" expects '
+ 'comparable values.')))
+
+def _is_valid(self, value):
+ if toscaparser.functions.is_function(value) or \
+ value >= self.constraint_value:
+ return True
+ return False
+
+def _err_msg(self, value):
+ return (_('The value "%(pvalue)s" of property "%(pname)s" must be '
+ 'greater than or equal to "%(cvalue)s".') %
+ dict(pname=self.property_name,
+ pvalue=self.value_msg,
+ cvalue=self.constraint_value_msg))
+
+
+*/ \ No newline at end of file
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/constraints/GreaterThan.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/constraints/GreaterThan.java
new file mode 100644
index 0000000..cb5cf4e
--- /dev/null
+++ b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/constraints/GreaterThan.java
@@ -0,0 +1,101 @@
+package org.openecomp.sdc.toscaparser.elements.constraints;
+
+import java.util.Date;
+
+import org.openecomp.sdc.toscaparser.common.ExceptionCollector;
+
+public class GreaterThan extends Constraint {
+
+ @Override
+ protected void _setValues() {
+
+ constraintKey = GREATER_THAN;
+
+ validTypes.add("Integer");
+ validTypes.add("Double");
+ validTypes.add("Float");
+ // timestamps are loaded as Date objects
+ validTypes.add("Date");
+ //validTypes.add("datetime.date");
+ //validTypes.add("datetime.time");
+ //validTypes.add("datetime.datetime");
+
+
+ validPropTypes.add(Schema.INTEGER);
+ validPropTypes.add(Schema.FLOAT);
+ validPropTypes.add(Schema.TIMESTAMP);
+ validPropTypes.add(Schema.SCALAR_UNIT_SIZE);
+ validPropTypes.add(Schema.SCALAR_UNIT_FREQUENCY);
+ validPropTypes.add(Schema.SCALAR_UNIT_TIME);
+
+ }
+
+ public GreaterThan(String name,String type,Object c) {
+ super(name,type,c);
+
+ if(!validTypes.contains(constraintValue.getClass().getSimpleName())) {
+ ExceptionCollector.appendException("InvalidSchemaError: The property \"greater_than\" expects comparable values");
+ }
+ }
+
+ @Override
+ protected boolean _isValid(Object value) {
+
+ // timestamps
+ if(value instanceof Date) {
+ if(constraintValue instanceof Date) {
+ return ((Date)value).after((Date)constraintValue);
+ }
+ return false;
+ }
+
+ Double n1 = new Double(value.toString());
+ Double n2 = new Double(constraintValue.toString());
+ return n1 > n2;
+ }
+
+ protected String _errMsg(Object value) {
+ return String.format("The value \"%s\" of property \"%s\" must be greater than \"%s\"",
+ valueMsg,propertyName,constraintValueMsg);
+ }
+
+}
+
+/*
+class GreaterThan(Constraint):
+ """Constraint class for "greater_than"
+
+ Constrains a property or parameter to a value greater than ('>')
+ the value declared.
+ """
+
+ constraint_key = Constraint.GREATER_THAN
+
+ valid_types = (int, float, datetime.date,
+ datetime.time, datetime.datetime)
+
+ valid_prop_types = (Schema.INTEGER, Schema.FLOAT, Schema.TIMESTAMP,
+ Schema.SCALAR_UNIT_SIZE, Schema.SCALAR_UNIT_FREQUENCY,
+ Schema.SCALAR_UNIT_TIME)
+
+ def __init__(self, property_name, property_type, constraint):
+ super(GreaterThan, self).__init__(property_name, property_type,
+ constraint)
+ if not isinstance(constraint[self.GREATER_THAN], self.valid_types):
+ ExceptionCollector.appendException(
+ InvalidSchemaError(message=_('The property "greater_than" '
+ 'expects comparable values.')))
+
+ def _is_valid(self, value):
+ if value > self.constraint_value:
+ return True
+
+ return False
+
+ def _err_msg(self, value):
+ return (_('The value "%(pvalue)s" of property "%(pname)s" must be '
+ 'greater than "%(cvalue)s".') %
+ dict(pname=self.property_name,
+ pvalue=self.value_msg,
+ cvalue=self.constraint_value_msg))
+*/
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/constraints/InRange.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/constraints/InRange.java
new file mode 100644
index 0000000..9110e5c
--- /dev/null
+++ b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/constraints/InRange.java
@@ -0,0 +1,169 @@
package org.openecomp.sdc.toscaparser.elements.constraints;

import java.util.Date;
import java.util.ArrayList;

import org.openecomp.sdc.toscaparser.common.ExceptionCollector;

/**
 * Constraint class for "in_range".
 *
 * Constrains a property or parameter to a value in the (inclusive) range of
 * the two values declared. The special string "UNBOUNDED" may be used for
 * either bound.
 */
public class InRange extends Constraint {

    // The only string accepted as a range bound.
    private static final String UNBOUNDED = "UNBOUNDED";

    // Lower and upper bound taken from the two-element constraint list;
    // remain null when the constraint is malformed.
    private Object min,max;

    @Override
    protected void _setValues() {

        constraintKey = IN_RANGE;

        validTypes.add("Integer");
        validTypes.add("Double");
        validTypes.add("Float");
        validTypes.add("String");
        // timestamps are loaded as Date objects
        validTypes.add("Date");

        validPropTypes.add(Schema.INTEGER);
        validPropTypes.add(Schema.FLOAT);
        validPropTypes.add(Schema.TIMESTAMP);
        validPropTypes.add(Schema.SCALAR_UNIT_SIZE);
        validPropTypes.add(Schema.SCALAR_UNIT_FREQUENCY);
        validPropTypes.add(Schema.SCALAR_UNIT_TIME);
        validPropTypes.add(Schema.RANGE);

    }

    @SuppressWarnings("unchecked")
    public InRange(String name,String type,Object c) {
        super(name,type,c);

        if(!(constraintValue instanceof ArrayList) || ((ArrayList<Object>)constraintValue).size() != 2) {
            ExceptionCollector.appendException("InvalidSchemaError: The property \"in_range\" expects a list");
            // Bail out: the original fell through and the cast/get(1) below
            // threw ClassCastException or IndexOutOfBoundsException on a
            // malformed constraint instead of just recording the schema error.
            return;
        }

        ArrayList<Object> alcv = (ArrayList<Object>)constraintValue;
        String msg = "The property \"in_range\" expects comparable values";
        for(Object vo: alcv) {
            if(!validTypes.contains(vo.getClass().getSimpleName())) {
                ExceptionCollector.appendException("InvalidSchemaError: " + msg);
            }
            // The only string we allow for range is the special value 'UNBOUNDED'
            if((vo instanceof String) && !((String)vo).equals(UNBOUNDED)) {
                ExceptionCollector.appendException("InvalidSchemaError: " + msg);
            }
        }
        min = alcv.get(0);
        max = alcv.get(1);

    }

    @Override
    protected boolean _isValid(Object value) {

        // Timestamps: valid only when both bounds are Dates and the value is
        // within [min,max] inclusive.
        if(value instanceof Date) {
            if(min instanceof Date && max instanceof Date) {
                return !((Date)value).before((Date)min) &&
                       !((Date)value).after((Date)max);
            }
            return false;
        }

        // Numeric comparison; Double.parseDouble avoids the deprecated
        // Double(String) boxing constructor.
        double dvalue = Double.parseDouble(value.toString());
        if(!(min instanceof String)) {
            if(dvalue < Double.parseDouble(min.toString())) {
                return false;
            }
        }
        else if(!((String)min).equals(UNBOUNDED)) {
            return false;
        }
        if(!(max instanceof String)) {
            if(dvalue > Double.parseDouble(max.toString())) {
                return false;
            }
        }
        else if(!((String)max).equals(UNBOUNDED)) {
            return false;
        }
        return true;
    }

    @Override
    protected String _errMsg(Object value) {
        // String.valueOf() guards against null bounds (constructor may have
        // rejected a malformed constraint), where .toString() would NPE.
        return String.format("The value \"%s\" of property \"%s\" is out of range \"(min:%s, max:%s)\"",
            valueMsg,propertyName,String.valueOf(min),String.valueOf(max));
    }

}
+
+/*python
+
+class InRange(Constraint):
+ """Constraint class for "in_range"
+
+ Constrains a property or parameter to a value in range of (inclusive)
+ the two values declared.
+ """
+ UNBOUNDED = 'UNBOUNDED'
+
+ constraint_key = Constraint.IN_RANGE
+
+ valid_types = (int, float, datetime.date,
+ datetime.time, datetime.datetime, str)
+
+ valid_prop_types = (Schema.INTEGER, Schema.FLOAT, Schema.TIMESTAMP,
+ Schema.SCALAR_UNIT_SIZE, Schema.SCALAR_UNIT_FREQUENCY,
+ Schema.SCALAR_UNIT_TIME, Schema.RANGE)
+
+ def __init__(self, property_name, property_type, constraint):
+ super(InRange, self).__init__(property_name, property_type, constraint)
+ if(not isinstance(self.constraint_value, collections.Sequence) or
+ (len(constraint[self.IN_RANGE]) != 2)):
+ ExceptionCollector.appendException(
+ InvalidSchemaError(message=_('The property "in_range" '
+ 'expects a list.')))
+
+ msg = _('The property "in_range" expects comparable values.')
+ for value in self.constraint_value:
+ if not isinstance(value, self.valid_types):
+ ExceptionCollector.appendException(
+ InvalidSchemaError(message=msg))
+ # The only string we allow for range is the special value
+ # 'UNBOUNDED'
+ if(isinstance(value, str) and value != self.UNBOUNDED):
+ ExceptionCollector.appendException(
+ InvalidSchemaError(message=msg))
+
+ self.min = self.constraint_value[0]
+ self.max = self.constraint_value[1]
+
+ def _is_valid(self, value):
+ if not isinstance(self.min, str):
+ if value < self.min:
+ return False
+ elif self.min != self.UNBOUNDED:
+ return False
+ if not isinstance(self.max, str):
+ if value > self.max:
+ return False
+ elif self.max != self.UNBOUNDED:
+ return False
+ return True
+
+ def _err_msg(self, value):
+ return (_('The value "%(pvalue)s" of property "%(pname)s" is out of '
+ 'range "(min:%(vmin)s, max:%(vmax)s)".') %
+ dict(pname=self.property_name,
+ pvalue=self.value_msg,
+ vmin=self.constraint_value_msg[0],
+ vmax=self.constraint_value_msg[1]))
+
+*/
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/constraints/Length.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/constraints/Length.java
new file mode 100644
index 0000000..9dd9ae9
--- /dev/null
+++ b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/constraints/Length.java
@@ -0,0 +1,78 @@
+package org.openecomp.sdc.toscaparser.elements.constraints;
+
+import org.openecomp.sdc.toscaparser.common.ExceptionCollector;
+
+public class Length extends Constraint {
+ // Constraint class for "length"
+
+ // Constrains the property or parameter to a value of a given length.
+
+ @Override
+ protected void _setValues() {
+
+ constraintKey = LENGTH;
+
+ validTypes.add("Integer");
+
+ validPropTypes.add(Schema.STRING);
+
+ }
+
+ public Length(String name,String type,Object c) {
+ super(name,type,c);
+
+ if(!validTypes.contains(constraintValue.getClass().getSimpleName())) {
+ ExceptionCollector.appendException("InvalidSchemaError: The property \"length\" expects an integer");
+ }
+ }
+
+ @Override
+ protected boolean _isValid(Object value) {
+ if(value instanceof String && constraintValue instanceof Integer &&
+ ((String)value).length() == (Integer)constraintValue) {
+ return true;
+ }
+ return false;
+ }
+
+ @Override
+ protected String _errMsg(Object value) {
+ return String.format("Length of value \"%s\" of property \"%s\" must be equal to \"%s\"",
+ value.toString(),propertyName,constraintValue.toString());
+ }
+
+}
+
+/*python
+ class Length(Constraint):
+ """Constraint class for "length"
+
+ Constrains the property or parameter to a value of a given length.
+ """
+
+ constraint_key = Constraint.LENGTH
+
+ valid_types = (int, )
+
+ valid_prop_types = (Schema.STRING, )
+
+ def __init__(self, property_name, property_type, constraint):
+ super(Length, self).__init__(property_name, property_type, constraint)
+ if not isinstance(self.constraint_value, self.valid_types):
+ ExceptionCollector.appendException(
+ InvalidSchemaError(message=_('The property "length" expects '
+ 'an integer.')))
+
+ def _is_valid(self, value):
+ if isinstance(value, str) and len(value) == self.constraint_value:
+ return True
+
+ return False
+
+ def _err_msg(self, value):
+ return (_('Length of value "%(pvalue)s" of property "%(pname)s" '
+ 'must be equal to "%(cvalue)s".') %
+ dict(pname=self.property_name,
+ pvalue=value,
+ cvalue=self.constraint_value))
+*/ \ No newline at end of file
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/constraints/LessOrEqual.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/constraints/LessOrEqual.java
new file mode 100644
index 0000000..00a5ca1
--- /dev/null
+++ b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/constraints/LessOrEqual.java
@@ -0,0 +1,105 @@
package org.openecomp.sdc.toscaparser.elements.constraints;

import java.util.Date;

import org.openecomp.sdc.toscaparser.common.ExceptionCollector;

/**
 * Constraint class for "less_or_equal".
 *
 * Constrains a property or parameter to a value less than or equal
 * to ('&lt;=') the value declared.
 */
public class LessOrEqual extends Constraint {

    @Override // annotation was missing; siblings (LessThan, Length, ...) carry it
    protected void _setValues() {

        constraintKey = LESS_OR_EQUAL;

        validTypes.add("Integer");
        validTypes.add("Double");
        validTypes.add("Float");
        // timestamps are loaded as Date objects
        validTypes.add("Date");

        validPropTypes.add(Schema.INTEGER);
        validPropTypes.add(Schema.FLOAT);
        validPropTypes.add(Schema.TIMESTAMP);
        validPropTypes.add(Schema.SCALAR_UNIT_SIZE);
        validPropTypes.add(Schema.SCALAR_UNIT_FREQUENCY);
        validPropTypes.add(Schema.SCALAR_UNIT_TIME);

    }

    public LessOrEqual(String name,String type,Object c) {
        super(name,type,c);

        // Record (do not throw) a schema error for a non-comparable constraint.
        if(!validTypes.contains(constraintValue.getClass().getSimpleName())) {
            ExceptionCollector.appendException("InvalidSchemaError: The property \"less_or_equal\" expects comparable values");
        }
    }

    @Override
    protected boolean _isValid(Object value) {

        // Timestamps compare only against a Date constraint;
        // "not after" is equivalent to "before or equal".
        if(value instanceof Date) {
            if(constraintValue instanceof Date) {
                return !((Date)value).after((Date)constraintValue);
            }
            return false;
        }

        // Numeric comparison; Double.parseDouble avoids the deprecated
        // Double(String) boxing constructor.
        double n1 = Double.parseDouble(value.toString());
        double n2 = Double.parseDouble(constraintValue.toString());
        return n1 <= n2;
    }

    @Override
    protected String _errMsg(Object value) {
        return String.format("The value \"%s\" of property \"%s\" must be less or equal to \"%s\"",
            valueMsg,propertyName,constraintValueMsg);
    }

}
+
+/*python
+
+class LessOrEqual(Constraint):
+ """Constraint class for "less_or_equal"
+
+ Constrains a property or parameter to a value less than or equal
+ to ('<=') the value declared.
+ """
+
+ constraint_key = Constraint.LESS_OR_EQUAL
+
+ valid_types = (int, float, datetime.date,
+ datetime.time, datetime.datetime)
+
+ valid_prop_types = (Schema.INTEGER, Schema.FLOAT, Schema.TIMESTAMP,
+ Schema.SCALAR_UNIT_SIZE, Schema.SCALAR_UNIT_FREQUENCY,
+ Schema.SCALAR_UNIT_TIME)
+
+ def __init__(self, property_name, property_type, constraint):
+ super(LessOrEqual, self).__init__(property_name, property_type,
+ constraint)
+ if not isinstance(self.constraint_value, self.valid_types):
+ ExceptionCollector.appendException(
+ InvalidSchemaError(message=_('The property "less_or_equal" '
+ 'expects comparable values.')))
+
+ def _is_valid(self, value):
+ if value <= self.constraint_value:
+ return True
+
+ return False
+
+ def _err_msg(self, value):
+ return (_('The value "%(pvalue)s" of property "%(pname)s" must be '
+ 'less than or equal to "%(cvalue)s".') %
+ dict(pname=self.property_name,
+ pvalue=self.value_msg,
+ cvalue=self.constraint_value_msg))
+*/ \ No newline at end of file
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/constraints/LessThan.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/constraints/LessThan.java
new file mode 100644
index 0000000..8ac74a0
--- /dev/null
+++ b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/constraints/LessThan.java
@@ -0,0 +1,103 @@
package org.openecomp.sdc.toscaparser.elements.constraints;

import java.util.Date;

import org.openecomp.sdc.toscaparser.common.ExceptionCollector;

/**
 * Constraint class for "less_than".
 *
 * Constrains a property or parameter to a value strictly less than ('&lt;')
 * the value declared.
 */
public class LessThan extends Constraint {

    @Override
    protected void _setValues() {

        constraintKey = LESS_THAN;

        validTypes.add("Integer");
        validTypes.add("Double");
        validTypes.add("Float");
        // timestamps are loaded as Date objects
        validTypes.add("Date");

        validPropTypes.add(Schema.INTEGER);
        validPropTypes.add(Schema.FLOAT);
        validPropTypes.add(Schema.TIMESTAMP);
        validPropTypes.add(Schema.SCALAR_UNIT_SIZE);
        validPropTypes.add(Schema.SCALAR_UNIT_FREQUENCY);
        validPropTypes.add(Schema.SCALAR_UNIT_TIME);

    }

    public LessThan(String name,String type,Object c) {
        super(name,type,c);

        // Record (do not throw) a schema error for a non-comparable constraint.
        if(!validTypes.contains(constraintValue.getClass().getSimpleName())) {
            ExceptionCollector.appendException("InvalidSchemaError: The property \"less_than\" expects comparable values");
        }
    }

    @Override
    protected boolean _isValid(Object value) {

        // Timestamps compare only against a Date constraint.
        if(value instanceof Date) {
            if(constraintValue instanceof Date) {
                return ((Date)value).before((Date)constraintValue);
            }
            return false;
        }

        // Numeric comparison; Double.parseDouble avoids the deprecated
        // Double(String) boxing constructor.
        double n1 = Double.parseDouble(value.toString());
        double n2 = Double.parseDouble(constraintValue.toString());
        return n1 < n2;
    }

    @Override
    protected String _errMsg(Object value) {
        return String.format("The value \"%s\" of property \"%s\" must be less than \"%s\"",
            valueMsg,propertyName,constraintValueMsg);
    }

}
+
+/*python
+
+class LessThan(Constraint):
+"""Constraint class for "less_than"
+
+Constrains a property or parameter to a value less than ('<')
+the value declared.
+"""
+
+constraint_key = Constraint.LESS_THAN
+
+valid_types = (int, float, datetime.date,
+ datetime.time, datetime.datetime)
+
+valid_prop_types = (Schema.INTEGER, Schema.FLOAT, Schema.TIMESTAMP,
+ Schema.SCALAR_UNIT_SIZE, Schema.SCALAR_UNIT_FREQUENCY,
+ Schema.SCALAR_UNIT_TIME)
+
+def __init__(self, property_name, property_type, constraint):
+ super(LessThan, self).__init__(property_name, property_type,
+ constraint)
+ if not isinstance(self.constraint_value, self.valid_types):
+ ExceptionCollector.appendException(
+ InvalidSchemaError(message=_('The property "less_than" '
+ 'expects comparable values.')))
+
+def _is_valid(self, value):
+ if value < self.constraint_value:
+ return True
+
+ return False
+
+def _err_msg(self, value):
+ return (_('The value "%(pvalue)s" of property "%(pname)s" must be '
+ 'less than "%(cvalue)s".') %
+ dict(pname=self.property_name,
+ pvalue=self.value_msg,
+ cvalue=self.constraint_value_msg))
+*/ \ No newline at end of file
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/constraints/MaxLength.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/constraints/MaxLength.java
new file mode 100644
index 0000000..99292f2
--- /dev/null
+++ b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/constraints/MaxLength.java
@@ -0,0 +1,89 @@
package org.openecomp.sdc.toscaparser.elements.constraints;

import java.util.LinkedHashMap;

import org.openecomp.sdc.toscaparser.common.ExceptionCollector;

public class MaxLength extends Constraint {
    // Constraint class for "max_length" (header previously said "min_length" —
    // copy/paste error).

    // Constrains the property or parameter to a value of a maximum length.
    // Applies to strings (character count) and maps (entry count).

    @Override
    protected void _setValues() {

        constraintKey = MAX_LENGTH;

        // The declared maximum itself must be an Integer.
        validTypes.add("Integer");

        // Property types this constraint may be attached to.
        validPropTypes.add(Schema.STRING);
        validPropTypes.add(Schema.MAP);

    }

    public MaxLength(String name,String type,Object c) {
        super(name,type,c);

        // Record (do not throw) a schema error for a non-integer constraint value.
        if(!validTypes.contains(constraintValue.getClass().getSimpleName())) {
            ExceptionCollector.appendException("InvalidSchemaError: The property \"max_length\" expects an integer");
        }
    }

    @SuppressWarnings("unchecked")
    @Override
    protected boolean _isValid(Object value) {
        // Strings: compare character count; maps: compare entry count.
        // NOTE(review): only LinkedHashMap values are accepted here — presumably
        // the YAML loader always produces LinkedHashMap; confirm before widening.
        if(value instanceof String && constraintValue instanceof Integer &&
                ((String)value).length() <= (Integer)constraintValue) {
            return true;
        }
        else if(value instanceof LinkedHashMap && constraintValue instanceof Integer &&
                ((LinkedHashMap<String,Object>)value).size() <= (Integer)constraintValue) {
            return true;
        }
        return false;
    }

    @Override
    protected String _errMsg(Object value) {
        // Violation message used when _isValid() returns false.
        return String.format("Length of value \"%s\" of property \"%s\" must be no greater than \"%s\"",
            value.toString(),propertyName,constraintValue.toString());
    }

}
+
+/*python
+
+class MaxLength(Constraint):
+ """Constraint class for "max_length"
+
+ Constrains the property or parameter to a value to a maximum length.
+ """
+
+ constraint_key = Constraint.MAX_LENGTH
+
+ valid_types = (int, )
+
+ valid_prop_types = (Schema.STRING, Schema.MAP)
+
+ def __init__(self, property_name, property_type, constraint):
+ super(MaxLength, self).__init__(property_name, property_type,
+ constraint)
+ if not isinstance(self.constraint_value, self.valid_types):
+ ExceptionCollector.appendException(
+ InvalidSchemaError(message=_('The property "max_length" '
+ 'expects an integer.')))
+
+ def _is_valid(self, value):
+ if ((isinstance(value, str) or isinstance(value, dict)) and
+ len(value) <= self.constraint_value):
+ return True
+
+ return False
+
+ def _err_msg(self, value):
+ return (_('Length of value "%(pvalue)s" of property "%(pname)s" '
+ 'must be no greater than "%(cvalue)s".') %
+ dict(pname=self.property_name,
+ pvalue=value,
+ cvalue=self.constraint_value))
+*/ \ No newline at end of file
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/constraints/MinLength.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/constraints/MinLength.java
new file mode 100644
index 0000000..447572c
--- /dev/null
+++ b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/constraints/MinLength.java
@@ -0,0 +1,89 @@
package org.openecomp.sdc.toscaparser.elements.constraints;

import java.util.LinkedHashMap;

import org.openecomp.sdc.toscaparser.common.ExceptionCollector;

/**
 * Constraint class for "min_length".
 *
 * Constrains the property or parameter to a value of a minimum length:
 * character count for strings, entry count for maps.
 */
public class MinLength extends Constraint {

    @Override
    protected void _setValues() {
        constraintKey = MIN_LENGTH;
        // The declared minimum itself must be an Integer.
        validTypes.add("Integer");
        // Property types this constraint may be attached to.
        validPropTypes.add(Schema.STRING);
        validPropTypes.add(Schema.MAP);
    }

    public MinLength(String name,String type,Object c) {
        super(name,type,c);
        String declaredType = constraintValue.getClass().getSimpleName();
        if(!validTypes.contains(declaredType)) {
            ExceptionCollector.appendException("InvalidSchemaError: The property \"min_length\" expects an integer");
        }
    }

    @SuppressWarnings("unchecked")
    @Override
    protected boolean _isValid(Object value) {
        // A non-integer constraint can never be satisfied.
        if(!(constraintValue instanceof Integer)) {
            return false;
        }
        int minLen = (Integer)constraintValue;
        if(value instanceof String) {
            return ((String)value).length() >= minLen;
        }
        if(value instanceof LinkedHashMap) {
            return ((LinkedHashMap<String,Object>)value).size() >= minLen;
        }
        return false;
    }

    @Override
    protected String _errMsg(Object value) {
        return String.format("Length of value \"%s\" of property \"%s\" must be at least \"%s\"",
            value.toString(),propertyName,constraintValue.toString());
    }

}
+
+/*python
+
+class MinLength(Constraint):
+ """Constraint class for "min_length"
+
+ Constrains the property or parameter to a value to a minimum length.
+ """
+
+ constraint_key = Constraint.MIN_LENGTH
+
+ valid_types = (int, )
+
+ valid_prop_types = (Schema.STRING, Schema.MAP)
+
+ def __init__(self, property_name, property_type, constraint):
+ super(MinLength, self).__init__(property_name, property_type,
+ constraint)
+ if not isinstance(self.constraint_value, self.valid_types):
+ ExceptionCollector.appendException(
+ InvalidSchemaError(message=_('The property "min_length" '
+ 'expects an integer.')))
+
+ def _is_valid(self, value):
+ if ((isinstance(value, str) or isinstance(value, dict)) and
+ len(value) >= self.constraint_value):
+ return True
+
+ return False
+
+ def _err_msg(self, value):
+ return (_('Length of value "%(pvalue)s" of property "%(pname)s" '
+ 'must be at least "%(cvalue)s".') %
+ dict(pname=self.property_name,
+ pvalue=value,
+ cvalue=self.constraint_value))
+*/ \ No newline at end of file
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/constraints/Pattern.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/constraints/Pattern.java
new file mode 100644
index 0000000..f0c8c1d
--- /dev/null
+++ b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/constraints/Pattern.java
@@ -0,0 +1,95 @@
package org.openecomp.sdc.toscaparser.elements.constraints;

import java.util.regex.PatternSyntaxException;

import org.openecomp.sdc.toscaparser.common.ExceptionCollector;

/**
 * Constraint class for "pattern".
 *
 * Constrains the property or parameter to a string value that fully matches
 * the declared regular expression.
 */
public class Pattern extends Constraint {

    @Override
    protected void _setValues() {

        constraintKey = PATTERN;

        // The declared pattern itself must be a String.
        validTypes.add("String");

        validPropTypes.add(Schema.STRING);

    }


    public Pattern(String name,String type,Object c) {
        super(name,type,c);

        if(!validTypes.contains(constraintValue.getClass().getSimpleName())) {
            ExceptionCollector.appendException("InvalidSchemaError: The property \"pattern\" expects a string");
        }
    }

    @Override
    protected boolean _isValid(Object value) {
        try {
            if(!(value instanceof String)) {
                ExceptionCollector.appendException(String.format("ValueError: Input value \"%s\" to \"pattern\" property \"%s\" must be a string",
                    value.toString(),propertyName));
                return false;
            }
            String strp = constraintValue.toString();
            String strm = value.toString();
            java.util.regex.Pattern pattern = java.util.regex.Pattern.compile(strp);
            // matches() requires the WHOLE value to match the regex. The
            // previous find() + end()==length combination also accepted values
            // whose suffix matched (e.g. value "xab" against pattern "ab"),
            // which diverges from the intended full-match semantics.
            return pattern.matcher(strm).matches();
        }
        catch(PatternSyntaxException pse) {
            ExceptionCollector.appendException(String.format("ValueError: Invalid regex \"%s\" in \"pattern\" property \"%s\"",
                constraintValue.toString(),propertyName));
            return false;
        }
    }

    @Override
    protected String _errMsg(Object value) {
        return String.format("The value \"%s\" of property \"%s\" does not match the pattern \"%s\"",
            value.toString(),propertyName,constraintValue.toString());
    }

}
+
+/*python
+
+class Pattern(Constraint):
+ """Constraint class for "pattern"
+
+ Constrains the property or parameter to a value that is allowed by
+ the provided regular expression.
+ """
+
+ constraint_key = Constraint.PATTERN
+
+ valid_types = (str, )
+
+ valid_prop_types = (Schema.STRING, )
+
+ def __init__(self, property_name, property_type, constraint):
+ super(Pattern, self).__init__(property_name, property_type, constraint)
+ if not isinstance(self.constraint_value, self.valid_types):
+ ExceptionCollector.appendException(
+ InvalidSchemaError(message=_('The property "pattern" '
+ 'expects a string.')))
+ self.match = re.compile(self.constraint_value).match
+
+ def _is_valid(self, value):
+ match = self.match(value)
+ return match is not None and match.end() == len(value)
+
+ def _err_msg(self, value):
+ return (_('The value "%(pvalue)s" of property "%(pname)s" does not '
+ 'match pattern "%(cvalue)s".') %
+ dict(pname=self.property_name,
+ pvalue=value,
+ cvalue=self.constraint_value))
+*/ \ No newline at end of file
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/constraints/Schema.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/constraints/Schema.java
new file mode 100644
index 0000000..99580c5
--- /dev/null
+++ b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/constraints/Schema.java
@@ -0,0 +1,276 @@
package org.openecomp.sdc.toscaparser.elements.constraints;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;

import org.openecomp.sdc.toscaparser.common.ExceptionCollector;


/**
 * Schema of a single property/parameter definition. Wraps the parsed
 * {type, required, description, default, constraints, entry_schema, status}
 * map and lazily builds the Constraint objects declared under "constraints".
 */
public class Schema {

    private static final String TYPE = "type";
    private static final String REQUIRED = "required";
    private static final String DESCRIPTION = "description";
    private static final String DEFAULT = "default";
    private static final String CONSTRAINTS = "constraints";
    private static final String STATUS = "status";
    private static final String ENTRYSCHEMA = "entry_schema";
    // Recognized schema keys; getLen() counts how many are present.
    private static final String[] KEYS = {
        TYPE, REQUIRED, DESCRIPTION, DEFAULT, CONSTRAINTS, ENTRYSCHEMA, STATUS};

    public static final String INTEGER = "integer";
    public static final String STRING = "string";
    public static final String BOOLEAN = "boolean";
    public static final String FLOAT = "float";
    public static final String RANGE = "range";
    public static final String NUMBER = "number";
    public static final String TIMESTAMP = "timestamp";
    public static final String LIST = "list";
    public static final String MAP = "map";
    public static final String SCALAR_UNIT_SIZE = "scalar-unit.size";
    public static final String SCALAR_UNIT_FREQUENCY = "scalar-unit.frequency";
    public static final String SCALAR_UNIT_TIME = "scalar-unit.time";
    public static final String VERSION = "version";
    public static final String PORTDEF = "PortDef";
    public static final String PORTSPEC = "PortSpec"; //??? PortSpec.SHORTNAME

    // Every TOSCA property type a constraint may apply to.
    public static final String[] PROPERTY_TYPES = {
        INTEGER, STRING, BOOLEAN, FLOAT, RANGE, NUMBER, TIMESTAMP, LIST, MAP,
        SCALAR_UNIT_SIZE, SCALAR_UNIT_FREQUENCY, SCALAR_UNIT_TIME,
        VERSION, PORTDEF, PORTSPEC};

    @SuppressWarnings("unused")
    private static final String SCALAR_UNIT_SIZE_DEFAULT = "B";

    // Multipliers for scalar-unit.size suffixes (decimal and binary units).
    private static Map<String,Long> SCALAR_UNIT_SIZE_DICT = new HashMap<>();
    static {
        SCALAR_UNIT_SIZE_DICT.put("B", 1L);
        SCALAR_UNIT_SIZE_DICT.put("KB", 1000L);
        SCALAR_UNIT_SIZE_DICT.put("KIB", 1024L);
        SCALAR_UNIT_SIZE_DICT.put("MB", 1000000L);
        SCALAR_UNIT_SIZE_DICT.put("MIB", 1048576L);
        SCALAR_UNIT_SIZE_DICT.put("GB", 1000000000L);
        SCALAR_UNIT_SIZE_DICT.put("GIB", 1073741824L);
        SCALAR_UNIT_SIZE_DICT.put("TB", 1000000000000L);
        SCALAR_UNIT_SIZE_DICT.put("TIB", 1099511627776L);
    }

    private String name;
    private LinkedHashMap<String,Object> schema;
    private int _len;
    private ArrayList<Constraint> constraintsList;


    public Schema(String _name,LinkedHashMap<String,Object> _schemaDict) {
        name = _name;

        // The parameter is already typed LinkedHashMap, so the original
        // "instanceof LinkedHashMap" test could only fail for null — and the
        // subsequent get("type") then threw NullPointerException. Test null
        // explicitly and skip the "type" lookup in that case.
        if(_schemaDict == null) {
            ExceptionCollector.appendException(String.format(
                "InvalidSchemaError: Schema definition of \"%s\" must be a dict",name));
        }
        else if(_schemaDict.get(TYPE) == null) {
            ExceptionCollector.appendException(String.format(
                "InvalidSchemaError: Schema definition of \"%s\" must have a \"type\" attribute",name));
        }

        schema = _schemaDict;
        _len = 0; //??? None
        constraintsList = new ArrayList<>();
    }

    public String getType() {
        return (String)schema.get(TYPE);
    }

    // "required" defaults to true when absent, per the Python original.
    public boolean isRequired() {
        return (boolean)schema.getOrDefault(REQUIRED, true);
    }

    public String getDescription() {
        return (String)schema.getOrDefault(DESCRIPTION,"");
    }

    public Object getDefault() {
        return schema.get(DEFAULT);
    }

    public String getStatus() {
        return (String)schema.getOrDefault(STATUS,"");
    }

    /**
     * Lazily builds (once) and returns the Constraint objects declared under
     * "constraints". Unknown constraint names are reported to the
     * ExceptionCollector rather than thrown.
     */
    @SuppressWarnings("unchecked")
    public ArrayList<Constraint> getConstraints() {
        if(constraintsList.size() == 0) {
            Object cob = schema.get(CONSTRAINTS);
            if(cob instanceof ArrayList) {
                ArrayList<Object> constraintSchemata = (ArrayList<Object>)cob;
                for(Object ob: constraintSchemata) {
                    if(ob instanceof LinkedHashMap) {
                        // Each entry is a single-key map: {constraintName: args}.
                        for(String cClass: ((LinkedHashMap<String,Object>)ob).keySet()) {
                            Constraint c = Constraint.factory(cClass,name,getType(),ob);
                            if(c != null) {
                                constraintsList.add(c);
                            }
                            else {
                                ExceptionCollector.appendException(String.format(
                                    "UnknownFieldError: Constraint type \"%s\" for property \"%s\" is not supported",
                                    cClass,name));
                            }
                            break;
                        }
                    }
                }
            }
        }
        return constraintsList;
    }

    @SuppressWarnings("unchecked")
    public LinkedHashMap<String,Object> getEntrySchema() {
        return (LinkedHashMap<String,Object>)schema.get(ENTRYSCHEMA);
    }

    // Python intrinsic methods...

    // substitute for __getitem__ (aka self[key])
    public Object getItem(String key) {
        return schema.get(key);
    }

    /*
    def __iter__(self):
        for k in self.KEYS:
            try:
                self.schema[k]
            except KeyError:
                pass
            else:
                yield k
    */

    // substitute for __len__ (aka self.len()): number of recognized keys present
    public int getLen() {
        int len = 0;
        for(String k: KEYS) {
            if(schema.get(k) != null) {
                len++;
            }
        }
        // cache once after the loop (was reassigned on every iteration)
        _len = len;
        return _len;
    }
    // getter
    public LinkedHashMap<String,Object> getSchema() {
        return schema;
    }

}
+
+/*python
+
+class Schema(collections.Mapping):
+
+KEYS = (
+ TYPE, REQUIRED, DESCRIPTION,
+ DEFAULT, CONSTRAINTS, ENTRYSCHEMA, STATUS
+) = (
+ 'type', 'required', 'description',
+ 'default', 'constraints', 'entry_schema', 'status'
+)
+
+PROPERTY_TYPES = (
+ INTEGER, STRING, BOOLEAN, FLOAT, RANGE,
+ NUMBER, TIMESTAMP, LIST, MAP,
+ SCALAR_UNIT_SIZE, SCALAR_UNIT_FREQUENCY, SCALAR_UNIT_TIME,
+ VERSION, PORTDEF, PORTSPEC
+) = (
+ 'integer', 'string', 'boolean', 'float', 'range',
+ 'number', 'timestamp', 'list', 'map',
+ 'scalar-unit.size', 'scalar-unit.frequency', 'scalar-unit.time',
+ 'version', 'PortDef', PortSpec.SHORTNAME
+)
+
+SCALAR_UNIT_SIZE_DEFAULT = 'B'
+SCALAR_UNIT_SIZE_DICT = {'B': 1, 'KB': 1000, 'KIB': 1024, 'MB': 1000000,
+ 'MIB': 1048576, 'GB': 1000000000,
+ 'GIB': 1073741824, 'TB': 1000000000000,
+ 'TIB': 1099511627776}
+
+def __init__(self, name, schema_dict):
+ self.name = name
+ if not isinstance(schema_dict, collections.Mapping):
+ msg = (_('Schema definition of "%(pname)s" must be a dict.')
+ % dict(pname=name))
+ ExceptionCollector.appendException(InvalidSchemaError(message=msg))
+
+ try:
+ schema_dict['type']
+ except KeyError:
+ msg = (_('Schema definition of "%(pname)s" must have a "type" '
+ 'attribute.') % dict(pname=name))
+ ExceptionCollector.appendException(InvalidSchemaError(message=msg))
+
+ self.schema = schema_dict
+ self._len = None
+ self.constraints_list = []
+
+@property
+def type(self):
+ return self.schema[self.TYPE]
+
+@property
+def required(self):
+ return self.schema.get(self.REQUIRED, True)
+
+@property
+def description(self):
+ return self.schema.get(self.DESCRIPTION, '')
+
+@property
+def default(self):
+ return self.schema.get(self.DEFAULT)
+
+@property
+def status(self):
+ return self.schema.get(self.STATUS, '')
+
+@property
+def constraints(self):
+ if not self.constraints_list:
+ constraint_schemata = self.schema.get(self.CONSTRAINTS)
+ if constraint_schemata:
+ self.constraints_list = [Constraint(self.name,
+ self.type,
+ cschema)
+ for cschema in constraint_schemata]
+ return self.constraints_list
+
+@property
+def entry_schema(self):
+ return self.schema.get(self.ENTRYSCHEMA)
+
+def __getitem__(self, key):
+ return self.schema[key]
+
+def __iter__(self):
+ for k in self.KEYS:
+ try:
+ self.schema[k]
+ except KeyError:
+ pass
+ else:
+ yield k
+
+def __len__(self):
+ if self._len is None:
+ self._len = len(list(iter(self)))
+ return self._len
+*/ \ No newline at end of file
diff --git a/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/constraints/ValidValues.java b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/constraints/ValidValues.java
new file mode 100644
index 0000000..61e9cc2
--- /dev/null
+++ b/jtosca/src/main/java/org/openecomp/sdc/toscaparser/elements/constraints/ValidValues.java
@@ -0,0 +1,84 @@
+package org.openecomp.sdc.toscaparser.elements.constraints;
+
+import java.util.ArrayList;
+
+public class ValidValues extends Constraint {
+
+
+ protected void _setValues() {
+
+ constraintKey = VALID_VALUES;
+
+ for(String s: Schema.PROPERTY_TYPES) {
+ validPropTypes.add(s);
+ }
+
+ }
+
+
+ public ValidValues(String name,String type,Object c) {
+ super(name,type,c);
+
+ }
+
+ @SuppressWarnings("unchecked")
+ protected boolean _isValid(Object val) {
+ if(!(constraintValue instanceof ArrayList)) {
+ return false;
+ }
+ if(val instanceof ArrayList) {
+ boolean bAll = true;
+ for(Object v: (ArrayList<Object>)val) {
+ if(!((ArrayList<Object>)constraintValue).contains(v)) {
+ bAll = false;
+ break;
+ };
+ }
+ return bAll;
+ }
+ return ((ArrayList<Object>)constraintValue).contains(val);
+ }
+
+ protected String _errMsg(Object value) {
+ return String.format("The value \"%s\" of property \"%s\" is not valid. Expected a value from \"%s\"",
+ value.toString(),propertyName,constraintValue.toString());
+ }
+
+}
+
+/*python
+
+class ValidValues(Constraint):
+"""Constraint class for "valid_values"
+
+Constrains a property or parameter to a value that is in the list of
+declared values.
+"""
+constraint_key = Constraint.VALID_VALUES
+
+valid_prop_types = Schema.PROPERTY_TYPES
+
+def __init__(self, property_name, property_type, constraint):
+ super(ValidValues, self).__init__(property_name, property_type,
+ constraint)
+ if not isinstance(self.constraint_value, collections.Sequence):
+ ExceptionCollector.appendException(
+ InvalidSchemaError(message=_('The property "valid_values" '
+ 'expects a list.')))
+
+def _is_valid(self, value):
+ print '*** payton parser validating ',value,' in ',self.constraint_value#GGG
+ if isinstance(value, list):
+ return all(v in self.constraint_value for v in value)
+ return value in self.constraint_value
+
+def _err_msg(self, value):
+ allowed = '[%s]' % ', '.join(str(a) for a in self.constraint_value)
+ return (_('The value "%(pvalue)s" of property "%(pname)s" is not '
+ 'valid. Expected a value from "%(cvalue)s".') %
+ dict(pname=self.property_name,
+ pvalue=value,
+ cvalue=allowed))
+
+
+*/ \ No newline at end of file