Diffstat (limited to 'app/toscalib/templates')
26 files changed, 1884 insertions, 0 deletions
diff --git a/app/toscalib/templates/__init__.py b/app/toscalib/templates/__init__.py new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/app/toscalib/templates/__init__.py diff --git a/app/toscalib/templates/__init__.pyc b/app/toscalib/templates/__init__.pyc Binary files differnew file mode 100644 index 0000000..a70f506 --- /dev/null +++ b/app/toscalib/templates/__init__.pyc diff --git a/app/toscalib/templates/capability_item.py b/app/toscalib/templates/capability_item.py new file mode 100644 index 0000000..e96ee43 --- /dev/null +++ b/app/toscalib/templates/capability_item.py @@ -0,0 +1,73 @@ +from toscalib.templates.constant import * +from toscalib.templates.property_item import PropertyItem +import logging + +class CapabilityItem(object): + def __init__(self, definition): + self.name = definition.name + self.type = definition.type + self.definition = definition + self.properties = {} + self.id = PropertyItem(definition.id) + self.sub_pointer = None + self.parent_node = None + for prop in definition.properties.keys(): + self.properties[prop] = PropertyItem(definition.properties[prop]) + + def _parse_pre_defined_content(self, content): + if content is None: + return +# if content.has_key(CAP_PROPERTIES): + if CAP_PROPERTIES in content: + prop_sec = content[CAP_PROPERTIES] + for prop_name in prop_sec.keys(): + prop_item = self._get_property_item(prop_name) + if prop_item is not None: + prop_item._assign(prop_sec[prop_name]) + + def _propagate_substitution_value(self): + converge = True + for prop_item in iter(self.properties.values()): + converge = converge and prop_item._propagate_substitution_value() + + if self.sub_pointer is None: + return converge + + if self.id.value is None: + old_val = None + else: + old_val = self.id.value._get_value()[0] + + if isinstance(self.sub_pointer, PropertyItem): + if self.sub_pointer.value is None: + logging.warning( 'Something is wrong, the cap id mapping target'+ self.sub_pointer.name+ ' should have a value!') + return converge + self.id._direct_assign(self.sub_pointer.value) + from toscalib.templates.node import Node + if isinstance(self.sub_pointer, Node): + if self.sub_pointer.id is None or self.sub_pointer.id.value is None: + logging.warning( 'Something is wrong, the cap id mapping target'+ self.sub_pointer.name+ ' should have a value!') + return converge + self.id._direct_assign(self.sub_pointer.id.value) + + if self.id.value is None: + new_val = None + else: + new_val = self.id.value._get_value()[0] + return converge and (old_val == new_val) + + def _get_property_item(self, prop_name): +# if self.properties.has_key(prop_name): + if prop_name in self.properties: + return self.properties[prop_name] + else: + logging.warning('Capability: '+ self.name+ ' of type: '+ self.type+ ' has no property: '+ prop_name) + return None + + def _validate_capability(self, cap_name): + return self.definition._validate_capability(cap_name) + + def _update_parent_node(self, parent): + self.parent_node = parent + for prop in iter(self.properties.values()): + prop._update_parent_node(parent) diff --git a/app/toscalib/templates/capability_item.pyc b/app/toscalib/templates/capability_item.pyc Binary files differnew file mode 100644 index 0000000..3a7d971 --- /dev/null +++ b/app/toscalib/templates/capability_item.pyc diff --git a/app/toscalib/templates/constant.py b/app/toscalib/templates/constant.py new file mode 100644 index 0000000..101abaa --- /dev/null +++ b/app/toscalib/templates/constant.py @@ -0,0 +1,82 @@ +#Author: Shu Shi +#emaiL: 
shushi@research.att.com + +YMO_PREFIX=r'.._YAMLORDER_' + +TRUE_VALUES = ('True', 'TRUE', 'true', 'yes', 'Yes', 'YES', '1') + +TEMPLATE_SECTIONS = (VERSION, METADATA, DESCRIPTION, DSL, + REPO, IMPORT, ARTIFACT_TYPE, DATA_TYPE, CAPABILITY_TYPE, + INTERFACE_TYPE, RELATIONSHIP_TYPE, NODE_TYPE, GROUP_TYPE, + POLICY_TYPE, TOPOLOGY) = \ + ('tosca_definitions_version', 'metadata', 'description', 'dsl_definitions', + 'repositories', 'imports', 'artifact_types', 'data_types', 'capability_types', + 'interface_types', 'relationship_types', 'node_types', 'group_types', + 'policy_types', 'topology_template' ) + +YAML_ORDER_TEMPLATE_SECTIONS = (YMO_VERSION, YMO_METADATA, YMO_DESCRIPTION, YMO_DSL, + YMO_REPO, YMO_IMPORT, YMO_ARTIFACT_TYPE, YMO_DATA_TYPE, YMO_CAPABILITY_TYPE, + YMO_INTERFACE_TYPE, YMO_RELATIONSHIP_TYPE, YMO_NODE_TYPE, YMO_GROUP_TYPE, + YMO_POLICY_TYPE, YMO_TOPOLOGY) = \ + ('00_YAMLORDER_tosca_definitions_version', '02_YAMLORDER_metadata', '01_YAMLORDER_description', '03_YAMLORDER_dsl_definitions', + '04_YAMLORDER_repositories', '05_YAMLORDER_imports', '06_YAMLORDER_artifact_types', '07_YAMLORDER_data_types', '08_YAMLORDER_capability_types', + '09_YAMLORDER_interface_types', '10_YAMLORDER_relationship_types', '11_YAMLORDER_node_types', '12_YAMLORDER_group_types', + '13_YAMLORDER_policy_types', '14_YAMLORDER_topology_template' ) + +# Topology template key names +TOPOLOGY_SECTIONS = (TOPO_DESCRIPTION, TOPO_INPUTS, TOPO_NODE_TEMPLATES, + TOPO_RELATIONSHIP_TEMPLATES, TOPO_OUTPUTS, TOPO_GROUPS, + TOPO_SUBSTITUION_MAPPINGS) = \ + ('description', 'inputs', 'node_templates', + 'relationship_templates', 'outputs', 'groups', + 'substitution_mappings') + +YAML_ORDER_TOPOLOGY_SECTIONS = (YMO_TOPO_DESCRIPTION, YMO_TOPO_INPUTS, YMO_TOPO_NODE_TEMPLATES, + YMO_TOPO_RELATIONSHIP_TEMPLATES, YMO_TOPO_OUTPUTS, YMO_TOPO_GROUPS, + YMO_TOPO_SUBSTITUION_MAPPINGS) = \ + ('10_YAMLORDER_description', '11_YAMLORDER_inputs', '13_YAMLORDER_node_templates', + '14_YAMLORDER_relationship_templates', '16_YAMLORDER_outputs', '15_YAMLORDER_groups', + '12_YAMLORDER_substitution_mappings') + +SUBSTITUTION_SECTION = (SUB_NODE_TYPE, SUB_PROPERTY, SUB_ATTRIBUTE, SUB_REQUIREMENT, SUB_CAPABILITY, SUB_CAP_PROPERTY, SUB_CAP_ID, SUB_REQ_ID, SUB_INPUT, SUB_OUTPUT) = \ + ('node_type', 'properties', 'attributes', 'requirements', 'capabilities', 'properties', 'id', 'id', 'INPUT', 'OUTPUT') + +YAML_ORDER_SUBSTITUTION_SECTION = (YMO_SUB_NODE_TYPE, YMO_SUB_PROPERTY, YMO_SUB_REQUIREMENT, YMO_SUB_CAPABILITY) = \ + ('00_YAMLORDER_node_type', '01_YAMLORDER_properties', '03_YAMLORDER_requirements', '02_YAMLORDER_capabilities') + +REQUIREMENT_SECTION = (REQ_NODE, REQ_RELATIONSHIP, REQ_CAPABILITY, REQ_OCCURRENCE, REQ_FILTER) = \ + ('node', 'relationship', 'capability', 'occurrences', 'node_filter') + +YAML_ORDER_REQUIREMENOD_ASSIGNMENOD_SECTION = (YMO_REQ_NODE, YMO_REQ_RELATIONSHIP, YMO_REQ_CAPABILITY, YMO_REQ_OCCURRENCE, YMO_REQ_FILTER) = \ + ('01_YAMLORDER_node', '02_YAMLORDER_relationship', '00_YAMLORDER_capability', '04_YAMLORDER_occurrences', '03_YAMLORDER_node_filter') + +NODE_SECTION = (NOD_DERIVED_FROM, NOD_TYPE, NOD_PROPERTIES, NOD_ATTRIBUTES, NOD_REQUIREMENTS, + NOD_INTERFACES, NOD_CAPABILITIES, NOD_ARTIFACTS, NOD_DESCRIPTION) = \ + ('derived_from', 'type', 'properties', 'attributes', 'requirements', + 'interfaces', 'capabilities', 'artifacts', 'description') + +YAML_ORDER_NODETYPE_DEFINITION = (YMO_NOD_DERIVED_FROM, YMO_NOD_TYPE, YMO_NOD_PROPERTIES, YMO_NOD_ATTRIBUTES, YMO_NOD_REQUIREMENTS, YMO_NOD_RELATIONSHIPS, + YMO_NOD_INTERFACES, 
YMO_NOD_CAPABILITIES, YMO_NOD_ARTIFACTS, YMO_NOD_DESCRIPTION) = \ + ('00_YAMLORDER_derived_from', '00_YAMLORDER_type', '01_YAMLORDER_properties', '03_YAMLORDER_attributes', '05_YAMLORDER_requirements', '05_YAMLORDER_relationships', + '06_YAMLORDER_interfaces', '04_YAMLORDER_capabilities', '07_YAMLORDER_artifacts', '02_YAMLORDER_description') + +CAPABILITY_SECTION = (CAP_DERIVED_FROM, CAP_TYPE, CAP_PROPERTIES, CAP_ATTRIBUTES, + CAP_VERSION, CAP_DESCEIPTION, CAP_SOURCE ) = \ + ('derived_from', 'type', 'properties', 'attributes', + 'version', 'description', 'valid_source_type') + + +PROPERTY_SECTION = (PROP_TYPE, PROP_REQUIRED, PROP_DEFAULT, PROP_DESCRIPTION, + PROP_STATUS, PROP_ENTRY, PROP_CONSTRAINT) = \ + ('type', 'required', 'default', 'description', + 'status', 'entry_schema', 'constraints') + +YAML_ORDER_PROPERTY_SECTION = (YMO_PROP_TYPE, YMO_PROP_REQUIRED, YMO_PROP_DEFAULT, YMO_PROP_DESCRIPTION, + YMO_PROP_STATUS, YMO_PROP_ENTRY, YMO_PROP_CONSTRAINT) = \ + ('00_YAMLORDER_type', '01_YAMLORDER_required', '03_YAMLORDER_default', '02_YAMLORDER_description', + '04_YAMLORDER_status', '05_YAMLORDER_entry_schema', '06_YAMLORDER_constraints') + + +YAML_ORDER_INTERFACE_SECTION = (YMO_INT_TYPE, YMO_INT_INPUTS, YMO_OP_DESCRIPTION, YMO_OP_IMPLEMENTATION, YMO_OP_EXECUTOR, YMO_OP_INPUTS) = \ + ('00_YAMLORDER_type', '01_YAMLORDER_inputs', '02_YAMLORDER_description', + '03_YAMLORDER_implementation', '04_YAMLORDER_executor', '05_YAMLORDER_inputs') diff --git a/app/toscalib/templates/constant.pyc b/app/toscalib/templates/constant.pyc Binary files differnew file mode 100644 index 0000000..6335388 --- /dev/null +++ b/app/toscalib/templates/constant.pyc diff --git a/app/toscalib/templates/database.py b/app/toscalib/templates/database.py new file mode 100644 index 0000000..7e21148 --- /dev/null +++ b/app/toscalib/templates/database.py @@ -0,0 +1,113 @@ +#Author: Shu Shi +#emaiL: shushi@research.att.com + +from toscalib.templates.constant import * +import logging + +class ToscaDB(object): + """ The database that stores all node types and TEMPLATES """ + def __init__(self): + self.NODE_TYPES = {} + self.CAPABILITY_TYPES = {} + self.RELATIONSHIP_TYPES = {} + self.DATA_TYPES = {} + self.TEMPLATES = {} + + def _import_node_type(self, new_type): + if new_type is None: + return +# if self.NODE_TYPES.has_key(new_type.name) == True: + if new_type.name in self.NODE_TYPES: + logging.debug( 'Node type: '+ new_type.name+ ' already defined and will be overwritten') + self.NODE_TYPES[new_type.name]=new_type + + def _import_capability_type(self, new_type): + if new_type is None: + return +# if self.CAPABILITY_TYPES.has_key(new_type.name) == True: + if new_type.name in self.CAPABILITY_TYPES: + logging.debug( 'Capability type: '+ new_type.name+ ' already defined and will be overwritten') + + self.CAPABILITY_TYPES[new_type.name]=new_type + + def _import_relationship_type(self, new_type): + if new_type is None: + return +# if self.RELATIONSHIP_TYPES.has_key(new_type.name) == True: + if new_type.name in self.RELATIONSHIP_TYPES: + logging.debug( 'Relationship type: '+ new_type.name+ ' already defined and will be overwritten') + + self.RELATIONSHIP_TYPES[new_type.name]=new_type + + def _import_data_type(self, new_type): + if new_type is None: + return +# if self.DATA_TYPES.has_key(new_type.name) == True: + if new_type.name in self.DATA_TYPES: + logging.debug( 'Data type: '+ new_type.name+ ' already defined and will be overwritten') + self.DATA_TYPES[new_type.name]=new_type + + def _import_template(self, new_template): + if 
new_template is None: + return +# if self.TEMPLATES.has_key(new_template.name) == False: + if new_template.name not in self.TEMPLATES : + self.TEMPLATES[new_template.name]= new_template + + def _parse_objects(self): + logging.debug( 'parsing database') +# for objs in self.NODE_TYPES.itervalues(): + for objs in iter(self.NODE_TYPES.values()): + objs._parse_content(self) +# for objs in self.CAPABILITY_TYPES.itervalues(): + for objs in iter(self.CAPABILITY_TYPES.values()): + objs._parse_content(self) +# for objs in self.DATA_TYPES.itervalues(): + for objs in iter(self.DATA_TYPES.values()): + objs._parse_content(self) +# for objs in self.RELATIONSHIP_TYPES.itervalues(): + for objs in iter(self.RELATIONSHIP_TYPES.values()): + objs._parse_content(self) +# for objs in self.TEMPLATES.itervalues(): + for objs in iter(self.TEMPLATES.values()): + objs._parse_content(self) + + + def _prepare_schema(self): + schema_output = {} + data_sec = {} + for key in self.DATA_TYPES.keys(): + objs = self.DATA_TYPES[key] + data_sec[key] = objs.raw_content + node_sec = {} + for key in self.NODE_TYPES.keys(): + objs = self.NODE_TYPES[key] + if objs.raw_content is None: + objs._create_rawcontent() + node_sec[key]=objs.raw_content + cap_sec = {} + for key in self.CAPABILITY_TYPES.keys(): + objs = self.CAPABILITY_TYPES[key] + cap_sec[key]=objs.raw_content + rel_sec = {} + for key in self.RELATIONSHIP_TYPES.keys(): + objs = self.RELATIONSHIP_TYPES[key] + rel_sec[key]=objs.raw_content + + if len(data_sec) > 0: + schema_output[YMO_DATA_TYPE] = data_sec + if len(node_sec) > 0: + schema_output[YMO_NODE_TYPE] = node_sec + if len(cap_sec) > 0: + schema_output[YMO_CAPABILITY_TYPE] = cap_sec + if len(rel_sec) > 0: + schema_output[YMO_RELATIONSHIP_TYPE] = rel_sec + + schema_output[YMO_VERSION]= 'tosca_simple_yaml_1_0_0' + + return schema_output + + + + +
\ No newline at end of file diff --git a/app/toscalib/templates/database.pyc b/app/toscalib/templates/database.pyc Binary files differnew file mode 100644 index 0000000..e7f651f --- /dev/null +++ b/app/toscalib/templates/database.pyc diff --git a/app/toscalib/templates/heat_constants.py b/app/toscalib/templates/heat_constants.py new file mode 100644 index 0000000..365d38f --- /dev/null +++ b/app/toscalib/templates/heat_constants.py @@ -0,0 +1,7 @@ +HOT_TEMPLATE = (HOT_VERSION, HOT_DESCRIPTION, HOT_PARAMETERS, HOT_RESOURCES, + YMO_HOT_VERSION, YMO_HOT_DESCRIPTION, YMO_HOT_PARAMETERS, YMO_HOT_RESOURCES ) = \ + ('heat_template_version', 'description', 'parameters', 'resources', + '00_YAMLORDER_heat_template_version', '01_YAMLORDER_description', + '02_YAMLORDER_parameters', '03_YAMLORDER_resources') + +HOT_VERSION_NUM = '2013-05-23'
\ No newline at end of file diff --git a/app/toscalib/templates/heat_constants.pyc b/app/toscalib/templates/heat_constants.pyc Binary files differnew file mode 100644 index 0000000..379bc4e --- /dev/null +++ b/app/toscalib/templates/heat_constants.pyc diff --git a/app/toscalib/templates/interface_item.py b/app/toscalib/templates/interface_item.py new file mode 100644 index 0000000..9fd7fa9 --- /dev/null +++ b/app/toscalib/templates/interface_item.py @@ -0,0 +1,80 @@ +from toscalib.templates.property_item import PropertyItem +from toscalib.templates.operation_item import OperationItem +from toscalib.types.property import PropertyDefinition +from toscalib.templates.constant import * +import logging + +class InterfaceItem(object): + def __init__(self, definition, name = None, content = None): + if definition is not None: + self.name = definition.name + self.type = definition.type + self.definition = definition + self.inputs = {} + self.operations = {} + self.parent_node = None + for prop in definition.inputs.keys(): + self.inputs[prop] = PropertyItem(definition.inputs[prop]) + for oper in definition.operations.keys(): + self.operations[oper] = OperationItem(definition.operations[oper]) + else: + self.name = name + self.type = None + self.definition = None + self.inputs = {} + self.operations = {} + self.parent_node = None + + self._parse_pre_defined_content(content) + + def _parse_pre_defined_content(self, content): + if content is None: + return + + for key_name in content.keys(): + if key_name == 'type': + if self.type is not None and self.type != content[key_name]: + logging.warning( 'interface: '+ self.name+ 'type is different in definition: '+ self.type+ ' overwritten here to '+ self.raw_content[key_name]) + self.type = content[key_name] + continue + if key_name == 'inputs': + input_sec = content['inputs'] + for input_item in input_sec.keys(): + self.inputs[input_item] = PropertyItem(PropertyDefinition(input_item)) + self.inputs[input_item]._assign(input_sec[input_item]) + continue + +# if self.operations.has_key(key_name): + if key_name in self.operations: + self.operations[key_name]._parse_pre_defined_content(content[key_name]) + else: + self.operations[key_name] = OperationItem(None, key_name, content[key_name]) + + def _update_parent_node(self, parent): + self.parent_node = parent + for prop in iter(self.inputs.values()): + prop._update_parent_node(parent) + for ops in iter(self.operations.values()): + ops._update_parent_node(parent) + + def _prepare_output(self, tags=''): + output = {} + if 'cloudify' not in tags: + if self.type is not None: + output[YMO_INT_TYPE] = self.type + if len(self.inputs) > 0: + inputs = {} + for prop_name in self.inputs.keys(): + prop_item = self.inputs[prop_name] + if prop_item.value is None: + prop_value = None + else: + prop_value = prop_item.value._get_value(tags)[0] + inputs[prop_name] = prop_value + output[YMO_INT_INPUTS] = inputs + if len(self.operations) > 0: + for op_name in self.operations.keys(): + output[op_name] = self.operations[op_name]._prepare_output(tags) + + return output +
\ No newline at end of file diff --git a/app/toscalib/templates/interface_item.pyc b/app/toscalib/templates/interface_item.pyc Binary files differnew file mode 100644 index 0000000..20d9f8c --- /dev/null +++ b/app/toscalib/templates/interface_item.pyc diff --git a/app/toscalib/templates/node.py b/app/toscalib/templates/node.py new file mode 100644 index 0000000..8a07a49 --- /dev/null +++ b/app/toscalib/templates/node.py @@ -0,0 +1,389 @@ +from toscalib.templates.constant import * +from toscalib.types.node import NodeType +from toscalib.templates.requirement_item import RequirementItem +from toscalib.templates.property_item import PropertyItem +from toscalib.templates.capability_item import CapabilityItem +from toscalib.utils import tosca_import, tosca_heat + +import copy, re, logging +from toscalib.templates.interface_item import InterfaceItem +#Author: Shu Shi +#emaiL: shushi@research.att.com + + + +class Node(object): + def __init__(self, template, node_name, node_type): + self.template = template + self.name = node_name + self.id = PropertyItem(node_type.id) + self_id_str = {} + self_id_str['get_attribute']= [node_name, 'id'] + self.id._assign(self_id_str) + + self.mapping_template = None + self.tran_template = None + + self.fe_json = None + self.fe_nid = None + + if node_type is None: + logging.warning( 'Empty node type') + return + elif isinstance(node_type, NodeType) is False: + logging.warning( 'Invalid NodeType passed to Node: '+ node_name+ 'construction') + return + else: + self._instatiateWithType(node_type) + +#Instantiate the node type, when substitution mapping is attached, create the new template for it + def _instatiateWithType(self, node_type): + self.type = node_type.name + self.type_obj = node_type + + self.properties = {} + for prop in node_type.properties.keys(): + self.properties[prop] = PropertyItem(node_type.properties[prop]) + + self.attributes = {} + for attr in node_type.attributes.keys(): + self.attributes[attr] = PropertyItem(node_type.attributes[attr]) + + self.requirements = [] + for req in node_type.requirements: + self.requirements.append(RequirementItem(req)) + + self.capabilities = {} + for cap in node_type.capabilities.keys(): + self.capabilities[cap] = CapabilityItem(node_type.capabilities[cap]) + + self.interfaces = {} + for intf in node_type.interfaces.keys(): + self.interfaces[intf] = InterfaceItem(node_type.interfaces[intf]) + + if node_type.mapping_template is not None: + from toscalib.templates.topology import ToscaTopology + self.mapping_template = copy.deepcopy(node_type.mapping_template) + self.mapping_template._update_prefix(self.name + '_') + self.mapping_template._verify_substitution(self) +# for sub_rule in node_type.mapping_template.sub_rules: +# sub_rule._update_pointer(self, self.mapping_template) + + self._update_parent_node() + +#used to parse node template structure written in a template +#Assign values if needed +#For requirement fulfillment, add pending mode to check whether the value is a node template or type + def _parse_pre_defined_content(self, content): +# if content.has_key(NOD_PROPERTIES): + if NOD_PROPERTIES in content: + prop_sec = content[NOD_PROPERTIES] + if prop_sec is not None: + for prop_name in prop_sec.keys(): + prop_item = self._get_property_item(prop_name) + if prop_item is not None: + prop_item._assign(prop_sec[prop_name]) + if prop_sec[prop_name] == '__GET_NODE_NAME__': + prop_item._assign(self.name) + +# if content.has_key(NOD_REQUIREMENTS): + if NOD_REQUIREMENTS in content: + req_sec = content[NOD_REQUIREMENTS] 
+ if req_sec is not None: + for req in req_sec: + req_item_name, req_item_value = tosca_import._parse_requirement_name_and_value(req) +#TODO: the same requirement name can only appear once!! + req_item = self._get_requirement_item_first(req_item_name) + if req_item is not None: + req_item._parse_pre_defined_content(req_item_value) + else: + logging.warning( 'Requirement '+ req_item_name +'not defined in Node '+ self.name + ' of type '+ self.type) + +# if content.has_key(NOD_CAPABILITIES): + if NOD_CAPABILITIES in content: + cap_sec = content[NOD_CAPABILITIES] + if cap_sec is not None: + for cap_name in cap_sec.keys(): + cap_item = self._get_capability_item(cap_name) + if cap_item is not None: + cap_item._parse_pre_defined_content(cap_sec[cap_name]) + +# if content.has_key(NOD_INTERFACES): + if NOD_INTERFACES in content: + interface_sec = content[NOD_INTERFACES] + if interface_sec is not None: + for interface_name in interface_sec.keys(): + interface_item = self._get_interface_item(interface_name) + if interface_item is not None: + interface_item._parse_pre_defined_content(interface_sec[interface_name]) + else: + self.interfaces[interface_name] = InterfaceItem(None, interface_name, interface_sec[interface_name]) + + self._update_parent_node() + + def _get_property_item(self, prop_name): +# if self.properties.has_key(prop_name): + if prop_name in self.properties: + return self.properties[prop_name] + else: + logging.warning('Node: '+ self.name+ ' of type: '+ self.type+ ' has no property: '+ prop_name) + return None + + def _get_attribute_item(self, attr_name): +# if self.attributes.has_key(attr_name): + if attr_name in self.attributes: + return self.attributes[attr_name] + else: + logging.warning('Node: '+ self.name+ ' of type: '+ self.type+ ' has no attribute: '+ attr_name) + return None + + def _get_interface_item(self, interface_name): +# if self.interfaces.has_key(interface_name): + if interface_name in self.interfaces: + return self.interfaces[interface_name] + else: + logging.warning( 'Node: '+ self.name+ ' of type: '+ self.type+ ' has no interface: '+ interface_name) + return None + + def _get_capability_item(self, cap_name): +# if self.capabilities.has_key(cap_name): + if cap_name in self.capabilities: + return self.capabilities[cap_name] + else: + #logging.debug('Node: '+ self.name+ ' of type: '+ self.type+ ' has no capability: '+ cap_name) + return None + + def _get_capability_property(self, cap_name, prop_name): + cap_item = self._get_capability_item(cap_name) + if cap_item is not None: + return cap_item._get_property_item(prop_name) + else: + #logging.debug( 'Node: '+ self.name+ ' of type: '+ self.type+ ' has no capability: '+ cap_name) + return None + + def _get_requirement_item_first(self, req_name): + for req_item in self.requirements: + if req_item.name == req_name: + return req_item + logging.warning( 'Node: '+ self.name+ ' of type: '+ self.type+ ' has no requirement: '+ req_name) + return None + + def _verify_requirements(self, node_dict): + for req in self.requirements: + req._verify_requirement(node_dict) + + def _verify_functions(self): + if self.id.value is not None: + self.id.value._update_function_reference(self.template, self, self.id) + for prop_item in iter(self.properties.values()): + if prop_item.value is not None: + prop_item.value._update_function_reference(self.template, self, prop_item) + for cap_item in iter(self.capabilities.values()): + for cap_item_prop in iter(cap_item.properties.values()): + if cap_item_prop.value is not None: + 
cap_item_prop.value._update_function_reference(self.template, self, cap_item_prop) + for interface_item in iter(self.interfaces.values()): + for interface_item_input in iter(interface_item.inputs.values()): + if interface_item_input.value is not None: + interface_item_input.value._update_function_reference(self.template, self, interface_item_input) + for operation_item in iter(interface_item.operations.values()): + for input_item in iter(operation_item.inputs.values()): + if input_item.value is not None: + input_item.value._update_function_reference(self.template, self, input_item) + + def _update_parent_node(self): + for prop in iter(self.properties.values()): + prop._update_parent_node(self) + for cap in iter(self.capabilities.values()): + cap._update_parent_node(self) + for req in self.requirements: + req._update_parent_node(self) + for interface in iter(self.interfaces.values()): + interface._update_parent_node(self) + + + def _update_prefix(self, prefix): + if self.name == 'NO_PREFIX': + self.name = prefix[:len(prefix)-1] + else: + self.name = prefix + self.name + self.id.value._update_prefix(prefix) + + for prop_item in iter(self.properties.values()): + if prop_item.value is not None: + prop_item.value._update_prefix(prefix) + for cap_item in iter(self.capabilities.values()): + for cap_item_prop in iter(cap_item.properties.values()): + if cap_item_prop.value is not None: + cap_item_prop.value._update_prefix(prefix) + for interface_item in iter(self.interfaces.values()): + for interface_item_input in iter(interface_item.inputs.values()): + if interface_item_input.value is not None: + interface_item_input.value._update_prefix(prefix) + for operation_item in iter(interface_item.operations.values()): + for input_item in iter(operation_item.inputs.values()): + if input_item.value is not None: + input_item.value._update_prefix(prefix) + + for req in self.requirements: + req._update_prefix(prefix) + + self._update_parent_node() + + def _verify_req_node(self, req_type, req_cap, req_filter): + if req_type is not None and self.type_obj._verify_req_type(req_type) is False: + logging.warning( 'Type matching failed') + return False + + if req_cap is not None: + cap_found = None + for cap_item in iter(self.capabilities.values()): + if cap_item._validate_capability(req_cap) is True: + cap_found = cap_item + break + if cap_found is None: + logging.warning( 'Capability matching failed') + return False + + return self._verify_node_filter(req_filter) + + def _verify_node_filter(self, req_filter): + return True + + def _propagate_substitution_value(self): + converge = True + for prop_item in iter(self.properties.values()): + converge = converge and prop_item._propagate_substitution_value() + for req_item in self.requirements: + converge = converge and req_item._propagate_substitution_value() + for cap_item in iter(self.capabilities.values()): + converge = converge and cap_item._propagate_substitution_value() + for attr_item in iter(self.attributes.values()): + converge = converge and attr_item._propagate_attr_substitution_value() + + + if self.mapping_template is not None: + self.mapping_template._propagate_substitution_value() + if self.tran_template is not None: + self.tran_template._propagate_substitution_value() + + return converge + + def _prepare_extra_imports(self, tags = ''): + if 'noexpand' in tags: + return [] + if self.tran_template is not None: + return self.tran_template._prepare_extra_imports(tags) + if self.mapping_template is not None: + return 
self.mapping_template._prepare_extra_imports(tags) + return [] + + def _prepare_output(self, tags=''): + if 'noexpand' not in tags: + newtags = tags.replace('main', 'part') + if self.tran_template is not None: + return self.tran_template._prepare_output(newtags) + if self.mapping_template is not None: + return self.mapping_template._prepare_output(newtags) + output = {} + if 'heat' in tags: + heat_type = re.sub('tosca.heat.', '', self.type) + heat_type = re.sub('\.', '::', heat_type) + output[YMO_NOD_TYPE] = heat_type + else: + output[YMO_NOD_TYPE] = self.type + prop_out = {} + for prop in self.properties.keys(): + prop_item = self.properties[prop] +# if prop_item.required is False and prop_item.used is not True and prop_item.filled is not True: + if prop_item.required is False and prop_item.filled is not True: + continue + if prop_item.filled is not True or prop_item.value is None: + prop_value = None + else: + prop_value = prop_item.value._get_value(tags)[0] + if prop_item.required is False and prop_value in [None, [], {}]: + continue + else: + prop_out[prop] = prop_value + cap_out={} + for cap in iter(self.capabilities.values()): + cap_item = {} + for cap_prop in iter(cap.properties.values()): + if cap_prop.filled is True: + cap_item[cap_prop.name] = cap_prop.value._get_value(tags)[0] + if len(cap_item) > 0: + cap_out[cap.name] = {'properties': cap_item} + + req_out = [] + for req in self.requirements: + if req.filled is True: + req_item = dict() + if 'cloudify' in tags: + if req.relationship is not None : + req_item['type'] = req.relationship + else: + req_item['type'] = 'cloudify.relationships.connected_to' + req_item['target'] = req.str_value + else: + req_item[req.name] = req.str_value + req_out.append(req_item) + elif req.filter is not None and 'cloudify' not in tags: + req_item = {} + if req.req_capability is not None: + req_item[YMO_REQ_CAPABILITY] = req.req_capability + if req.req_type is not None: + req_item[YMO_REQ_NODE] = req.req_type + if req.relationship is not None: + req_item[YMO_REQ_RELATIONSHIP] = req.relationship + req_item[YMO_REQ_FILTER] = req.filter + req_out.append({req.name:req_item}) + int_out = {} + for interface_name in self.interfaces.keys(): + int_out[interface_name] = self.interfaces[interface_name]._prepare_output(tags) + + if len(prop_out) > 0: + output[YMO_NOD_PROPERTIES]=prop_out + if len(req_out) > 0 and 'java_sim' not in tags: + if 'cloudify' in tags: + output[YMO_NOD_RELATIONSHIPS] = req_out + else: + output[YMO_NOD_REQUIREMENTS] = req_out + if len(cap_out) > 0 and 'cloudify' not in tags: + output[YMO_NOD_CAPABILITIES] = cap_out + if len(int_out) > 0 : + output[YMO_NOD_INTERFACES] = int_out + final_out = {} + final_out[self.name] = output + return final_out + + def _prepare_heat_output(self, parameters_type, parameters_val): + if self.mapping_template is not None: + return self.mapping_template._prepare_heat_output(parameters_type, parameters_val, True) + else: + if tosca_heat._type_validate(self.type) is not True: + return None + output = {} + output[YMO_NOD_TYPE] = tosca_heat._type_translate(self.type) + prop_out = {} + for prop_item in iter(self.properties.values()): + if prop_item.filled: + prop_out[prop_item.name] = prop_item.value + else: + input_name = self.name + '_' + prop_item.name + prop_out[prop_item.name] = '{ get_param: ' + input_name + ' }' + input_type = {} + input_type[input_name] = prop_item.type + input_val = {} + input_val[input_name] = prop_item.value + parameters_type.update(input_type) + parameters_val.udpate(input_val) + if 
len(prop_out) > 0: + output[YMO_NOD_PROPERTIES] = prop_out + final_out = {} + final_out[self.name] = output + return final_out + + + def toJson(self): + return self.fe_json
\ No newline at end of file diff --git a/app/toscalib/templates/node.pyc b/app/toscalib/templates/node.pyc Binary files differnew file mode 100644 index 0000000..3d780b7 --- /dev/null +++ b/app/toscalib/templates/node.pyc diff --git a/app/toscalib/templates/operation_item.py b/app/toscalib/templates/operation_item.py new file mode 100644 index 0000000..8bc2612 --- /dev/null +++ b/app/toscalib/templates/operation_item.py @@ -0,0 +1,65 @@ +from toscalib.templates.property_item import PropertyItem +from toscalib.types.property import PropertyDefinition +from toscalib.templates.constant import * + + +class OperationItem(object): + def __init__(self, definition, name = None, content = None): + if definition is not None: + self.name = definition.name + self.implementation = definition.implementation + self.definition = definition + self.inputs = {} + self.parent_node = None + + for prop in definition.inputs.keys(): + self.inputs[prop] = PropertyItem(definition.inputs[prop]) + else: + self.name = name + self.implementation = None + self.definition = None + self.inputs = {} + self.parent_node = None + + if content is not None: + self._parse_pre_defined_content(content) + + def _parse_pre_defined_content(self, content): + if content is None: + return + + if type(content) is not dict: + self.implementation = content + return + + for key_name in content.keys(): + if key_name == 'implementation': + self.implementation = content[key_name] + if key_name == 'inputs': + input_sec = content['inputs'] + for input_item in input_sec.keys(): + self.inputs[input_item] = PropertyItem(PropertyDefinition(input_item)) + self.inputs[input_item]._assign(input_sec[input_item]) + + def _update_parent_node(self, parent): + self.parent_node = parent + for prop in iter(self.inputs.values()): + prop._update_parent_node(parent) + + def _prepare_output(self, tags=''): + output = {} +# if self.implementation is not None: +# output[YMO_OP_IMPLEMENTATION] = self.implementation +# if 'cloudify' in tags: +# output[YMO_OP_EXECUTOR] = 'central_deployment_agent' + if len(self.inputs) > 0: + inputs = {} + for prop_name in self.inputs.keys(): + prop_item = self.inputs[prop_name] + if prop_item.value is None: + prop_value = None + else: + prop_value = prop_item.value._get_value(tags)[0] + inputs[prop_name] = prop_value + output[YMO_OP_INPUTS] = inputs + return output diff --git a/app/toscalib/templates/operation_item.pyc b/app/toscalib/templates/operation_item.pyc Binary files differnew file mode 100644 index 0000000..1d35b40 --- /dev/null +++ b/app/toscalib/templates/operation_item.pyc diff --git a/app/toscalib/templates/property_item.py b/app/toscalib/templates/property_item.py new file mode 100644 index 0000000..dc6aea7 --- /dev/null +++ b/app/toscalib/templates/property_item.py @@ -0,0 +1,95 @@ +#Author: Shu Shi +#emaiL: shushi@research.att.com + + +from toscalib.templates.constant import * +import ast, logging + +class PropertyItem(object): + def __init__(self, definition): + self.name = definition.name + self.type_obj = definition.type_obj + self.filled = False + self.definition = definition + self.value = None + self.required = definition.required + self.sub_pointer = None + self.used = True + self.parent_node = None + + def _assign(self, value): +# from toscalib.templates.capability_item import CapabilityItem + from toscalib.templates.value import Value + if value is None: + return False +# elif isinstance(value, CapabilityItem): +# self.value = value +# self.filled = True + else: + self.value = Value(self.type_obj, value) +# 
formatted_value = self.type._format_value(value) + if self.value is None: + logging.warning( 'Value can not be assigned: validation failed!') + else: + self.filled = True + +# if self.sub_pointer is not None: +# self.sub_pointer._assign(value) + + return True + + def _direct_assign(self, value): + self.value = value + if value is not None: + self.filled = True + + def _update_prefix(self, prefix): + self.name = prefix + self.name + + def _update_parent_node(self, parent): + self.parent_node = parent + + def _propagate_substitution_value(self): + if self.sub_pointer is None: + return True + if self.value is not None: +# self.sub_pointer._direct_assign(self.value) + self.sub_pointer._assign(self.value._get_value()[0]) + + return True + + def _propagate_attr_substitution_value(self): + if self.sub_pointer is None or hasattr(self.sub_pointer, 'value') is False: + return True + self._direct_assign(self.sub_pointer.value) + return True + + def _prepare_input_type_output(self, tags): + out_details= {} + out_details[YMO_PROP_TYPE] = self.type_obj.name + if hasattr(self.definition, 'default') is True and self.definition.default is not None: + if 'w_default' in tags: + return {} + out_details[YMO_PROP_DEFAULT] = self.definition.default + + out_val = {} + out_val[self.name] =out_details + return out_val + + def _prepare_output_type_output(self): + out_val = {} + val_body = self.value._get_value()[0] + out_val[self.name] =dict(value=val_body) + return out_val + + def _prepare_heat_output(self): + type_out = {} + type_out[self.name] =dict(type=self.type.name) + val_out = {} + if self.filled: + val_out[self.name] = self.value + else: + val_out[self.name] = None + + return type_out, val_out + diff --git a/app/toscalib/templates/property_item.pyc b/app/toscalib/templates/property_item.pyc Binary files differnew file mode 100644 index 0000000..96d7b86 --- /dev/null +++ b/app/toscalib/templates/property_item.pyc diff --git a/app/toscalib/templates/requirement_item.py b/app/toscalib/templates/requirement_item.py new file mode 100644 index 0000000..19c9844 --- /dev/null +++ b/app/toscalib/templates/requirement_item.py @@ -0,0 +1,116 @@ +#Author: Shu Shi +#emaiL: shushi@research.att.com + + +from toscalib.templates.constant import * +from toscalib.templates.property_item import PropertyItem +import logging + +class RequirementItem(object): + def __init__(self, definition): + self.name = definition.name + self.value = None + self.str_value = None + self.cap_match = None + self.filled = False + self.pending = False + + self.req_capability = definition.req_capability + self.relationship = definition.relationship + self.req_type = definition.req_type + self.filter = None + self.sub_pointer = None + self.parent_node = None + + + def _assign(self, value): + if value is None: + logging.warning( 'Assign None to fulfill requirement') + return False + + for cap_item in iter(value.capabilities.values()): + if cap_item._validate_capability(self.req_capability) is True: + self.cap_match = cap_item + break + if self.cap_match is None: + logging.warning( 'No matching capabilities in requirement assignment') + return False + else: + self.value = value + self.str_value = value.name + self.filled = True + + return True + + def _propagate_substitution_value(self): + if self.sub_pointer is None: + return True + if self.filled is not True: + return True + + if isinstance(self.sub_pointer, RequirementItem): + if self.cap_match.sub_pointer is None: + self.sub_pointer._assign(self.value) + else: + 
self.sub_pointer._assign(self.cap_match.sub_pointer.parent_node) + elif isinstance(self.sub_pointer, PropertyItem): + if self.cap_match.id.value is not None: + self.sub_pointer._direct_assign(self.cap_match.id.value) + + return True + + + def _verify_requirement(self, node_dict): + if self.filled is True: +# if node_dict.has_key(self.str_value): + if self.str_value in node_dict: + self._assign(node_dict[self.str_value]) + else: + logging.warning( 'Error! the node requires \''+ self.str_value+ '\' not defined in the template!') + self.str_value = None + self.filled = False + if self.pending is True: +# if node_dict.has_key(self.str_value): + if self.str_value in node_dict: + self._assign(node_dict[self.str_value]) + self.pending = None + else: + self.req_type = self.str_value + self.str_value = None + self.pending = None + + def _verify_node(self, node): + if node._verify_req_node(self.req_type, self.req_capability, self.filter) is False: + logging.warning( 'requirement matching failed') + return False + + return True + + + def _update_prefix(self, prefix): + if self.filled is True: + self.str_value = prefix + self.str_value + + def _update_parent_node(self, parent): + self.parent_node = parent + + def _parse_pre_defined_content(self, content): + if type(content) is str: + self.str_value = content + self.filled = True + elif type(content) is dict: +# if content.has_key(REQ_NODE): + if REQ_NODE in content: + self.str_value = content[REQ_NODE] + self.pending = True +# if content.has_key(REQ_CAPABILITY): + if REQ_CAPABILITY in content: + self.req_capability = content[REQ_CAPABILITY] +# if content.has_key(REQ_RELATIONSHIP): + if REQ_RELATIONSHIP in content: + self.relationship = content[REQ_RELATIONSHIP] +# if content.has_key(REQ_FILTER): + if REQ_FILTER in content: + self.filter = content[REQ_FILTER] + else: + logging.warning( 'Can not parse requirement assignment for '+self.name) diff --git a/app/toscalib/templates/requirement_item.pyc b/app/toscalib/templates/requirement_item.pyc Binary files differnew file mode 100644 index 0000000..1cb307e --- /dev/null +++ b/app/toscalib/templates/requirement_item.pyc diff --git a/app/toscalib/templates/substitution_rule.py b/app/toscalib/templates/substitution_rule.py new file mode 100644 index 0000000..fda8e50 --- /dev/null +++ b/app/toscalib/templates/substitution_rule.py @@ -0,0 +1,179 @@ +from toscalib.templates.constant import * +import logging + + +class SubstitutionRule (object): + def __init__(self, type, item_name, prop_name, value): + self.type = type + self.item = item_name + self.property = prop_name + self.value = value + + def _update_pointer(self, src_node, dst_template): + if type(self.value) is not list and len(self.value) < 1: + logging.warning( 'Incorrect mapping rule for property '+ self.property+ ': '+ self.value) + return + + if self.type == SUB_PROPERTY: + if self.value[0] == SUB_INPUT: +# if hasattr(dst_template, 'inputs') and dst_template.inputs.has_key(self.value[1]): + if hasattr(dst_template, 'inputs') and self.value[1] in dst_template.inputs: + if src_node is not None: + src_node.properties[self.property].sub_pointer = dst_template.inputs[self.value[1]] + if src_node.properties[self.property].required is True or src_node.properties[self.property].filled is True: + dst_template.inputs[self.value[1]].used = True + elif src_node is not None and src_node.properties[self.property].required is True: + logging.warning( 'Incorrect mapping rule for property '+ self.property+ ': no input named '+ self.value[1]) +# elif 
dst_template.node_dict.has_key(self.value[0]): + elif self.value[0] in dst_template.node_dict: + target_node = dst_template.node_dict[self.value[0]] + target_prop_item = target_node._get_property_item(self.value[1]) + if target_prop_item is not None: + if src_node is not None: + src_prop_item = src_node._get_property_item(self.property) + if src_prop_item.required is True or src_prop_item.filled is True: + target_prop_item.used = True + if src_prop_item is not None: + src_prop_item.sub_pointer = target_prop_item + else: + logging.warning( 'Incorrect mapping rule for property '+ self.property+ ': no property named '+ self.value[1]+ ' in node '+ self.value[0]) + else: + logging.warning('Incorrect mapping rule for property '+ self.property+ ': no node named '+ self.value[0]) + + elif self.type == SUB_ATTRIBUTE: + if self.value[0] == SUB_OUTPUT: +# if hasattr(dst_template, 'outputs') and dst_template.outputs.has_key(self.value[1]): + if hasattr(dst_template, 'outputs') and self.value[1] in dst_template.outputs: + if src_node is not None: + src_node.attributes[self.property].sub_pointer = dst_template.outputs[self.value[1]] + else: + logging.warning( 'Incorrect mapping rule for attribute '+ self.property+ ': no output named '+ self.value[1]) + + elif self.type == SUB_CAPABILITY: + if self.property is None: +# if dst_template.node_dict.has_key(self.value[0]): + if self.value[0] in dst_template.node_dict: + target_node = dst_template.node_dict[self.value[0]] + target_cap_item = target_node._get_capability_item(self.value[1]) + if target_cap_item is not None: + if src_node is not None: + src_cap_item = src_node._get_capability_item(self.item) + if src_cap_item is not None: + src_cap_item.sub_pointer = target_cap_item + for prop_name in src_cap_item.properties.keys(): + src_cap_item.properties[prop_name].sub_pointer = target_cap_item.properties[prop_name] + else: + logging.warning( 'Incorrect mapping rule for capability '+ self.item+ ': no capability named '+ self.value[1]+ ' in node '+ self.value[0]) + else: + logging.warning( 'Incorrect mapping rule for capability '+ self.item+ ': no node named '+ self.value[0]) + elif self.property == SUB_CAP_ID: + if self.value[0] == SUB_OUTPUT: +# if hasattr(dst_template, 'outputs') and dst_template.outputs.has_key(self.value[1]): + if hasattr(dst_template, 'outputs') and self.value[1] in dst_template.outputs: + target_node = dst_template.outputs[self.value[1]] + if src_node is not None: + src_cap_item = src_node._get_capability_item(self.item) + if src_cap_item is not None: + src_cap_item.sub_pointer = target_node +# elif dst_template.node_dict.has_key(self.value[0]): + elif self.value[0] in dst_template.node_dict: + target_node = dst_template.node_dict[self.value[0]] + if len(self.value) < 2: + target_item = target_node +# elif target_node.capabilities.has_key(self.value[1]) and len(self.value) > 1: + elif len(self.value) > 1 and self.value[1] in target_node.capabilities : + target_item = target_node._get_capability_property(self.value[1], self.value[2]) + elif self.value[1] in target_node.properties: + target_item = target_node._get_property_item(self.value[1]) + else: + target_item = None + logging.warning( 'Incorrect mapping rule for capability '+ self.item+ ': no capability/property named '+ self.value[1]+ ' in node '+ self.value[0]) + + if target_item is not None and src_node is not None: + src_cap_item = src_node._get_capability_item(self.item) + if src_cap_item is not None: + src_cap_item.sub_pointer = target_item + else: + if self.value[0] == 
SUB_INPUT: +# if hasattr(dst_template, 'inputs') and dst_template.inputs.has_key(self.value[1]): + if hasattr(dst_template, 'inputs') and self.value[1] in dst_template.inputs: + if src_node is not None: + src_cap_prop_item = src_node._get_capability_property(self.item, self.property) + src_cap_prop_item.sub_pointer = dst_template.inputs[self.value[1]] + if src_cap_prop_item.required is True or src_cap_prop_item.filled is True: + dst_template.inputs[self.value[1]].used = True + else: + logging.warning( 'Incorrect mapping rule for capability '+ self.item+ ': no input named '+ self.value[1]) +# elif dst_template.node_dict.has_key(self.value[0]): + elif self.value[0] in dst_template.node_dict: + target_node = dst_template.node_dict[self.value[0]] + +# if target_node.capabilities.has_key(self.value[1]): + if self.value[1] in target_node.capabilities: + target_cap_property = target_node._get_capability_property(self.value[1], self.value[2]) + if target_cap_property is not None: + if src_node is not None: + src_cap_prop_item = src_node._get_capability_property(self.item, self.property) + if src_cap_prop_item is not None: + src_cap_prop_item.sub_pointer = target_cap_property + else: + logging.warning( 'Incorrect mapping rule for capability '+ self.item+ ': no property named '+ self.value[2]+ ' in capability '+ self.value[0]+ '->'+ self.value[1]) +# elif target_node.properties.has_key(self.value[1]): + elif self.value[1] in target_node.properties: + target_prop_item = target_node._get_property_item(self.value[1]) + if src_node is not None: + src_cap_prop_item = src_node._get_capability_property(self.item, self.property) + if src_cap_prop_item is not None: + src_cap_prop_item.sub_pointer = target_prop_item + else: + logging.warning( 'Incorrect mapping rule for capability '+ self.item+ ': no capability/property named '+ self.value[1]+ ' in node '+ self.value[0]) + else: + logging.warning( 'Incorrect mapping rule for capability '+ self.item+ ': no node named '+ self.value[0]) + + elif self.type == SUB_REQUIREMENT: + if self.property is None: +# if dst_template.node_dict.has_key(self.value[0]): + if self.value[0] in dst_template.node_dict: + target_node = dst_template.node_dict[self.value[0]] + target_req_item = target_node._get_requirement_item_first(self.value[1]) + if target_req_item is not None: + if src_node is not None: + src_req_item = src_node._get_requirement_item_first(self.item) + if src_req_item is not None: + src_req_item.sub_pointer = target_req_item + else: + logging.warning( 'Incorrect mapping rule for requirement '+ self.item+ ': no requirement named '+ self.value[1]+ ' in node '+ self.value[0]) + else: + logging.warning( 'Incorrect mapping rule for requirement '+ self.item+ ': no node named '+ self.value[0]) + elif self.property == SUB_REQ_ID: + if self.value[0] == SUB_INPUT: +# if hasattr(dst_template, 'inputs') and dst_template.inputs.has_key(self.value[1]): + if hasattr(dst_template, 'inputs') and self.value[1] in dst_template.inputs: + if src_node is not None: + src_req_item = src_node._get_requirement_item_first(self.item) + if src_req_item is not None: + src_req_item.sub_pointer = dst_template.inputs[self.value[1]] + dst_template.inputs[self.value[1]].used = True + else: + logging.warning( 'Incorrect mapping rule for property '+ self.property+ ': no input named '+ self.value[1]) + +# elif dst_template.node_dict.has_key(self.value[0]): + elif self.value[0] in dst_template.node_dict: + target_node = dst_template.node_dict[self.value[0]] + target_prop_item = 
target_node._get_property_item(self.value[1]) + if target_prop_item is not None: + if src_node is not None: + src_req_item = src_node._get_requirement_item_first(self.item) + if src_req_item is not None: + src_req_item.sub_pointer = target_prop_item + else: + logging.warning( 'Incorrect mapping rule for requirement '+ self.item+ ': no property named '+ self.value[1]+ ' in node '+ self.value[0]) + else: + logging.warning( 'Incorrect mapping rule for requirement '+ self.item+ ': no node named '+ self.value[0]) + else: + logging.warning( 'Incorrect mapping rule for requirement '+ self.item+ ': wrong property name '+ self.property) + + else: + logging.warning('Incorrect mapping rule type: '+ self.type) + + diff --git a/app/toscalib/templates/substitution_rule.pyc b/app/toscalib/templates/substitution_rule.pyc Binary files differnew file mode 100644 index 0000000..0d4ad19 --- /dev/null +++ b/app/toscalib/templates/substitution_rule.pyc diff --git a/app/toscalib/templates/topology.py b/app/toscalib/templates/topology.py new file mode 100644 index 0000000..f8c00ed --- /dev/null +++ b/app/toscalib/templates/topology.py @@ -0,0 +1,419 @@ +#Author: Shu Shi +#emaiL: shushi@research.att.com + + +from toscalib.templates.constant import * +from toscalib.templates.heat_constants import * +from toscalib.templates.substitution_rule import SubstitutionRule +from toscalib.types.property import PropertyDefinition +from toscalib.templates.property_item import PropertyItem +from toscalib.templates.heat_constants import HOT_VERSION_NUM +import copy, logging + +class ToscaTopology(object): + def __init__(self, name, metadata_section=None, content_section=None): + self.name = name + self.metadata = metadata_section + self.raw_content = content_section + self.db = None + + self.node_dict = {} + self.inputs = {} + self.aux_inputs = {} + self.outputs = {} + self.sub_rules = [] + + self.node_index = 0 + self.temp_index = 0 + + self.extra_imports = [] + + def _parse_content(self, db): + if self.db is not None: + return + + self.db = db + + if self.raw_content is None: + return + +# if self.raw_content.has_key(TOPO_INPUTS): + if TOPO_INPUTS in self.raw_content: + self._parse_input(db, self.raw_content[TOPO_INPUTS]) + +# if self.raw_content.has_key(TOPO_NODE_TEMPLATES): + if TOPO_NODE_TEMPLATES in self.raw_content: + self._parse_node_template(db, self.raw_content[TOPO_NODE_TEMPLATES]) + else: + logging.warning( 'Topology template: ' + self.name+ ' has NO node templates!') + +# if self.raw_content.has_key(TOPO_OUTPUTS): + if TOPO_OUTPUTS in self.raw_content: + self._parse_output(db, self.raw_content[TOPO_OUTPUTS]) + +# if self.raw_content.has_key(TOPO_SUBSTITUION_MAPPINGS): + if TOPO_SUBSTITUION_MAPPINGS in self.raw_content: + self._parse_substitution(db, self.raw_content[TOPO_SUBSTITUION_MAPPINGS]) + else: + self.sub_type = None + self._verify_substitution() + self._update_function_pointer() + + def _parse_substitution(self, db, sub_sec): +# if sub_sec.has_key(SUB_NODE_TYPE): + if SUB_NODE_TYPE in sub_sec: + self.sub_type = sub_sec[SUB_NODE_TYPE] +# if db.NODE_TYPES.has_key(self.sub_type): + if self.sub_type in db.NODE_TYPES: + db.NODE_TYPES[self.sub_type].mapping_template = self + else: + logging.warning( 'substitution mapping section does not have node_type defined') + return + +# if sub_sec.has_key(SUB_PROPERTY): +# sub_prop = sub_sec[SUB_PROPERTY] +# for sub_prop_name in sub_prop.keys(): +# self.sub_rules.append(SubstitutionRule(SUB_PROPERTY, None, sub_prop_name, sub_prop[sub_prop_name])) + + for sub_prop in 
db.NODE_TYPES[self.sub_type].properties.keys(): +# if self.inputs.has_key(sub_prop): + if sub_prop in self.inputs: + self.sub_rules.append(SubstitutionRule(SUB_PROPERTY, None, sub_prop, [SUB_INPUT, sub_prop])) + + for sub_attr in db.NODE_TYPES[self.sub_type].attributes.keys(): +# if self.outputs.has_key(sub_attr): + if sub_attr in self.outputs: + self.sub_rules.append(SubstitutionRule(SUB_ATTRIBUTE, None, sub_attr, [SUB_OUTPUT, sub_attr])) + +# if sub_sec.has_key(SUB_CAPABILITY): + if SUB_CAPABILITY in sub_sec: + sub_cap = sub_sec[SUB_CAPABILITY] + for sub_cap_name in sub_cap.keys(): + sub_cap_item = sub_cap[sub_cap_name] + #standard capability mapping rule + if type(sub_cap_item) is not dict: + self.sub_rules.append(SubstitutionRule(SUB_CAPABILITY, sub_cap_name, None, sub_cap_item)) + #self-proposed capability mapping rules + else: +# if sub_cap_item.has_key(SUB_CAP_ID): + if SUB_CAP_ID in sub_cap_item: + self.sub_rules.append(SubstitutionRule(SUB_CAPABILITY, sub_cap_name, SUB_CAP_ID, sub_cap_item[SUB_CAP_ID])) +# if sub_cap_item.has_key(SUB_CAP_PROPERTY): + if SUB_CAP_PROPERTY in sub_cap_item: + sub_cap_item_prop = sub_cap_item[SUB_CAP_PROPERTY] + for sub_cap_item_prop_name in sub_cap_item_prop.keys(): + self.sub_rules.append(SubstitutionRule(SUB_CAPABILITY, sub_cap_name, sub_cap_item_prop_name, sub_cap_item_prop[sub_cap_item_prop_name])) + +# if sub_sec.has_key(SUB_REQUIREMENT): + if SUB_REQUIREMENT in sub_sec: + sub_req = sub_sec[SUB_REQUIREMENT] + for sub_req_name in sub_req.keys(): + sub_req_item = sub_req[sub_req_name] + #standard requirement mapping rule + if type(sub_req_item) is not dict: + self.sub_rules.append(SubstitutionRule(SUB_REQUIREMENT, sub_req_name, None, sub_req_item)) + #self-proposed requirement mapping rules + else: +# if sub_req_item.has_key(SUB_REQ_ID): + if SUB_REQ_ID in sub_req_item: + self.sub_rules.append(SubstitutionRule(SUB_REQUIREMENT, sub_req_name, SUB_REQ_ID, sub_req_item[SUB_REQ_ID])) + else: + logging.warning( 'Incorrect substitution mapping rules') + + def _verify_substitution(self, target_node=None): + for rule in self.sub_rules: + rule._update_pointer(target_node, self) + + def _parse_input(self, db, input_sec): + for input_name in input_sec.keys(): + input_def = PropertyDefinition(input_name, input_sec[input_name]) + input_def._parse_content(db) + self.inputs[input_name] = PropertyItem(input_def) + + def _parse_output(self, db, output_sec): + for output_name in output_sec.keys(): + output_def = PropertyDefinition(output_name) +# output_def._parse_content(db) + self.outputs[output_name] = PropertyItem(output_def) +# if output_sec[output_name].has_key('value'): + if 'value' in output_sec[output_name]: + self.outputs[output_name]._assign(output_sec[output_name]['value']) + + def _parse_node_template(self, db, template_sec): + self.node_dict = {} + for name in template_sec.keys(): +# if template_sec[name].has_key(NOD_TYPE): + if NOD_TYPE in template_sec[name]: + node_type_name = template_sec[name][NOD_TYPE] + else: + logging.warning( 'Invalid template: node section has no type') + continue + +# if db.NODE_TYPES.has_key(node_type_name) is False: + if node_type_name not in db.NODE_TYPES: + logging.warning( 'Invalid template: node type: '+ str(node_type_name)+ ' not defined or imported') + continue + + from toscalib.templates.node import Node + new_node = Node(self, name, db.NODE_TYPES[node_type_name]) + new_node._parse_pre_defined_content(template_sec[name]) + + self._add_node(new_node) + + for node in iter(self.node_dict.values()): + 
node._verify_requirements(self.node_dict) + node._verify_functions() + + + self.edge_list = self._create_edges() + + def _create_edges(self): + edges = [] + for node in iter(self.node_dict.values()): + for req in node.requirements: + if req.filled is True: + new_edge = (node, self.node_dict[req.str_value]) + logging.debug( 'edge created: '+ new_edge[0].name+ ' --> '+ new_edge[1].name) + edges.append(new_edge) + return edges + + def _update_function_pointer(self): + for node in iter(self.node_dict.values()): + #node._verify_requirements(self.node_dict) + node._verify_functions() + for output in iter(self.outputs.values()): + if output.value is not None: + output.value._update_function_reference(self) + + def _update_translation_function_pointer(self): + for node in iter(self.node_dict.values()): + if node.tran_template is not None: + node.tran_template._update_function_pointer() + + def _update_prefix(self, prefix): + exist_key_list = list(self.node_dict.keys()) + for node_key in exist_key_list: + if node_key == 'NO_PREFIX': + new_node_key = prefix[:len(prefix)-1] + else: + new_node_key = prefix + node_key + node = self.node_dict.pop(node_key) + node._update_prefix(prefix) + self.node_dict[new_node_key] = node + + exist_key_list = list(self.inputs.keys()) + for item_key in exist_key_list: + new_item_key = prefix + item_key + item = self.inputs.pop(item_key) + item._update_prefix(prefix) + self.inputs[new_item_key] = item + + exist_key_list = list(self.outputs.keys()) + for item_key in exist_key_list: + ###don't update output name prefix here + ###temporary solution for cloudify generation + ###but still need to update pointer for the value + new_item_key = prefix + item_key + #item = self.outputs.pop(item_key) + #item._update_prefix(prefix) + item = self.outputs[item_key] + item.value._update_prefix(prefix) + item.value._update_function_reference(self) + #self.outputs[new_item_key] = item + + #self._update_function_pointer() + + + def _update_used_tag_for_translation(self): + for item in iter(self.inputs.values()): + item.used = False + for node_item in iter(self.node_dict.values()): + for prop_item in iter(node_item.properties.values()): + prop_item.used = False + + def _add_node(self, new_node): + if new_node is None: + return + self.node_dict[new_node.name] = new_node + + def _propagate_substitution_value(self): + converge = False + while converge is not True: + converge = True + for node_item in iter(self.node_dict.values()): + converge = converge and node_item._propagate_substitution_value() + + + def _auto_generate_aux_inputs(self): + for node_name in self.node_dict.keys(): + node = self.node_dict[node_name] + for prop_name in node.properties.keys(): + prop_item = node.properties[prop_name] + if prop_item.value is None or prop_item.filled is False: + new_input_name = node_name + '_' + prop_name +# while self.inputs.has_key(new_input_name) or self.aux_inputs.has_key(new_input_name): + while new_input_name in self.inputs or new_input_name in self.aux_inputs: + new_input_name = new_input_name + '_' + def_item = copy.deepcopy(prop_item.definition) + def_item.name = new_input_name + self.aux_inputs[new_input_name] = PropertyItem(def_item) + fun_item = {} + fun_item['get_input'] = new_input_name + prop_item._assign(fun_item) + prop_item.value._update_function_reference(self) + + def _prepare_node_types(self): + for node_type in iter(self.db.NODE_TYPES.values()): + node_type.used = False + + for node in iter(self.node_dict.values()): + node_type = node.type_obj + while node_type is not None: 
+                self.db.NODE_TYPES[node_type.name].used = True
+                node_type = node_type.parent
+
+    def _prepare_node_types_output(self, tags=''):
+        self._prepare_node_types()
+        node_type = {}
+        if 'noexpand' not in tags:
+            for node in iter(self.node_dict.values()):
+                if node.tran_template is not None:
+                    node_type.update(node.tran_template._prepare_node_types_output(tags))
+        if len(node_type) < 1:
+            for ntype in iter(self.db.NODE_TYPES.values()):
+                if ntype.used is False:
+                    continue
+                type_content = copy.deepcopy(ntype.raw_content)
+                if 'cloudify' in tags:
+                    if ntype.name == 'cloudify.nodes.Root':
+                        continue
+
+                    type_content.pop('capabilities', None)
+                    type_content.pop('requirements', None)
+                    type_content.pop('attributes', None)
+                else:
+                    if ntype.name == 'tosca.nodes.Root':
+                        continue
+
+                node_type[ntype.name] = type_content
+
+        return node_type
+
+    def _prepare_extra_imports(self, tags):
+        if 'cloudify' in tags:
+            ret_val = []
+            for item in self.extra_imports:
+                ret_val += list(item.values())
+            return ret_val
+        else:
+            return self.extra_imports
+
+    def _prepare_output(self, tags=''):
+
+        output = {}
+        import_sec = []
+
+        if 'cloudify' in tags:
+            output[YMO_VERSION] = 'cloudify_dsl_1_3'
+            for item in self.extra_imports:
+                import_sec += list(item.values())
+            #import_sec.append('http://www.getcloudify.org/spec/cloudify/3.4/types.yaml')
+        else:
+            import_sec += self.extra_imports
+            output[YMO_VERSION] = 'tosca_simple_yaml_1_0_0'
+
+        if 'import_schema' in tags:
+            output[YMO_IMPORT] = [{'schema': 'schema.yaml'}]
+
+        if self.metadata is not None and 'java_sim' not in tags:
+            output[YMO_METADATA] = self.metadata
+        topo_sec = {}
+        node_temp = {}
+        for node in iter(self.node_dict.values()):
+            node_temp.update(node._prepare_output(tags))
+            import_sec += node._prepare_extra_imports(tags)
+
+        if 'part' in tags:
+            return node_temp
+
+        if len(node_temp) > 0:
+            topo_sec[YMO_TOPO_NODE_TEMPLATES] = node_temp
+
+        input_sec = {}
+        for name in self.inputs.keys():
+            input_sec.update(self.inputs[name]._prepare_input_type_output(tags))
+        for name in self.aux_inputs.keys():
+            input_sec.update(self.aux_inputs[name]._prepare_input_type_output(tags))
+        if len(input_sec) > 0 and 'java_sim' not in tags:
+            topo_sec[YMO_TOPO_INPUTS] = input_sec
+        output_sec = {}
+        for name in self.outputs.keys():
+            output_sec.update(self.outputs[name]._prepare_output_type_output())
+        if len(output_sec) > 0 and 'java_sim' not in tags:
+            topo_sec[YMO_TOPO_OUTPUTS] = output_sec
+
+
+        if 'w_sub' in tags and self.sub_type is not None:
+            sub_sec = {}
+            sub_sec[YMO_SUB_NODE_TYPE] = self.sub_type
+            sub_cap = {}
+            sub_req = {}
+            for sub_rule in self.sub_rules:
+                if sub_rule.type == SUB_CAPABILITY:
+                    sub_cap[sub_rule.item] = sub_rule.value
+                if sub_rule.type == SUB_REQUIREMENT:
+                    sub_req[sub_rule.item] = sub_rule.value
+            sub_sec[YMO_SUB_CAPABILITY] = sub_cap
+            sub_sec[YMO_SUB_REQUIREMENT] = sub_req
+
+            topo_sec[YMO_TOPO_SUBSTITUION_MAPPINGS] = sub_sec
+
+        if 'cloudify' in tags:
+            output.update(topo_sec)
+        else:
+            output[YMO_TOPOLOGY] = topo_sec
+
+        if 'nodetype' in tags and 'java_sim' not in tags:
+            output[YMO_NODE_TYPE] = self._prepare_node_types_output(tags)
+
+        if len(import_sec) > 0:
+            output[YMO_IMPORT] = import_sec
+
+
+        return output
+
+
+    def _prepare_heat_output(self, parameters_type=None, parameters_val=None, stripped=False):
+        # use None defaults to avoid sharing mutable default dicts across calls
+        if parameters_type is None:
+            parameters_type = {}
+        if parameters_val is None:
+            parameters_val = {}
+        output = {}
+        env_output = {}
+        output[YMO_HOT_VERSION] = HOT_VERSION_NUM
+
+        for input_item in iter(self.inputs.values()):
+            out1, out2 = input_item._prepare_heat_output()
+            parameters_type.update(out1)
+            parameters_val.update(out2)
+        resources = {}
+        for node in iter(self.node_dict.values()):
+            resources.update(node._prepare_heat_output(parameters_type, parameters_val))
+
+        output[YMO_HOT_PARAMETERS] = parameters_type
+        output[YMO_HOT_RESOURCES] = resources
+        env_output[YMO_HOT_PARAMETERS] = parameters_val
+
+        if stripped is True:
+            return resources
+        else:
+            return output, env_output
+
+
+    def toJson(self):
+        ret_json = {}
+        tmp_json = {}
+        for node in iter(self.node_dict.values()):
+            tmp_json[node.name] = node.toJson()
+        ret_json['nodes'] = tmp_json
+        # relations, inputs and outputs are currently emitted as empty sections
+        ret_json['relations'] = {}
+        ret_json['inputs'] = {}
+        ret_json['outputs'] = {}
+        return ret_json
+
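+
+# Illustrative usage sketch (added for documentation only; `topo` and the tag
+# strings below are placeholders, not part of this module's guaranteed API):
+#
+#     topo._update_function_pointer()        # resolve intrinsic-function references
+#     topo._propagate_substitution_value()   # iterate until substituted values converge
+#     tosca_dict = topo._prepare_output(tags='nodetype')
+#     hot_dict, env_dict = topo._prepare_heat_output()
+#
+# _prepare_output() returns a plain dict ready to be dumped as YAML;
+# _prepare_heat_output() returns the HOT template and the environment
+# parameter values as two separate dicts (unless stripped is True).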
\ No newline at end of file
diff --git a/app/toscalib/templates/topology.pyc b/app/toscalib/templates/topology.pyc
Binary files differ
new file mode 100644
index 0000000..3b03399
--- /dev/null
+++ b/app/toscalib/templates/topology.pyc
diff --git a/app/toscalib/templates/value.py b/app/toscalib/templates/value.py
new file mode 100644
index 0000000..fee0ceb
--- /dev/null
+++ b/app/toscalib/templates/value.py
@@ -0,0 +1,266 @@
+from toscalib.types.data import TYP_LIST, TYP_MAP, TYP_STR, DataType
+from toscalib.templates.property_item import PropertyItem
+import copy, logging
+
+FUNCTIONS = (GET_INPUT, GET_PROPERTY, GET_ATTRIBUTE, GET_OPERATION, GET_NODES, GET_ARTIFACT, CONCAT) = \
+    ('get_input', 'get_property', 'get_attribute', 'get_operation_output', 'get_nodes_of_type', 'get_artifact', 'concat')
+
+VALUE_STATE = (VALID_VALUE, FUNCTION, NULL) = \
+    (1, 2, 3)
+
+def _is_function(value):
+    if type(value) is not dict:
+        return None
+    if len(value.keys()) != 1:
+        return None
+    key = list(value.keys())[0]
+    if key not in FUNCTIONS:
+        return None
+
+    if key == GET_INPUT:
+        out_value = FunctionValue(key)
+        out_value.target_property = value[key]
+        return out_value
+    elif key == CONCAT:
+        out_value = FunctionValue(key)
+        value_list = value[key]
+        if type(value_list) is not list:
+            return None
+        out_value.extra_data = []
+        for value_item in value_list:
+            out_value.extra_data.append(Value(DataType(TYP_STR), value_item))
+        return out_value
+    else:
+        out_value = FunctionValue(key)
+        value_list = value[key]
+        if type(value_list) is not list:
+            return None
+        out_value.extra_data = value_list
+
+        return out_value
+
+
+class FunctionValue(object):
+    def __init__(self, func_type):
+        self.type = func_type
+        self.target_property = None
+        self.extra_data = []
+        self.value_from_node = None
+        self.value_from_item = None
+        self.result = None
+
+    def _update_prefix(self, prefix):
+        if self.type == GET_INPUT:
+            self.target_property = prefix + self.target_property
+        elif (self.type == GET_PROPERTY or self.type == GET_ATTRIBUTE):
+            if self.extra_data is not None and len(self.extra_data) > 1 and self.extra_data[0] != 'SELF':
+                if self.extra_data[0] == 'NO_PREFIX':
+                    self.extra_data[0] = prefix[:len(prefix)-1]
+                else:
+                    self.extra_data[0] = prefix + self.extra_data[0]
+        elif self.type == CONCAT:
+            for item in self.extra_data:
+                if item.function is not None:
+                    item._update_prefix(prefix)
+
+    def _update_function_reference(self, temp, self_node=None, self_item=None):
+        if self.type == GET_INPUT:
+# if temp.inputs.has_key(self.target_property):
+            if self.target_property in temp.inputs:
+                self.value_from_item = temp.inputs[self.target_property]
+                return
+# elif temp.aux_inputs.has_key(self.target_property):
+            elif self.target_property in temp.aux_inputs:
+                self.value_from_item = temp.aux_inputs[self.target_property]
+                return
+            else:
+                logging.debug('get_input function points to a non-existent input, autofill: ' + self.target_property)
+                def_item = copy.deepcopy(self_item.definition)
+                def_item.name = self.target_property
+                temp.inputs[self.target_property] = PropertyItem(def_item)
+                self.value_from_item = temp.inputs[self.target_property]
+                return
+        elif self.type == GET_PROPERTY:
+            if self.extra_data is None or len(self.extra_data) < 2:
+                logging.warning('Error, get_property has not enough parameters ' + str(self.extra_data))
+                return
+# if self.extra_data[0] != 'SELF' and temp.node_dict.has_key(self.extra_data[0]) is False:
+            if self.extra_data[0] != 'SELF' and self.extra_data[0] not in temp.node_dict:
+                logging.warning('Error, get_property from unrecognized node ' + str(self.extra_data[0]))
+                return
+
+            if self.extra_data[0] == 'SELF':
+                node_item = self_node
+            else:
+                node_item = temp.node_dict[self.extra_data[0]]
+            self.value_from_node = node_item
+
+            if len(self.extra_data) == 2:
+                self.value_from_item = node_item._get_property_item(self.extra_data[1])
+                return
+            elif len(self.extra_data) == 3:
+                self.value_from_item = node_item._get_capability_property(self.extra_data[1], self.extra_data[2])
+                if self.value_from_item is not None:
+                    return
+                req_item = node_item._get_requirement_item_first(self.extra_data[1])
+                if req_item is None:
+                    return
+                new_node_item = req_item.value
+                if new_node_item is None:
+                    self.value_from_node = None
+                    return
+                self.value_from_node = new_node_item
+# if req_item.cap_match.properties.has_key(self.extra_data[2]):
+                if self.extra_data[2] in req_item.cap_match.properties:
+                    self.value_from_item = req_item.cap_match.properties[self.extra_data[2]]
+                else:
+                    self.value_from_item = new_node_item._get_property_item(self.extra_data[2])
+
+            else:
+                logging.warning('Too many parameters for get_property function ' + str(self.extra_data))
+        elif self.type == GET_ATTRIBUTE:
+            if self.extra_data is None or len(self.extra_data) < 2:
+                logging.error('Error, get_attribute has not enough parameters ' + str(self.extra_data))
+                return
+# if self.extra_data[0] != 'SELF' and temp.node_dict.has_key(self.extra_data[0]) is False:
+            if self.extra_data[0] != 'SELF' and self.extra_data[0] not in temp.node_dict:
+                logging.error('Error, get_attribute from unrecognized node ' + str(self.extra_data[0]))
+                return
+
+            if self.extra_data[0] == 'SELF':
+                node_item = self_node
+            else:
+                node_item = temp.node_dict[self.extra_data[0]]
+
+            self.value_from_node = node_item
+
+            if len(self.extra_data) > 3:
+                logging.warning('Too many parameters for get_attribute function ' + str(self.extra_data))
+                return
+            if self.extra_data[1] == 'id':
+                self.value_from_item = node_item.id
+            else:
+                self.value_from_item = node_item._get_attribute_item(self.extra_data[1])
+
+            if self.value_from_item is not None:
+                return
+            req_item = node_item._get_requirement_item_first(self.extra_data[1])
+            if req_item is None:
+                return
+            new_node_item = req_item.value
+            if new_node_item is None:
+                self.value_from_node = None
+                return
+            self.value_from_node = new_node_item
+            self.value_from_item = new_node_item._get_attribute_item(self.extra_data[2])
+            return
+
+        elif self.type == CONCAT:
+            for item in self.extra_data:
+                if item.function is not None:
+                    item._update_function_reference(temp, self_node)
+        else:
+            logging.warning('Function ' + self.type + ' is not supported')
+            return
+
+    def _calculate_function_result(self, tags=''):
+        if 'func' in tags:
+            return self._get_function_representation(tags), FUNCTION
+
+        if self.type == CONCAT:
+            function_ret = VALID_VALUE
+            function_str = ""
+            for item in self.extra_data:
+                item_str, item_value = item._get_value(tags)
+                if item_value is FUNCTION:
+                    function_ret = FUNCTION
+                    break
+                elif item_str is not None:
+                    function_str = function_str + item_str
+            if function_ret == FUNCTION:
+                return self._get_function_representation(tags), FUNCTION
+            else:
+                return function_str, function_ret
+
+        if 'w_default' in tags and self.type == GET_INPUT and self.value_from_item is not None and hasattr(self.value_from_item.definition, 'default') is True and self.value_from_item.definition.default is not None:
+            return self.value_from_item.definition.default, VALID_VALUE
+
+        if self.value_from_item is None or self.value_from_item.value is None or self.value_from_item.value.function == self:
+            return self._get_function_representation(tags), FUNCTION
+        else:
+            return self.value_from_item.value._get_value(tags)
+
+    def _get_value(self, tags=''):
+        return self._calculate_function_result(tags)
+
+    def _get_function_representation(self, tags=''):
+        if self.type == GET_INPUT:
+            out_str = {}
+            out_str[self.type] = self.target_property
+        elif self.type == GET_PROPERTY:
+            out_str = {}
+            if self.value_from_node is None or 'rawfunc' in tags:
+                out_val = copy.deepcopy(self.extra_data)
+            else:
+                out_val = []
+                out_val.append(self.value_from_node.name)
+                out_val.append(self.extra_data[len(self.extra_data)-1])
+
+            out_str[self.type] = out_val
+        elif self.type == GET_ATTRIBUTE:
+            out_str = {}
+            if self.value_from_node is None or 'rawfunc' in tags:
+                out_val = copy.deepcopy(self.extra_data)
+            else:
+                out_val = []
+                out_val.append(self.value_from_node.name)
+                out_val.append(self.extra_data[len(self.extra_data)-1])
+            if self.extra_data[1] == 'id' and 'heat' in tags:
+                out_str['get_id'] = out_val[0]
+            else:
+                out_str[self.type] = out_val
+        elif self.type == CONCAT:
+            out_str = {}
+            out_list = []
+            for item in self.extra_data:
+                item_str, item_value = item._get_value(tags)
+                out_list.append(item_str)
+            out_str[self.type] = out_list
+        else:
+            out_str = {}
+            out_str[self.type] = copy.deepcopy(self.extra_data)
+        return out_str
+
+    def _get_function_result(self):
+        return self.result
+
+class Value(object):
+    def __init__(self, prop_type, value):
+        self.type = prop_type.name
+        self.type_obj = copy.deepcopy(prop_type)
+        self.raw_value = value
+        self.value = None
+        self.function = _is_function(value)
+
+        if self.function is None:
+            self.value = self.type_obj._format_value(value)
+
+    def _update_function_reference(self, temp, self_node=None, self_item=None):
+        if self.value is not None:
+            self.type_obj._update_function_reference(temp, self.value, self_node, self_item)
+        if self.function is not None:
+            self.function._update_function_reference(temp, self_node, self_item)
+
+    def _update_prefix(self, prefix):
+        if self.value is not None:
+            self.type_obj._update_prefix(prefix, self.value)
+        if self.function is not None:
+            self.function._update_prefix(prefix)
+
+    def _get_value(self, tags=''):
+        if self.function is not None:
+            return self.function._get_value(tags)
+        if self.value is not None:
+            return self.type_obj._get_value(self.value, tags)
+
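+
+# Illustrative behaviour sketch (added for documentation; the literal values are
+# placeholders, not part of the module's API):
+#
+#     _is_function({'get_input': 'image_name'})            # FunctionValue, type GET_INPUT
+#     _is_function({'concat': ['http://', 'example.com']}) # FunctionValue, type CONCAT
+#     _is_function('plain string')                          # None -> treated as a literal
+#
+# Value wraps both cases: when `function` is None the raw value is formatted by the
+# DataType; otherwise _get_value() delegates to the FunctionValue, which returns the
+# function's dict representation together with the FUNCTION state whenever the
+# referenced input, property, or attribute cannot be resolved yet.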
\ No newline at end of file
diff --git a/app/toscalib/templates/value.pyc b/app/toscalib/templates/value.pyc
Binary files differ
new file mode 100644
index 0000000..00f27d7
--- /dev/null
+++ b/app/toscalib/templates/value.pyc