From 5699eb248346eb6dd59f42605aeb56af41b15cab Mon Sep 17 00:00:00 2001 From: Serban Jora Date: Tue, 29 Aug 2017 15:52:57 -0400 Subject: Add initial ATT tosca checker tool Addressed license headers and copyright owner issues Addressed project folder name Issue-ID: MODELING-7 Change-Id: I150784c5871bb6093ff0a6615639088bc2e0c496 Signed-off-by: Serban Jora --- .gitreview | 5 + javatoscachecker/LICENSE | 40 + javatoscachecker/checker/pom.xml | 201 ++ .../org/onap/tosca/checker/CSARRepository.java | 282 ++ .../java/org/onap/tosca/checker/Canonicals.java | 200 ++ .../main/java/org/onap/tosca/checker/Catalog.java | 459 +++ .../org/onap/tosca/checker/CatalogException.java | 29 + .../main/java/org/onap/tosca/checker/Checker.java | 3661 ++++++++++++++++++++ .../org/onap/tosca/checker/CheckerException.java | 30 + .../java/org/onap/tosca/checker/CommonLocator.java | 156 + .../java/org/onap/tosca/checker/Construct.java | 34 + .../src/main/java/org/onap/tosca/checker/Data.java | 923 +++++ .../main/java/org/onap/tosca/checker/Facet.java | 49 + .../main/java/org/onap/tosca/checker/Messages.java | 54 + .../main/java/org/onap/tosca/checker/Paths.java | 96 + .../main/java/org/onap/tosca/checker/Report.java | 115 + .../java/org/onap/tosca/checker/Repository.java | 62 + .../main/java/org/onap/tosca/checker/Stage.java | 33 + .../main/java/org/onap/tosca/checker/Target.java | 109 + .../java/org/onap/tosca/checker/TargetError.java | 55 + .../java/org/onap/tosca/checker/TargetLocator.java | 34 + .../java/org/onap/tosca/checker/Workflows.java | 287 ++ .../onap/tosca/checker/annotations/Catalogs.java | 49 + .../org/onap/tosca/checker/annotations/Checks.java | 42 + .../onap/tosca/checker/annotations/Validates.java | 41 + .../tosca/checker/annotations/package-info.java | 47 + .../org/onap/tosca/checker/model/Artifact.java | 32 + .../org/onap/tosca/checker/model/ArtifactType.java | 42 + .../onap/tosca/checker/model/ArtifactTypes.java | 18 + .../org/onap/tosca/checker/model/Artifacts.java | 18 + 
.../org/onap/tosca/checker/model/Attribute.java | 34 + .../org/onap/tosca/checker/model/Attributes.java | 18 + .../tosca/checker/model/AttributesAssignments.java | 21 + .../org/onap/tosca/checker/model/Capabilities.java | 22 + .../checker/model/CapabilitiesAssignments.java | 22 + .../org/onap/tosca/checker/model/Capability.java | 61 + .../tosca/checker/model/CapabilityAssignment.java | 39 + .../onap/tosca/checker/model/CapabilityType.java | 36 + .../onap/tosca/checker/model/CapabilityTypes.java | 18 + .../org/onap/tosca/checker/model/Constraint.java | 41 + .../org/onap/tosca/checker/model/Constraints.java | 19 + .../org/onap/tosca/checker/model/DataType.java | 40 + .../org/onap/tosca/checker/model/DataTypes.java | 18 + .../org/onap/tosca/checker/model/EntrySchema.java | 27 + .../java/org/onap/tosca/checker/model/Group.java | 42 + .../org/onap/tosca/checker/model/GroupType.java | 56 + .../org/onap/tosca/checker/model/GroupTypes.java | 18 + .../java/org/onap/tosca/checker/model/Groups.java | 18 + .../java/org/onap/tosca/checker/model/Import.java | 31 + .../java/org/onap/tosca/checker/model/Imports.java | 19 + .../java/org/onap/tosca/checker/model/Input.java | 43 + .../java/org/onap/tosca/checker/model/Inputs.java | 19 + .../onap/tosca/checker/model/InterfaceType.java | 58 + .../onap/tosca/checker/model/InterfaceTypes.java | 18 + .../org/onap/tosca/checker/model/Metadata.java | 20 + .../java/org/onap/tosca/checker/model/Models.java | 86 + .../org/onap/tosca/checker/model/NodeFilter.java | 28 + .../org/onap/tosca/checker/model/NodeTemplate.java | 60 + .../onap/tosca/checker/model/NodeTemplates.java | 18 + .../org/onap/tosca/checker/model/NodeType.java | 55 + .../org/onap/tosca/checker/model/NodeTypes.java | 18 + .../org/onap/tosca/checker/model/Operation.java | 32 + .../org/onap/tosca/checker/model/Operations.java | 18 + .../java/org/onap/tosca/checker/model/Outputs.java | 19 + .../org/onap/tosca/checker/model/Parameter.java | 45 + 
.../org/onap/tosca/checker/model/Policies.java | 18 + .../java/org/onap/tosca/checker/model/Policy.java | 42 + .../org/onap/tosca/checker/model/PolicyType.java | 89 + .../org/onap/tosca/checker/model/PolicyTypes.java | 18 + .../org/onap/tosca/checker/model/Properties.java | 18 + .../tosca/checker/model/PropertiesAssignments.java | 22 + .../org/onap/tosca/checker/model/Property.java | 41 + .../java/org/onap/tosca/checker/model/Range.java | 32 + .../tosca/checker/model/RelationshipTemplate.java | 45 + .../tosca/checker/model/RelationshipTemplates.java | 18 + .../onap/tosca/checker/model/RelationshipType.java | 48 + .../tosca/checker/model/RelationshipTypes.java | 18 + .../org/onap/tosca/checker/model/Repositories.java | 19 + .../org/onap/tosca/checker/model/Repository.java | 55 + .../org/onap/tosca/checker/model/Requirement.java | 50 + .../tosca/checker/model/RequirementAssignment.java | 55 + .../org/onap/tosca/checker/model/Requirements.java | 19 + .../checker/model/RequirementsAssignments.java | 19 + .../onap/tosca/checker/model/ServiceTemplate.java | 70 + .../java/org/onap/tosca/checker/model/Status.java | 23 + .../org/onap/tosca/checker/model/Substitution.java | 54 + .../org/onap/tosca/checker/model/TOSCAMap.java | 25 + .../org/onap/tosca/checker/model/TOSCAObject.java | 63 + .../org/onap/tosca/checker/model/TOSCAProxy.java | 179 + .../org/onap/tosca/checker/model/TOSCASeq.java | 24 + .../tosca/checker/model/TemplateInterface.java | 80 + .../tosca/checker/model/TemplateInterfaces.java | 19 + .../onap/tosca/checker/model/TopologyTemplate.java | 44 + .../onap/tosca/checker/model/TypeInterface.java | 47 + .../onap/tosca/checker/model/TypeInterfaces.java | 19 + .../org/onap/tosca/checker/model/package-info.java | 30 + .../java/org/onap/tosca/checker/package-info.java | 142 + .../org/onap/tosca/checker/processing/JSP.java | 659 ++++ .../org/onap/tosca/checker/processing/Process.java | 45 + .../tosca/checker/processing/ProcessBuilder.java | 38 + 
.../onap/tosca/checker/processing/Processor.java | 25 + .../checker/processing/ProcessorException.java | 43 + .../onap/tosca/checker/processing/TargetInfo.java | 32 + .../tosca/checker/processing/package-info.java | 23 + .../onap/tosca/checker/messages_en_US.properties | 18 + .../main/resources/tosca/tosca-common-types.yaml | 668 ++++ .../main/resources/tosca/tosca-examples-types.yaml | 117 + .../main/resources/tosca/tosca-network-types.yaml | 103 + .../src/main/resources/tosca/tosca-nfv-types.yaml | 143 + .../resources/tosca/tosca_simple_yaml_1_0.grammar | 1262 +++++++ .../resources/tosca/tosca_simple_yaml_1_1.grammar | 1646 +++++++++ .../java/org/onap/tosca/checker/test/CSAR.java | 35 + .../src/test/tosca/dcae_uservice_tosca.yaml | 653 ++++ .../checker/src/test/tosca/mini_ceil_tosca.yaml | 108 + .../checker/src/test/tosca/ucpe_schema.yml | 403 +++ .../checker/src/test/tosca/workflow_1.yaml | 46 + javatoscachecker/kwalify/LICENSE | 20 + javatoscachecker/kwalify/pom.xml | 104 + .../src/main/java/kwalify/BaseException.java | 51 + .../main/java/kwalify/CommandOptionException.java | 33 + .../kwalify/src/main/java/kwalify/Defaultable.java | 18 + .../src/main/java/kwalify/DefaultableHashMap.java | 48 + .../main/java/kwalify/InvalidPathException.java | 23 + .../main/java/kwalify/InvalidTypeException.java | 21 + .../src/main/java/kwalify/KwalifyException.java | 20 + .../main/java/kwalify/KwalifyRuntimeException.java | 19 + .../kwalify/src/main/java/kwalify/Main.java | 305 ++ .../kwalify/src/main/java/kwalify/Messages.java | 51 + .../src/main/java/kwalify/MetaValidator.java | 475 +++ .../kwalify/src/main/java/kwalify/Parser.java | 19 + .../src/main/java/kwalify/PlainYamlParser.java | 870 +++++ .../kwalify/src/main/java/kwalify/Rule.java | 673 ++++ .../src/main/java/kwalify/SchemaException.java | 22 + .../src/main/java/kwalify/SyntaxException.java | 28 + .../kwalify/src/main/java/kwalify/Types.java | 107 + .../kwalify/src/main/java/kwalify/Util.java | 646 ++++ 
.../src/main/java/kwalify/ValidationException.java | 26 + .../kwalify/src/main/java/kwalify/Validator.java | 415 +++ .../kwalify/src/main/java/kwalify/YamlParser.java | 156 + .../src/main/java/kwalify/YamlSyntaxException.java | 23 + .../kwalify/src/main/java/kwalify/YamlUtil.java | 62 + .../src/main/java/kwalify/messages.properties | 110 + .../src/main/resources/kwalify/messages.properties | 110 + javatoscachecker/pom.xml | 41 + javatoscachecker/service/README | 6 + javatoscachecker/service/application.properties | 8 + javatoscachecker/service/pom.xml | 148 + .../onap/tosca/checker/service/CachedTarget.java | 99 + .../org/onap/tosca/checker/service/Catalogs.java | 117 + .../tosca/checker/service/CheckerController.java | 260 ++ .../onap/tosca/checker/service/CheckerEngine.java | 89 + .../checker/service/NoSuchCatalogException.java | 24 + .../checker/service/TargetConflictException.java | 24 + .../tosca/checker/service/TemplateChecker.java | 121 + .../service/src/main/resources/checker.xml | 21 + 155 files changed, 21265 insertions(+) create mode 100644 .gitreview create mode 100644 javatoscachecker/LICENSE create mode 100644 javatoscachecker/checker/pom.xml create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/CSARRepository.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Canonicals.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Catalog.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/CatalogException.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Checker.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/CheckerException.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/CommonLocator.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Construct.java create mode 100644 
javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Data.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Facet.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Messages.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Paths.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Report.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Repository.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Stage.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Target.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/TargetError.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/TargetLocator.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Workflows.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/annotations/Catalogs.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/annotations/Checks.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/annotations/Validates.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/annotations/package-info.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Artifact.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/ArtifactType.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/ArtifactTypes.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Artifacts.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Attribute.java create mode 100644 
javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Attributes.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/AttributesAssignments.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Capabilities.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/CapabilitiesAssignments.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Capability.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/CapabilityAssignment.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/CapabilityType.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/CapabilityTypes.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Constraint.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Constraints.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/DataType.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/DataTypes.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/EntrySchema.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Group.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/GroupType.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/GroupTypes.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Groups.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Import.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Imports.java create mode 100644 
javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Input.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Inputs.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/InterfaceType.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/InterfaceTypes.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Metadata.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Models.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/NodeFilter.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/NodeTemplate.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/NodeTemplates.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/NodeType.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/NodeTypes.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Operation.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Operations.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Outputs.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Parameter.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Policies.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Policy.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/PolicyType.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/PolicyTypes.java create mode 100644 
javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Properties.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/PropertiesAssignments.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Property.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Range.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/RelationshipTemplate.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/RelationshipTemplates.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/RelationshipType.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/RelationshipTypes.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Repositories.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Repository.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Requirement.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/RequirementAssignment.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Requirements.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/RequirementsAssignments.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/ServiceTemplate.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Status.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Substitution.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/TOSCAMap.java create mode 100644 
javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/TOSCAObject.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/TOSCAProxy.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/TOSCASeq.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/TemplateInterface.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/TemplateInterfaces.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/TopologyTemplate.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/TypeInterface.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/TypeInterfaces.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/package-info.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/package-info.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/processing/JSP.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/processing/Process.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/processing/ProcessBuilder.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/processing/Processor.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/processing/ProcessorException.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/processing/TargetInfo.java create mode 100644 javatoscachecker/checker/src/main/java/org/onap/tosca/checker/processing/package-info.java create mode 100644 javatoscachecker/checker/src/main/resources/org/onap/tosca/checker/messages_en_US.properties create mode 100644 javatoscachecker/checker/src/main/resources/tosca/tosca-common-types.yaml 
create mode 100644 javatoscachecker/checker/src/main/resources/tosca/tosca-examples-types.yaml create mode 100644 javatoscachecker/checker/src/main/resources/tosca/tosca-network-types.yaml create mode 100644 javatoscachecker/checker/src/main/resources/tosca/tosca-nfv-types.yaml create mode 100644 javatoscachecker/checker/src/main/resources/tosca/tosca_simple_yaml_1_0.grammar create mode 100644 javatoscachecker/checker/src/main/resources/tosca/tosca_simple_yaml_1_1.grammar create mode 100644 javatoscachecker/checker/src/test/java/org/onap/tosca/checker/test/CSAR.java create mode 100644 javatoscachecker/checker/src/test/tosca/dcae_uservice_tosca.yaml create mode 100644 javatoscachecker/checker/src/test/tosca/mini_ceil_tosca.yaml create mode 100644 javatoscachecker/checker/src/test/tosca/ucpe_schema.yml create mode 100644 javatoscachecker/checker/src/test/tosca/workflow_1.yaml create mode 100644 javatoscachecker/kwalify/LICENSE create mode 100644 javatoscachecker/kwalify/pom.xml create mode 100644 javatoscachecker/kwalify/src/main/java/kwalify/BaseException.java create mode 100644 javatoscachecker/kwalify/src/main/java/kwalify/CommandOptionException.java create mode 100644 javatoscachecker/kwalify/src/main/java/kwalify/Defaultable.java create mode 100644 javatoscachecker/kwalify/src/main/java/kwalify/DefaultableHashMap.java create mode 100644 javatoscachecker/kwalify/src/main/java/kwalify/InvalidPathException.java create mode 100644 javatoscachecker/kwalify/src/main/java/kwalify/InvalidTypeException.java create mode 100644 javatoscachecker/kwalify/src/main/java/kwalify/KwalifyException.java create mode 100644 javatoscachecker/kwalify/src/main/java/kwalify/KwalifyRuntimeException.java create mode 100644 javatoscachecker/kwalify/src/main/java/kwalify/Main.java create mode 100644 javatoscachecker/kwalify/src/main/java/kwalify/Messages.java create mode 100644 javatoscachecker/kwalify/src/main/java/kwalify/MetaValidator.java create mode 100644 
javatoscachecker/kwalify/src/main/java/kwalify/Parser.java create mode 100644 javatoscachecker/kwalify/src/main/java/kwalify/PlainYamlParser.java create mode 100644 javatoscachecker/kwalify/src/main/java/kwalify/Rule.java create mode 100644 javatoscachecker/kwalify/src/main/java/kwalify/SchemaException.java create mode 100644 javatoscachecker/kwalify/src/main/java/kwalify/SyntaxException.java create mode 100644 javatoscachecker/kwalify/src/main/java/kwalify/Types.java create mode 100644 javatoscachecker/kwalify/src/main/java/kwalify/Util.java create mode 100644 javatoscachecker/kwalify/src/main/java/kwalify/ValidationException.java create mode 100644 javatoscachecker/kwalify/src/main/java/kwalify/Validator.java create mode 100644 javatoscachecker/kwalify/src/main/java/kwalify/YamlParser.java create mode 100644 javatoscachecker/kwalify/src/main/java/kwalify/YamlSyntaxException.java create mode 100644 javatoscachecker/kwalify/src/main/java/kwalify/YamlUtil.java create mode 100644 javatoscachecker/kwalify/src/main/java/kwalify/messages.properties create mode 100644 javatoscachecker/kwalify/src/main/resources/kwalify/messages.properties create mode 100644 javatoscachecker/pom.xml create mode 100644 javatoscachecker/service/README create mode 100644 javatoscachecker/service/application.properties create mode 100644 javatoscachecker/service/pom.xml create mode 100644 javatoscachecker/service/src/main/java/org/onap/tosca/checker/service/CachedTarget.java create mode 100644 javatoscachecker/service/src/main/java/org/onap/tosca/checker/service/Catalogs.java create mode 100644 javatoscachecker/service/src/main/java/org/onap/tosca/checker/service/CheckerController.java create mode 100644 javatoscachecker/service/src/main/java/org/onap/tosca/checker/service/CheckerEngine.java create mode 100644 javatoscachecker/service/src/main/java/org/onap/tosca/checker/service/NoSuchCatalogException.java create mode 100644 
javatoscachecker/service/src/main/java/org/onap/tosca/checker/service/TargetConflictException.java create mode 100644 javatoscachecker/service/src/main/java/org/onap/tosca/checker/service/TemplateChecker.java create mode 100644 javatoscachecker/service/src/main/resources/checker.xml diff --git a/.gitreview b/.gitreview new file mode 100644 index 0000000..25242d0 --- /dev/null +++ b/.gitreview @@ -0,0 +1,5 @@ +[gerrit] +host=gerrit.onap.org +port=29418 +project=modeling/toscaparsers.git + diff --git a/javatoscachecker/LICENSE b/javatoscachecker/LICENSE new file mode 100644 index 0000000..7cd961e --- /dev/null +++ b/javatoscachecker/LICENSE @@ -0,0 +1,40 @@ +org.onap.tosca checker project is offered under Apache 2.0 license + +Copyright [2017] [onap.org] + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + + +org.onap.tosca checker project includes code from kuwata-lab (kwalify), which +is licensed under the MIT license, as detailed below. + +copyright(c) 2005 kuwata-lab all rights reserved. 
+ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
+ diff --git a/javatoscachecker/checker/pom.xml b/javatoscachecker/checker/pom.xml new file mode 100644 index 0000000..a408bc6 --- /dev/null +++ b/javatoscachecker/checker/pom.xml @@ -0,0 +1,201 @@ + + 4.0.0 + + org.onap.tosca + checker + 0.0.1-SNAPSHOT + + Checker + jar + Checker + + + src/main/java + + + maven-compiler-plugin + 3.1 + + 1.8 + 1.8 + ${project.build.sourceEncoding} + + + + org.apache.maven.plugins + maven-dependency-plugin + 2.10 + + + copy-dependencies + package + + copy-dependencies + + + ${project.build.directory}/deps + false + false + true + + + + + + org.codehaus.mojo + buildnumber-maven-plugin + 1.4 + + + validate + + create + + + + + false + false + + + + org.apache.maven.plugins + maven-jar-plugin + 2.1 + + + + true + + + ${buildNumber} + + + + + + + org.apache.maven.plugins + maven-assembly-plugin + 2.6 + + + jar-with-dependencies + + + + org.onap.tosca.checker.Checker + + + ${buildNumber} + + + + + + make-assembly + package + + single + + + + + + + org.apache.maven.plugins + maven-jar-plugin + 3.0.2 + + + + test-jar + + + + + + org.apache.maven.plugins + maven-javadoc-plugin + 3.0.0-M1 + + API for ${project.name} ${project.version} + API for ${project.name} ${project.version} + + + + + com.blackducksoftware.integration + hub-maven-plugin + 2.0.0 + false + + ${project.name} + ${project.basedir} + false + + + + create-bdio-file + package + + build-bom + + + + + + + + + commons-io + commons-io + [2.5,) + + + commons-cli + commons-cli + [1.3,) + + + commons-jxpath + commons-jxpath + [1.3,) + + + commons-lang + commons-lang + [2.6,) + + + com.google.guava + guava + [21.0,) + + + org.yaml + snakeyaml + [1.17,) + + + com.fasterxml.jackson.core + jackson-core + [2.7.5,) + + + com.fasterxml.jackson.core + jackson-databind + [2.7.5,) + + + org.reflections + reflections + [0.9.11,) + + + org.onap.tosca + kwalify + 0.0.1-SNAPSHOT + + + diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/CSARRepository.java 
b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/CSARRepository.java new file mode 100644 index 0000000..6375185 --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/CSARRepository.java @@ -0,0 +1,282 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. + */ +package org.onap.tosca.checker; + +import java.io.IOException; +import java.io.UnsupportedEncodingException; +import java.io.Reader; +import java.io.StringReader; +import java.io.InputStream; +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.InputStreamReader; +import java.io.BufferedReader; + +import java.net.URI; +import java.net.URL; +import java.net.MalformedURLException; + +import java.util.logging.Level; +import java.util.logging.Logger; + +import java.util.Map; +import java.util.HashMap; +import java.util.Properties; +import java.util.Collections; + +import java.util.zip.ZipInputStream; +import java.util.zip.ZipEntry; + +import java.util.function.BiFunction; + +import org.apache.commons.io.IOUtils; + +/** + * Handles targets located within the same CSAR file. + * This is where the knowledge about the structure of a CSAR file should be placed. + * TOSCA 1.0 CSAR archive compliant. 
+ */ +public class CSARRepository extends Repository { + + + private String metaEntryName = "TOSCA-Metadata/TOSCA.meta"; + + private byte[] data; + private Properties meta = new Properties(); + private Map entries = null; + + public CSARRepository(String theName, URI theRoot) throws IOException { + super(theName, theRoot); + load(); + } + + private void load() throws IOException { + InputStream is = null; + try { + is = this.getRoot().toURL().openStream(); + this.data = IOUtils.toByteArray(is); + } + finally { + if (is != null) { + try { + is.close(); + } + catch(IOException iox) {} + } + } + } + + //one should read the meta-inf/MANIFEST.MF file before deciding that a file is text + private Object processData(BiFunction theProcessor) { + + ZipInputStream archiveInputStream = new ZipInputStream(new ByteArrayInputStream(this.data)); + Object result = null; + try { + ZipEntry archiveEntry = null; + while ((archiveEntry = archiveInputStream.getNextEntry()) != null) { + result = theProcessor.apply(archiveEntry, archiveInputStream); + if (null != result) + return result; + archiveInputStream.closeEntry(); + } + } + catch (IOException iox) { + log.log(Level.WARNING, "Failed to read archive", iox); + } + finally { + try { + archiveInputStream.close(); + } + catch (IOException iox) { + } + } + return result; + } + + /* this will be useful when processing new style meta information .. 
*/ + private Object processMeta() { + return + processData( (entry,stream) -> { + if (!entry.getName().equals(this.metaEntryName)) + return null; + + return readMeta(stream); + }); + } + + private Boolean readMeta(InputStream theStream) { + BufferedReader reader = null; + try { + reader = new BufferedReader(new InputStreamReader(theStream, "UTF-8")); + this.meta.load(reader); + return Boolean.TRUE; + } + catch(IOException iox) { + log.log(Level.WARNING, "Failed to read archive meta entry", iox); + return Boolean.FALSE; + } + finally { + /* + if (reader != null) { + try { + reader.close(); + } + catch (IOException iox) { + } + } + */ + //!!Do not close as it is used with processData which does the entry close itself + } + } + + /* + private Boolean readMeta() { + BufferedReader reader = null; + try { + reader = new BufferedReader(new InputStreamReader(stream, "UTF-8")); + String line = null; + while ((line = reader.readLine()) != null) { + //TODO: + } + return Boolean.TRUE; + } + catch (IOException iox) { + log.log(Level.WARNING, "Failed to read archive meta entry", iox); + return Boolean.FALSE; + } + finally { + if (reader != null) { + try { + reader.close(); + } + catch (IOException iox) { + } + } + //!!Do not close as it is used with processData which does the entry close itself + } + } + */ + + private Map entries() { + if (this.entries == null) { + this.entries = new HashMap(); + processData( (entry,stream) -> { + URI entryURI = this.rootURI.resolve(entry.getName()); + this.entries.put(entryURI, new CsarTarget(entry.getName(), entryURI)); + + if (entry.getName().equals(this.metaEntryName)) + readMeta(stream); + + return null; + }); + } + return this.entries; + } + + public Target mainTarget() { + return entries().get(rootURI.resolve(this.meta.getProperty("Entry-Definitions"))); + } + + public Iterable targets() { + return entries().values(); + } + + /** */ + public Target resolve(URI theURI) { + return entries().get(theURI); + } + + public TargetLocator 
getTargetLocator() { + return new CSARTargetLocator(); + } + + /** + */ + private class CSARTargetLocator implements TargetLocator { + + /** */ + public boolean addSearchPath(URI theURI) { + return false; + } + + /** */ + public boolean addSearchPath(String thePath) { + return false; + } + + /** */ + public Iterable searchPaths() { + return Collections.singleton(CSARRepository.this.rootURI); + } + + /** */ + public Target resolve(String theName) { + Target t = entries().get(CSARRepository.this.rootURI.resolve(theName)); + if (t == null) { + //fallback: check relative to the main target folder + t = entries().get(CSARRepository.this.mainTarget().getLocation().resolve(theName)); + } + return t; + } + } + + /** */ + private class CsarTarget extends Target { + + private String content; + + private CsarTarget(String theName, URI theUri) { + super(theName, theUri); + } + + private String content() { + if (this.content == null) { + this.content = (String)processData( (entry,stream) -> { + //go over the entries and find the one with a matching name + ByteArrayOutputStream out = null; + if (entry.getName().equals(getName())) { + byte[] buffer = new byte[4096]; + int len = 0; + out = new ByteArrayOutputStream(); + try { + while ((len = stream.read(buffer)) > 0) { + out.write(buffer, 0, len); + } + log.info(entry.getName() + ": " + out.toString("UTF-8")); + } + catch (IOException iox) { + log.warning("Failed to read entry data: " + iox); + return out = null; + } + } + //!!Do not close as it is used with processData which does the entry close itself + + try { + return (out != null) ? 
out.toString("UTF-8") : null; + } + catch (UnsupportedEncodingException uex) { + log.warning("Failed to process entry data as string: " + uex); + return ""; + } + }); + } + return this.content; + } + + public Reader open() throws IOException { + return new StringReader(content()); + } + } + +} + diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Canonicals.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Canonicals.java new file mode 100644 index 0000000..3c12844 --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Canonicals.java @@ -0,0 +1,200 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. 
+ */ +package org.onap.tosca.checker; + +import java.util.List; +import java.util.LinkedList; +import java.util.Map; +import java.util.HashMap; +import java.util.TreeMap; + +import java.util.logging.Level; +import java.util.logging.Logger; + +import java.util.regex.Pattern; +import java.util.regex.Matcher; + +import kwalify.Validator; +import kwalify.Rule; +import kwalify.Types; + +import com.google.common.collect.Table; +import com.google.common.collect.HashBasedTable; + +import org.apache.commons.jxpath.JXPathContext; +import org.apache.commons.jxpath.JXPathException; + +import org.onap.tosca.checker.annotations.Validates; + +/** + * Constructs and collects the canonical form during the validation step (syntax check) based on the short form + * indicator from the grammar specification. + * The TOSCA spec indicates a 'short' form for most of the constructs but we want checking to be able to work on + * the canonical form and not to have to handle the short form explicitly. Additionally tis makes for a simpler + * grammar specification. 
+ */ +public class Canonicals { + + private Logger log = Logger.getLogger("com.att.research.asc.chcker.Canonicals"); + + private Table canonicals = null; //HashBasedTable.create(); + + @Validates(rule="", timing=Validates.Timing.pre) + public boolean pre_validate_short_form( + Object theValue, Rule theRule, Validator.ValidationContext theContext) { + + String hint = theRule.getShort(); + if (theValue != null && + hint != null) { + + log.finer("Attempting canonical at " + theContext.getPath() + ", rule " + theRule.getName()); + + Object canonical = null; + //if the canonical form requires a collection + if (Types.isCollectionType(theRule.getType())) { + //and the actual value isn't one + if( !(theValue instanceof Map || theValue instanceof List)) { + //used to use singleton map/list here (was good for catching errors) + //but there is the possibility of short forms within short forms so + //the created canonicals need to accomodate other values. + if (Types.isMapType(theRule.getType())) { + canonical = new HashMap(); + ((Map)canonical).put(hint, theValue); + } + else { + //the hint is irrelevant here but we should impose a value when the target is a list + canonical = new LinkedList(); + ((List)canonical).add(theValue); + } + } + else { + //we can accomodate: + // map to list of map transformation + if (!Types.isMapType(theRule.getType()) /* a seq */ && + theValue instanceof Map) { + canonical = new LinkedList(); + ((List)canonical).add(theValue); + } + else { + log.fine("Grammar for rule " + theRule.getName() + " (at " + theContext.getPath() + ") would require unsupported short form transformation: " + theValue.getClass() + " to " + theRule.getType()); + return false; + } + } + + int errc = theContext.errorCount(); + //validateRule(canonical, rule, context); + if (errc != theContext.errorCount()) { + //this would indicate that the grammar is not well specified + log.warning("Short notation for " + theRule.getName() + " through " + hint + " at " + 
theContext.getPath() + " failed validation"); + } + else { + log.finer("Short notation for " + theRule.getName() + " through " + hint + " at " + theContext.getPath() + " passed validation. Canonical form is " + canonical); + + if (this.canonicals != null) + this.canonicals.put( + ((Checker.TOSCAValidator)theContext.getValidator()).getTarget(), + theContext.getPath(), canonical); + else { + applyCanonical( + ((Checker.TOSCAValidator)theContext.getValidator()).getTarget().getTarget(), + theContext.getPath(), canonical); + } + return true; + } + } + else { + log.info("Grammar for rule " + theRule.getName() + " (at " + theContext.getPath() + ") would require unsupported short form transformation: " + theValue.getClass() + " to " + theRule.getType()); + return false; + } + } + return false; + } + + + protected Object applyCanonical(Object theTarget, + String thePath, + Object theCanonical) { + + //Object canonical = theCanonicals.get(thePath); + //if (canonical != null) { + String path = patchWhitespaces( + patchIndexes(thePath)); + try { + JXPathContext.newContext(theTarget).setValue(path, theCanonical); + log.finer("Applied canonical form at: " + path); + + // if (doRemove) + // theCanonicals.remove(thePath); + } + catch (JXPathException jxpx) { + log.log(Level.WARNING, "Failed to apply canonical to " + theTarget, jxpx); + } + //} + return theCanonical; + } + + public void applyCanonicals(Target theTarget) { + + if (this.canonicals == null) { + return; + } + + Map targetCanonicals = this.canonicals.row(theTarget); + if (targetCanonicals != null) { + log.finest("applying canonicals: " + targetCanonicals); + for(Map.Entry canonicalEntry: targetCanonicals.entrySet()) { + applyCanonical(theTarget.getTarget(), canonicalEntry.getKey(), canonicalEntry.getValue()); + } + } + } + + /** Given that we remembered the canonical forms that were needed during + * validation to replace the short forms we can apply them to the target + * yaml. 
+ * We take advantage here of the fact that the context path maintained + * during validation is compatible with (j)xpath, with the exception of + * sequence/array indentation .. + */ + + private Pattern indexPattern = Pattern.compile("/\\p{Digit}+"), + spacePattern = Pattern.compile("\\s"); + + private String patchIndexes(CharSequence thePath) { + Matcher m = indexPattern.matcher(thePath); + StringBuffer path = new StringBuffer(); + while (m.find()) { + String index = m.group(); + index = "[" + (Integer.valueOf(index.substring(1)).intValue() + 1) + "]"; + m.appendReplacement(path, m.quoteReplacement(index)); + } + m.appendTail(path); + return path.toString(); + } + + + private String patchWhitespaces(String thePath) { + String[] elems = thePath.split("/"); + StringBuffer path = new StringBuffer(); + for (int i = 0; i < elems.length; i++) { + if (spacePattern.matcher(elems[i]).find()) + path.append("[@name='") + .append(elems[i]) + .append("']"); + else + path.append("/") + .append(elems[i]); + } + return path.toString(); + } + +} diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Catalog.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Catalog.java new file mode 100644 index 0000000..35d0d1c --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Catalog.java @@ -0,0 +1,459 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the License for + * the specific language governing permissions and limitations under the License. + */ +package org.onap.tosca.checker; + +import java.util.Iterator; +import java.util.Collection; +import java.util.Comparator; +import java.util.Set; +import java.util.Map; +import java.util.List; +import java.util.EnumMap; +import java.util.HashMap; +import java.util.HashSet; +import java.util.LinkedHashMap; +import java.util.ArrayList; +import java.util.Collections; + +import java.util.stream.Collectors; + +import java.net.URI; + +import com.google.common.base.Predicate; +import com.google.common.base.Function; +import com.google.common.collect.Iterators; +import com.google.common.collect.Table; +import com.google.common.collect.HashBasedTable; + +/* + * Oddball: tracking inputs as data templates could be seen as rather + * odd but we see them as instances of data types, in the same way node + * templates are instances of node types. + */ +public class Catalog { + + /* Type hierarchies are stored as maps from a type name to its definition + * Not the best but easy to follow hierarchies towards their root .. + */ + private EnumMap> types = + new EnumMap>(Construct.class); + /* track templates: we track templates (tye instances) first per target then per contruct. 
+ * This allows us to share the catalog among multiple templates sharign the same type set + */ + private Map>> templates = + new HashMap>>(); + + private Catalog parent; + + public Catalog(Catalog theParent) { + this.parent = theParent; + /* there are no requirement types, they are the same as capability types */ + types.put(Construct.Data, new LinkedHashMap()); + types.put(Construct.Capability, new LinkedHashMap()); + types.put(Construct.Relationship, new LinkedHashMap()); + types.put(Construct.Artifact, new LinkedHashMap()); + types.put(Construct.Interface, new LinkedHashMap()); + types.put(Construct.Node, new LinkedHashMap()); + types.put(Construct.Group, new LinkedHashMap()); + types.put(Construct.Policy, new LinkedHashMap()); + + } + + public Catalog() { + this(null); + } + + public boolean addType(Construct theConstruct, String theName, Map theDef) { + if (hasType(theConstruct, theName)) { + return false; + } + getConstructTypes(theConstruct).put(theName, theDef); + return true; + } + + public Map getTypeDefinition(Construct theConstruct, String theName) { + Map constructTypes = getConstructTypes(theConstruct); + Map typeDef = constructTypes.get(theName); + if (typeDef == null && this.parent != null) { + return this.parent.getTypeDefinition(theConstruct, theName); + } + return typeDef; + } + + public boolean hasType(Construct theConstruct, String theName) { + Map constructTypes = getConstructTypes(theConstruct); + boolean res = constructTypes.containsKey(theName); + if (!res && this.parent != null) { + res = this.parent.hasType(theConstruct, theName); + } + return res; + } + + public Map getConstructTypes(Construct theConstruct) { + Map constructTypes = this.types.get(theConstruct); + if (null == constructTypes) { + throw new RuntimeException("Something worse is cooking here!", + new CatalogException("No types for construct " + theConstruct)); + } + return constructTypes; + } + + protected Iterator> + typesIterator(Construct theConstruct) { + List> 
constructTypes = + new ArrayList>( + this.types.get(theConstruct).entrySet()); + Collections.reverse(constructTypes); + return (this.parent == null) + ? constructTypes.iterator() + : Iterators.concat(constructTypes.iterator(), + this.parent.typesIterator(theConstruct)); + } + + /* this will iterate through the type hierarchy for the given type, included. + */ + public Iterator> + hierarchy(Construct theConstruct, final String theName) { + return Iterators.filter(typesIterator(theConstruct), + new Predicate>() { + Object next = theName; + public boolean apply(Map.Entry theEntry) { + if (next != null && next.equals(theEntry.getKey())) { + next = theEntry.getValue().get("derived_from"); + return true; + } + else + return false; + } + }); + } + + public boolean isDerivedFrom(Construct theConstruct, String theType, String theBaseType) { + + Iterator> hierachyIterator = + hierarchy(theConstruct, theType); + while (hierachyIterator.hasNext()) { + Map.Entry typeDef = hierachyIterator.next(); + + if (typeDef.getKey().equals(theBaseType)) { + return true; + } + } + return false; + } + + /* We go over the type hierarchy and retain only an iterator over the + * elements of the given facet for each type in the hierarchy. + * We concatenate these iterators and filter out duplicates. + * TODO: cannot just filter out duplicates - a redefinition can refine the one in the base construct so we + * should merge them! + */ + public Iterator facets(Construct theConstruct, + final Facet theFacet, + final String theName) { + return + Iterators.filter( + Iterators.concat( + Iterators.transform( + hierarchy(theConstruct, theName), + new Function, Iterator>() { + public Iterator apply(Map.Entry theEntry) { + Map m = (Map)theEntry.getValue().get(theFacet.name()); + return m == null + ? 
Collections.emptyIterator() + : m.entrySet().iterator(); + } + } + ) + ), + new Predicate() { + Set insts = new HashSet(); + public boolean apply(Map.Entry theEntry) { + return !insts.contains(theEntry.getKey()); + } + } + ); + } + + //no need to specify a construct, only nodes can have requirements + public Iterator requirements(final String theName) { + return + Iterators.concat( + Iterators.transform( + hierarchy(Construct.Node, theName), + new Function, Iterator>() { + public Iterator apply(Map.Entry theEntry) { + List l = (List)theEntry.getValue().get("requirements"); + return l == null + ? Collections.emptyIterator() + : Iterators.concat( + Iterators.transform( + l.iterator(), + new Function> () { + public Iterator apply(Map theEntry) { + return theEntry.entrySet().iterator(); + } + } + ) + ); + } + } + ) + ); + } + + /* Example: find the definition of property 'port' of the node type + * tosca.nodes.Database (properties being a facet of the node construct) + * + * Note: the definition of a facet is cumulative, i.e. more specialized + * definitions contribute (by overwriting) to the + */ + public Map getFacetDefinition(Construct theConstruct, + String theConstructTypeName, + Facet theFacet, + String theName) { + Map def = null; + Iterator> ti = hierarchy(theConstruct, theConstructTypeName); + while (ti.hasNext()) { + //this is where requirements would yield a List .. + Map fset = (Map) + //theFacet.iterator( + ti.next().getValue().get(theFacet.name()); + if (fset != null) { + def = def == null ? fset.get(theName) + : mergeDefinitions(def, fset.get(theName)); + } + } + return def; + } + + public Map getRequirementDefinition(Construct theConstruct, + String theConstructTypeName, + String theName) { + Iterator> ti = hierarchy(theConstruct, theConstructTypeName); + while (ti.hasNext()) { + //this is where requirements yield a List .. 
+ List reqs = (List) + ti.next().getValue().get("requirements"); + for (Map req: reqs) { + Map.Entry reqe = (Map.Entry)req.entrySet().iterator().next(); + if (theName.equals(reqe.getKey())) { + return (Map)reqe.getValue(); + } + } + } + return null; + } + + /* */ + private EnumMap> getTemplates(Target theTarget) { + EnumMap> targetTemplates = templates.get(theTarget); + if (targetTemplates == null) { + targetTemplates = new EnumMap>(Construct.class); + targetTemplates.put(Construct.Data, new LinkedHashMap()); + targetTemplates.put(Construct.Relationship, new LinkedHashMap()); + targetTemplates.put(Construct.Node, new LinkedHashMap()); + targetTemplates.put(Construct.Group, new LinkedHashMap()); + targetTemplates.put(Construct.Policy, new LinkedHashMap()); + + templates.put(theTarget, targetTemplates); + } + return targetTemplates; + } + + public Map getTargetTemplates(Target theTarget, Construct theConstruct) { + return getTemplates(theTarget).get(theConstruct); + } + + public void addTemplate(Target theTarget, Construct theConstruct, String theName, Map theDef) + throws CatalogException { + Map constructTemplates = getTargetTemplates(theTarget, theConstruct); + if (null == constructTemplates) { + throw new CatalogException("No such thing as " + theConstruct + " templates"); + } + if (constructTemplates.containsKey(theName)) { + throw new CatalogException(theConstruct + " template '" + theName + "' re-declaration"); + } + constructTemplates.put(theName, theDef); + } + + public boolean hasTemplate(Target theTarget, Construct theConstruct, String theName) { + Map constructTemplates = getTargetTemplates(theTarget, theConstruct); + return constructTemplates != null && + constructTemplates.containsKey(theName); + } + + public Map getTemplate(Target theTarget, Construct theConstruct, String theName) { + Map constructTemplates = getTargetTemplates(theTarget, theConstruct); + if (constructTemplates != null) + return constructTemplates.get(theName); + else + return null; + 
} + + public static Map mergeDefinitions(Map theAggregate, Map theIncrement) { + if (theIncrement == null) + return theAggregate; + + for(Map.Entry e: (Set)theIncrement.entrySet()) { + theAggregate.putIfAbsent(e.getKey(), e.getValue()); + } + return theAggregate; + } + + /* tracks imports, i.e.targets */ + private LinkedHashMap targets = + new LinkedHashMap(); + /* tracks dependencies between targets, i.e. the 'adjency' matrix defined by + * the 'import' relationship */ + private Table imports = HashBasedTable.create(); + + + /* + * theParent contains an 'include/import' statement pointing to the Target + */ + public boolean addTarget(Target theTarget, Target theParent) { + boolean cataloged = hasTarget(theTarget.getLocation()); + + if(!cataloged) { + targets.put(theTarget.getLocation(), theTarget); + } + + if (theParent != null) { + imports.put(theParent, theTarget, Boolean.TRUE); + } + + return !cataloged; + } + + public boolean hasTarget(URI theLocation) { + return this.targets.containsKey(theLocation) ? true + : this.parent != null ? 
this.parent.hasTarget(theLocation) + : false; + } + + public Target getTarget(URI theLocation) { + Target t = this.targets.get(theLocation); + if (t == null && this.parent != null) { + t = this.parent.getTarget(theLocation); + } + return t; + } + + public Collection targets() { + return targets.values(); + } + + /* Targets that no other targets depend on */ + public Collection topTargets() { + return targets.values() + .stream() + .filter(t -> !imports.containsColumn(t)) + .collect(Collectors.toList()); + + } + + public String importString(Target theTarget) { + return importString(theTarget, " "); + } + + private String importString(Target theTarget, String thePrefix) { + StringBuilder sb = new StringBuilder(""); + Map parents = imports.column(theTarget); + if (parents != null) { + for (Target p: parents.keySet()) { + sb.append(thePrefix) + .append("from ") + .append(p.getLocation()) + .append("\n") + .append(importString(p, thePrefix + " ")); + } + //we only keep the positive relationships + } + return sb.toString(); + } + + /* */ + private class TargetComparator implements Comparator { + + /* @return 1 if there is a dependency path from TargetOne to TargetTwo, -1 otherwise */ + public int compare(Target theTargetOne, Target theTargetTwo) { + if (hasPath(theTargetTwo, theTargetOne)) + return -1; + + if (hasPath(theTargetOne, theTargetTwo)) + return 1; + + return 0; + } + + public boolean hasPath(Target theStart, Target theEnd) { + Map deps = imports.row(theStart); + if (deps.containsKey(theEnd)) + return true; + for (Target dep: deps.keySet()) { + if (hasPath(dep, theEnd)) + return true; + } + return false; + } + } + + public Collection sortedTargets() { + List keys = new ArrayList(this.targets.values()); + Collections.sort(keys, new TargetComparator()); + return keys; + } + + public static void main(String[] theArgs) throws Exception { + + Catalog cat = new Catalog(); + + Target a = new Target("a", new URI("a")), + b = new Target("b", new URI("b")), + c = new 
Target("c", new URI("c")), + d = new Target("d", new URI("d")); + + cat.addTarget(a, null); + cat.addTarget(b, null); + cat.addTarget(c, null); + cat.addTarget(d, null); + + cat.addTarget(b, c); + cat.addTarget(a, c); + cat.addTarget(c, d); + cat.addTarget(a, b); + + //System.out.println(cat.importString(c)); + for (Target t: cat.sortedTargets()) + System.out.println(t); + + Catalog root = new Catalog(); + root.addType(Construct.Node, "_a", Collections.emptyMap()); + root.addType(Construct.Node, "__a", Collections.singletonMap("derived_from", "_a")); + root.addType(Construct.Node, "___a", Collections.singletonMap("derived_from", "_a")); + + Catalog base = new Catalog(root); + base.addType(Construct.Node, "_b", Collections.singletonMap("derived_from", "__a")); + base.addType(Construct.Node, "__b", Collections.singletonMap("derived_from", "_b")); + base.addType(Construct.Node, "__b_", Collections.singletonMap("derived_from", "_a")); + + if (theArgs.length > 0) { + Iterator> ti = + base.hierarchy(Construct.Node, theArgs[0]); + while (ti.hasNext()) { + System.out.println("> " + ti.next().getKey()); + } + } + } +} diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/CatalogException.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/CatalogException.java new file mode 100644 index 0000000..1f6b602 --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/CatalogException.java @@ -0,0 +1,29 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. 
/**
 * Reports errors encountered while cataloging TOSCA constructs, e.g. duplicate template
 * declarations or references to constructs the catalog does not know about.
 */
public class CatalogException extends Exception {

    //Exception is Serializable: pin the serialized form explicitly
    private static final long serialVersionUID = 1L;

    /**
     * @param theMsg description of the error
     * @param theCause underlying cause, preserved for diagnosis
     */
    public CatalogException(String theMsg, Throwable theCause) {
        super(theMsg, theCause);
    }

    /**
     * @param theMsg description of the error
     */
    public CatalogException(String theMsg) {
        super(theMsg);
    }

}
+ */ +package org.onap.tosca.checker; + +import java.lang.reflect.Method; +import java.lang.reflect.InvocationTargetException; + +import java.io.File; +import java.io.InputStream; +import java.io.FileInputStream; +import java.io.Reader; +import java.io.FileReader; +import java.io.InputStreamReader; +import java.io.BufferedReader; +import java.io.IOException; +import java.io.FileNotFoundException; + +import java.net.URI; +import java.net.URISyntaxException; +import java.net.URL; +import java.net.MalformedURLException; + +import java.util.HashMap; +import java.util.TreeMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.ListIterator; +import java.util.Map; +import java.util.EnumMap; +import java.util.List; +import java.util.LinkedList; +import java.util.ArrayList; +import java.util.Set; +import java.util.Properties; +import java.util.Collection; +import java.util.Collections; +import java.util.Arrays; +import java.util.MissingResourceException; +import java.util.logging.Level; +import java.util.logging.Logger; +import java.util.regex.Pattern; +import java.util.regex.Matcher; +import java.util.stream.Collectors; + +import javax.naming.CompositeName; +import javax.naming.InvalidNameException; + +import org.yaml.snakeyaml.Yaml; + +import com.google.common.collect.Maps; +import com.google.common.collect.MapDifference; +import com.google.common.reflect.Invokable; + +import com.google.common.io.CharStreams; + +import com.google.common.collect.Table; +import com.google.common.collect.HashBasedTable; + +import kwalify.YamlParser; +import kwalify.Validator; +import kwalify.Rule; +import kwalify.Types; +import kwalify.SchemaException; +import kwalify.SyntaxException; +import kwalify.ValidationException; + +import org.apache.commons.jxpath.JXPathContext; +import org.apache.commons.jxpath.JXPathException; + +import org.apache.commons.lang.reflect.ConstructorUtils; + +import org.reflections.Reflections; +import org.reflections.util.FilterBuilder; +import 
org.reflections.util.ConfigurationBuilder; +import org.reflections.scanners.TypeAnnotationsScanner; +import org.reflections.scanners.SubTypesScanner; +import org.reflections.scanners.MethodAnnotationsScanner; +import org.reflections.adapters.JavaReflectionAdapter; + +import org.onap.tosca.checker.annotations.Checks; +import org.onap.tosca.checker.annotations.Catalogs; +import org.onap.tosca.checker.annotations.Validates; + +import static org.onap.tosca.checker.Messages.Message; + + +/* + * To consider: model consistency checking happens now along with validation + * (is implemented as part of the validation hooks). It might be better to + * separate the 2 stages and perform all the consistency checking once + * validation is completed. + */ +public class Checker { + + + public static void main(String[] theArgs) { + + if (theArgs.length == 0) { + System.err.println("checker resource_to_validate [processor]*"); + return; + } + + try { + Catalog cat = Checker.check(new File(theArgs[0])); + + for (Target t: cat.targets()) { + System.err.println(t.getLocation() + "\n" + cat.importString(t) + "\n" + t.getReport()); + } + + for (Target t: cat.sortedTargets()) { + System.out.println(t); + } + + } + catch (Exception x) { + x.printStackTrace(); + } + } + + + private Target target = null; //what we're validating at the moment + private Map grammars = new HashMap(); //grammars for the different tosca versions + + private CheckerConfiguration config = new CheckerConfiguration(); + private Catalog catalog; + private TargetLocator locator = new CommonLocator(); + + private Table handlers = HashBasedTable.create(); + private Messages messages; + private Logger log = Logger.getLogger(Checker.class.getName()); + + private static String[] EMPTY_STRING_ARRAY = new String[0]; + + + public Checker() throws CheckerException { + loadGrammars(); + loadAnnotations(); + messages = new Messages(); + } + + /* Need a proper way to indicate where the grammars are and how they should be 
identified + */ + private final String[] grammarFiles = new String[] {"tosca/tosca_simple_yaml_1_0.grammar", + "tosca/tosca_simple_yaml_1_1.grammar"}; + private void loadGrammars() throws CheckerException { + + for (String grammarFile: grammarFiles) { + Target grammarTarget = this.locator.resolve(grammarFile); + if (grammarTarget == null) { + log.warning("Failed to locate grammar " + grammarFile); + continue; + } + + parseTarget(grammarTarget); + if (grammarTarget.getReport().hasErrors()) { + log.warning("Invalid grammar " + grammarFile + ": " + grammarTarget.getReport().toString()); + continue; + } + + List versions = null; + try { + versions = (List) + ((Map) + ((Map) + ((Map)grammarTarget.getTarget()) + .get("mapping")) + .get("tosca_definitions_version")) + .get("enum"); + } + catch (Exception x) { + log.warning("Invalid grammar " + grammarFile + ": cannot locate tosca_definitions_versions"); + } + if (versions == null || versions.isEmpty()) { + log.warning("Invalid grammar " + grammarFile + ": no tosca_definitions_versions specified"); + continue; + } + + for (Object version: versions) { + this.grammars.put(version.toString(), grammarTarget); + } + } + + log.finer("Loaded grammars: " + this.grammars); + } + + private void loadAnnotations() throws CheckerException { + + Reflections reflections = new Reflections( + new ConfigurationBuilder() + .forPackages("org.onap.tosca") + .filterInputsBy(new FilterBuilder() + .include(".*\\.class") + ) + .setScanners(new TypeAnnotationsScanner(), + new SubTypesScanner(), + new MethodAnnotationsScanner()) + .setExpandSuperTypes(false) + //.setMetadataAdapter(new JavaReflectionAdapter()) + ); + + Map refs = new HashMap(); + Set methods = null; + + //very similar but annotatons cannot be handled in a more 'generic' manner + + methods = reflections.getMethodsAnnotatedWith(Checks.class); + for (Method method: methods) { + handlers.put("checks:" + method.getAnnotation(Checks.class).path(), + method, + 
refs.computeIfAbsent(method.getDeclaringClass(), type -> newInstance(type))); + } + + methods = reflections.getMethodsAnnotatedWith(Catalogs.class); + for (Method method: methods) { + handlers.put("catalogs:" + method.getAnnotation(Catalogs.class).path(), + method, + refs.computeIfAbsent(method.getDeclaringClass(), type -> newInstance(type))); + } + + methods = reflections.getMethodsAnnotatedWith(Validates.class); + for (Method method: methods) { + Validates annotation = method.getAnnotation(Validates.class); + handlers.put(annotation.timing() + "-validates:" + annotation.rule(), + method, + refs.computeIfAbsent(method.getDeclaringClass(), type -> newInstance(type))); + } + } + + + private Object newInstance(Class theType) { + try { + return (getClass() == theType) ? this + : theType.newInstance(); + } + catch(Exception x) { + throw new RuntimeException(x); + } + } + + /* + * Lookup one of the handlers, by handler type + */ + public T getHandler(Class theType) { + return (T)handlers.values() + .stream() + .filter(h -> theType.isInstance(h)) + .findFirst() + .orElse(null); + } + + public CheckerConfiguration configuration() { + return this.config; + } + + public void setTargetLocator(TargetLocator theLocator) { + this.locator = theLocator; + } + + public TargetLocator getTargetLocator() { + return this.locator; + } + + public Collection targets() { + if (this.catalog == null) + throw new IllegalStateException("targets are only available after check"); + + return this.catalog.targets(); + } + + public Catalog catalog() { + return this.catalog; + } + + /* a facility for handling all files in a target directory .. 
+ */ + public static Catalog check(File theSource) + throws CheckerException { + + Catalog catalog = new Catalog(commonsCatalog()); + Checker checker = new Checker(); + try { + if (theSource.isDirectory()) { + for (File f: theSource.listFiles()) { + if (f.isFile()) { + checker.check(new Target(theSource.getCanonicalPath(), f.toURI().normalize()), catalog); + } + } + } + else { + checker.check(new Target(theSource.getCanonicalPath(), theSource.toURI().normalize()), catalog); + } + } + catch (IOException iox) { + throw new CheckerException("Failed to initialize target", iox); + } + + return catalog; + } + + /** + * Main checking process entry point. In this case the source is passed to the locator in order to + * obtain a {@link org.onap.tosca.checker.Target target}, and then all other processing stages are performed. + * @param String the string representation of the uri pointing to the document/template to be processed + * @throws CheckerException for any checker encountered error + */ + public void check(String theSource) + throws CheckerException { + check(theSource, buildCatalog()); + } + + /** + * Main checking entry point using a pre-computed Catalog. Same as {@link org.onap.tosca.checker.Chacker#check(String) check} except that the given catalog information is available. i.e. all types available in the given catalog types are +available and the available targets won't be re-processed. + */ + public void check(String theSource, Catalog theCatalog) + throws CheckerException { + Target tgt = + this.locator.resolve(theSource); + if (null == tgt) { + throw new CheckerException("Unable to locate the target " + theSource); + } + + check(tgt, theCatalog); + } + + /** + * Starts the processing after the localization phase, i.e. the Target is obtained/constructed outside the checker. + * @param Target the Target representation of the document/template to be processed. 
The actual target content (yaml + * character string) is obtained by calling {@link org.onap.tosca.checker.Target#open() open} on the target + * @throws CheckerException for any checker encountered error + */ + public void check(Target theTarget) throws CheckerException { + check(theTarget, buildCatalog()); + } + + /** + * + * @param Target the Target representation of the document/template to be processed. The actual target content (yaml + * character string) is obtained by calling {@link org.onap.tosca.checker.Target#open() open} on the target + * @param theCatalog a default catalog providing common construct definitions + * @throws CheckerException for any checker encountered error + */ + public void check(Target theTarget, Catalog theCatalog) throws CheckerException { + + this.catalog = theCatalog; + this.locator.addSearchPath(theTarget.getLocation()); + + if (this.catalog.addTarget(theTarget, null)) { + List targets = parseTarget(theTarget); + if (theTarget.getReport().hasErrors()) + return; + for (Target target: targets) { + this.catalog.addTarget(target, null); + //what about this -> this.catalog.addTarget(target, theTarget); + if (!validateTarget(target).getReport().hasErrors()) { + checkTarget(target); + } + } + } + } + + /** + * Starts the processing after the {@link org.onap.tosca.checker.Staget#parsed parsed} stage. As such the Target must + * have been located (content is available) and {@link org.onap.tosca.checker.Staget#parsed parsed} (the parsed form + * is stored within the Target, {@see org.onap.tosca.checker.Target#getTarget getTarget}). + * The target will be validated (grammar) and chcked (consistency). While the checker uses snakeyaml to parse + * a yaml document using this entry point allows one to use any other yaml parser for a long as it produces a + * compatible representation (java primitive types object representations, Strings, Maps and Lists). + * + * @param theTarget the processing subject, located and parsed. 
+ * @throws CheckerException for any checker encountered error + */ + public void validate(Target theTarget) throws CheckerException { + validate(theTarget, buildCatalog()); + } + + /** + * + * @param theTarget the processing subject, located and parsed. + * @param theCatalog a default catalog providing common construct definitions + * @throws CheckerException + */ + public void validate(Target theTarget, Catalog theCatalog) throws CheckerException { + this.catalog = theCatalog; + this.locator.addSearchPath(theTarget.getLocation()); + + if (this.catalog.addTarget(theTarget, null)) { + if (!validateTarget(theTarget).getReport().hasErrors()) { + checkTarget(theTarget); + } + } + } + + + /* */ + protected List parseTarget(final Target theTarget) + throws CheckerException { + log.entering(getClass().getName(), "parseTarget", theTarget); + + Reader source = null; + try { + source = theTarget.open(); + } + catch (IOException iox) { + throw new CheckerException("Failed to open target " + theTarget, iox); + } + + + List yamlRoots = new ArrayList(); + try { + Yaml yaml = new Yaml(); + for (Object yamlRoot: yaml.loadAll(source)) { + yamlRoots.add(yamlRoot); + } + + //yamlRoots.add( + // new YamlParser(CharStreams.toString(source)).parse()); + } +/* + catch(SyntaxException sx) { + System.out.println(sx.getLineNumber() + ": " + sx.getMessage()); + theTarget.report(sx); + } +*/ + catch(Exception x) { + theTarget.report(x); + return Collections.EMPTY_LIST; + //return Collections.singletonSet(theTarget); + } + finally { + try { + source.close(); + } + catch (IOException iox) { + //just log it + } + } + + List targets = new ArrayList(yamlRoots.size()); + if (yamlRoots.size() == 1) { + //he target turned out to be a bare document + theTarget.setTarget(yamlRoots.get(0)); + theTarget.setStage(Stage.parsed); + targets.add(theTarget); + } + else { + //the target turned out to be a stream containing multiple documents + for (int i = 0; i < yamlRoots.size(); i++) { +/* +!!We're changing 
the target below, i.e. we're changing the target implementation hence caching implementation will suffer!! +*/ + Target target = new Target(theTarget.getName(), + fragmentTargetURI(theTarget.getLocation(), String.valueOf(i))); + target.setTarget(yamlRoots.get(i)); + target.setStage(Stage.parsed); + targets.add(target); + } + } + + log.exiting(getClass().getName(), "parseTarget", theTarget); + return targets; + } + + protected URI fragmentTargetURI(URI theRoot, String theFragment) { + try { + return new URI(theRoot.getScheme(), + theRoot.getSchemeSpecificPart(), + theFragment); + } + catch(URISyntaxException urisx) { + throw new RuntimeException(); + } + } + + protected Target validateTarget(Target theTarget) + throws CheckerException { + log.entering(getClass().getName(), "validateTarget", theTarget); + + String version = (String) + ((Map)theTarget.getTarget()) + .get("tosca_definitions_version"); + if (version == null) + throw new CheckerException("Target " + theTarget + " does not specify a tosca_definitions_version"); + + Target grammar = this.grammars.get(version); + if (grammar == null) + throw new CheckerException("Target " + theTarget + " specifies unknown tosca_definitions_version " + version); + + TOSCAValidator validator = null; + try { + validator = new TOSCAValidator(theTarget, grammar.getTarget()); + } + catch (SchemaException sx) { + throw new CheckerException("Grammar error at: " + sx.getPath(), sx); + } + + theTarget.getReport().addAll( + validator.validate(theTarget.getTarget())); + theTarget.setStage(Stage.validated); + + if (!theTarget.getReport().hasErrors()) { + //applyCanonicals(theTarget.getTarget(), validator.canonicals); + } + + log.exiting(getClass().getName(), "validateTarget", theTarget); + return theTarget; + } + + /** */ + protected Target checkTarget(Target theTarget) throws CheckerException { + + log.entering(getClass().getName(), "checkTarget", theTarget); + + CheckContext ctx = new CheckContext(theTarget); + //start at the top + 
check_service_template_definition( + (Map)theTarget.getTarget(), ctx); + + theTarget.setStage(Stage.checked); + log.exiting(getClass().getName(), "checkTarget", theTarget); + return theTarget; + } + + private String errorReport(List theErrors) { + StringBuilder sb = new StringBuilder(theErrors.size() + " errors"); + for (Throwable x: theErrors) { + sb.append("\n"); + if (x instanceof ValidationException) { + ValidationException vx = (ValidationException)x; + //.apend("at ") + //.append(error.getLineNumber()) + //.append(" : ") + sb.append("[") + .append(vx.getPath()) + .append("] "); + } + else if (x instanceof TargetError) { + TargetError tx = (TargetError)x; + sb.append("[") + .append(tx.getLocation()) + .append("] "); + } + sb.append(x.getMessage()); + if (x.getCause() != null) { + sb.append("\n\tCaused by:\n") + .append(x.getCause()); + } + } + sb.append("\n"); + return sb.toString(); + } + + + protected void range_definition_post_validation_handler(Object theValue, Rule theRule, Validator.ValidationContext theContext) { + log.entering("", "range_definition", theContext.getPath()); + + assert theRule.getType().equals("seq"); + List bounds = (List)theValue; + + if (bounds.size() != 2) { + theContext.addError("Too many values in bounds specification", theRule, theValue, null); + return; + } + + try { + Double.parseDouble(bounds.get(0).toString()); + } + catch(NumberFormatException nfe) { + theContext.addError("Lower bound not a number", theRule, theValue, null); + } + + try { + Double.parseDouble(bounds.get(1).toString()); + } + catch(NumberFormatException nfe) { + if (!"UNBOUNDED".equals(bounds.get(1).toString())) { + theContext.addError("Upper bound not a number or 'UNBOUNDED'", theRule, theValue, null); + } + } + + } + + public void check_properties( + Map theDefinitions, CheckContext theContext) { + theContext.enter("properties"); + try { + if(!checkDefinition("properties", theDefinitions, theContext)) + return; + + for (Iterator> i = 
theDefinitions.entrySet().iterator(); i.hasNext(); ) { + Map.Entry e = i.next(); + check_property_definition(e.getKey(), e.getValue(), theContext); + } + } + finally { + theContext.exit(); + } + } + + protected void check_property_definition( + String theName, Map theDefinition, CheckContext theContext) { + theContext.enter(theName); + if (!checkName(theName, theContext) || + !checkDefinition(theName, theDefinition, theContext)) { + return; + } + //check the type + if (!checkDataType (theName, theDefinition, theContext)) { + return; + } + //check default value is compatible with type + Object defaultValue = theDefinition.get("default"); + if (defaultValue != null) { + checkDataValuation(defaultValue, theDefinition, theContext); + } + + theContext.exit(); + } + + public void check_attributes( + Map theDefinitions, CheckContext theContext) { + theContext.enter("attributes"); + try { + if(!checkDefinition("attributes", theDefinitions, theContext)) + return; + + for (Iterator> i = theDefinitions.entrySet().iterator(); i.hasNext(); ) { + Map.Entry e = i.next(); + check_attribute_definition(e.getKey(), e.getValue(), theContext); + } + } + finally { + theContext.exit(); + } + } + + protected void check_attribute_definition( + String theName, Map theDefinition, CheckContext theContext) { + theContext.enter(theName); + try { + if (!checkName(theName, theContext) || + !checkDefinition(theName, theDefinition, theContext)) { + return; + } + if (!checkDataType(theName, theDefinition, theContext)) { + return; + } + } + finally { + theContext.exit(); + } + } + + /* top level rule, we collected the whole information set. + * this is where checking starts + */ + protected void check_service_template_definition( + Map theDef, CheckContext theContext) { + theContext.enter(""); + + if (theDef == null) { + theContext.addError("Empty template", null); + return; + } + + catalogs("", theDef, theContext); //root +//!!! 
imports need to be processed first now that catalogging takes place at check time!! + //first catalog whatever it is there to be cataloged so that the checks can perform cross-checking + for (Iterator> ri = theDef.entrySet().iterator(); + ri.hasNext(); ) { + Map.Entry e = ri.next(); + catalogs(e.getKey(), e.getValue(), theContext); + } + + checks("", theDef, theContext); //root + for (Iterator> ri = theDef.entrySet().iterator(); + ri.hasNext(); ) { + Map.Entry e = ri.next(); + checks(e.getKey(), e.getValue(), theContext); + } + theContext.exit(); + } + + @Catalogs(path="/data_types") + protected void catalog_data_types( + Map theDefinitions, CheckContext theContext) { + theContext.enter("data_types"); + try { + catalogTypes(Construct.Data, theDefinitions, theContext); + } + finally { + theContext.exit(); + } + } + + @Checks(path="/data_types") + protected void check_data_types( + Map theDefinitions, CheckContext theContext) { + theContext.enter("data_types"); + + try { + if(!checkDefinition("data_types", theDefinitions, theContext)) + return; + + for (Iterator> i = theDefinitions.entrySet().iterator(); i.hasNext(); ) { + Map.Entry e = i.next(); + check_data_type_definition(e.getKey(), e.getValue(), theContext); + } + } + finally { + theContext.exit(); + } + } + + /* */ + protected void check_data_type_definition(String theName, + Map theDefinition, + CheckContext theContext) { + theContext.enter(theName, Construct.Data); + try { + if (!checkName(theName, theContext) || + !checkDefinition(theName, theDefinition, theContext)) { + return; + } + + checkTypeConstruct( + Construct.Data, theName, theDefinition, theContext); + + if (theDefinition.containsKey("properties")) { + check_properties( + (Map)theDefinition.get("properties"), theContext); + checkTypeConstructFacet(Construct.Data, theName, theDefinition, + Facet.properties, theContext); + } + } + finally { + theContext.exit(); + } + } + + @Catalogs(path="/capability_types") + protected void catalog_capability_types( 
+ Map theDefinitions, CheckContext theContext) { + theContext.enter("capability_types"); + try { + catalogTypes(Construct.Capability, theDefinitions, theContext); + } + finally { + theContext.exit(); + } + } + + /* */ + @Checks(path="/capability_types") + protected void check_capability_types( + Map theTypes, CheckContext theContext) { + theContext.enter("capability_types"); + try { + if(!checkDefinition("capability_types", theTypes, theContext)) + return; + + for (Iterator> i = theTypes.entrySet().iterator(); i.hasNext(); ) { + Map.Entry e = i.next(); + check_capability_type_definition(e.getKey(), e.getValue(), theContext); + } + } + finally { + theContext.exit(); + } + } + + /* */ + protected void check_capability_type_definition(String theName, + Map theDefinition, + CheckContext theContext) { + theContext.enter(theName, Construct.Capability); + + try { + if (!checkName(theName, theContext) || + !checkDefinition(theName, theDefinition, theContext)) { + return; + } + + checkTypeConstruct( + Construct.Capability, theName, theDefinition, theContext); + + if (theDefinition.containsKey("properties")) { + check_properties( + (Map)theDefinition.get("properties"), theContext); + checkTypeConstructFacet(Construct.Capability, theName, theDefinition, + Facet.properties, theContext); + } + + if (theDefinition.containsKey("attributes")) { + check_attributes( + (Map)theDefinition.get("attributes"), theContext); + checkTypeConstructFacet(Construct.Capability, theName, theDefinition, + Facet.attributes, theContext); + } + + //valid_source_types: see capability_type_definition + //unclear: how is the valid_source_types list definition eveolving across + //the type hierarchy: additive, overwriting, ?? 
+ if (theDefinition.containsKey("valid_source_types")) { + checkTypeReference(Construct.Node, theContext, + ((List)theDefinition.get("valid_source_types")).toArray(EMPTY_STRING_ARRAY)); + } + } + finally { + theContext.exit(); + } + } + + @Catalogs(path="/relationship_types") + protected void catalog_relationship_types( + Map theDefinitions, CheckContext theContext) { + theContext.enter("relationship_types"); + try { + catalogTypes(Construct.Relationship, theDefinitions, theContext); + } + finally { + theContext.exit(); + } + } + + /* */ + @Checks(path="/relationship_types") + protected void check_relationship_types( + Map theDefinition, CheckContext theContext) { + theContext.enter("relationship_types"); + try { + if(!checkDefinition("relationship_types", theDefinition, theContext)) + return; + + for (Iterator> i = theDefinition.entrySet().iterator(); i.hasNext(); ) { + Map.Entry e = i.next(); + check_relationship_type_definition(e.getKey(), e.getValue(), theContext); + } + } + finally { + theContext.exit(); + } + } + + /* */ + protected void check_relationship_type_definition(String theName, + Map theDefinition, + CheckContext theContext) { + theContext.enter(theName, Construct.Relationship); + try { + if (!checkName(theName, theContext) || + !checkDefinition(theName, theDefinition, theContext)) { + return; + } + + checkTypeConstruct( + Construct.Relationship, theName, theDefinition, theContext); + + if (theDefinition.containsKey("properties")) { + check_properties( + (Map)theDefinition.get("properties"), theContext); + checkTypeConstructFacet(Construct.Relationship, theName, theDefinition, + Facet.properties, theContext); + } + + if (theDefinition.containsKey("attributes")) { + check_properties( + (Map)theDefinition.get("attributes"), theContext); + checkTypeConstructFacet(Construct.Relationship, theName, theDefinition, + Facet.attributes, theContext); + } + + Map interfaces = (Map)theDefinition.get("interfaces"); + if (interfaces != null) { + 
theContext.enter("interfaces"); + for (Iterator> i = + interfaces.entrySet().iterator(); i.hasNext(); ) { + Map.Entry e = i.next(); + check_type_interface_definition( + e.getKey(), e.getValue(), theContext); + } + theContext.exit(); + } + + if (theDefinition.containsKey("valid_target_types")) { + checkTypeReference(Construct.Capability, theContext, + ((List)theDefinition.get("valid_target_types")).toArray(EMPTY_STRING_ARRAY)); + } + } + finally { + theContext.exit(); + } + } + + @Catalogs(path="/artifact_types") + protected void catalog_artifact_types( + Map theDefinitions, CheckContext theContext) { + theContext.enter("artifact_types"); + try { + catalogTypes(Construct.Artifact, theDefinitions, theContext); + } + finally { + theContext.exit(); + } + } + + /* */ + @Checks(path="/artifact_types") + protected void check_artifact_types( + Map theDefinition, CheckContext theContext) { + theContext.enter("artifact_types"); + try { + if(!checkDefinition("artifact_types", theDefinition, theContext)) + return; + + for (Iterator> i = theDefinition.entrySet().iterator(); i.hasNext(); ) { + Map.Entry e = i.next(); + check_artifact_type_definition(e.getKey(), e.getValue(), theContext); + } + } + finally { + theContext.exit(); + } + } + + /* */ + protected void check_artifact_type_definition(String theName, + Map theDefinition, + CheckContext theContext) { + theContext.enter(theName, Construct.Artifact); + try { + if (!checkName(theName, theContext) || + !checkDefinition(theName, theDefinition, theContext)) { + return; + } + + checkTypeConstruct( + Construct.Artifact, theName, theDefinition, theContext); + } + finally { + theContext.exit(); + } + } + + @Catalogs(path="/interface_types") + protected void catalog_interface_types( + Map theDefinitions, CheckContext theContext) { + theContext.enter("interface_types"); + try { + catalogTypes(Construct.Interface, theDefinitions, theContext); + } + finally { + theContext.exit(); + } + } + + /* */ + @Checks(path="/interface_types") + 
protected void check_interface_types( + Map theDefinition, CheckContext theContext) { + theContext.enter("interface_types"); + try { + if(!checkDefinition("interface_types", theDefinition, theContext)) + return; + + for (Iterator> i = theDefinition.entrySet().iterator(); i.hasNext(); ) { + Map.Entry e = i.next(); + check_interface_type_definition(e.getKey(), e.getValue(), theContext); + } + } + finally { + theContext.exit(); + } + } + + /* */ + protected void check_interface_type_definition(String theName, + Map theDefinition, + CheckContext theContext) { + theContext.enter(theName, Construct.Interface); + try { + if (!checkName(theName, theContext) || + !checkDefinition(theName, theDefinition, theContext)) { + return; + } + + checkTypeConstruct( + Construct.Interface, theName, theDefinition, theContext); + + //not much else here: a list of operation_definitions, each with its + //implementation and inputs + + //check that common inputs are re-defined in a compatible manner + + //check that the interface operations are overwritten in a compatible manner + //for (Iterator> i = theDefinition.entrySet() + + } + finally { + theContext.exit(); + } + } + + @Catalogs(path="/node_types") + protected void catalog_node_types( + Map theDefinitions, CheckContext theContext) { + theContext.enter("node_types"); + try { + catalogTypes(Construct.Node, theDefinitions, theContext); + } + finally { + theContext.exit(); + } + } + + /* */ + @Checks(path="/node_types") + protected void check_node_types( + Map theDefinition, CheckContext theContext) { + theContext.enter("node_types"); + try { + if(!checkDefinition("node_types", theDefinition, theContext)) + return; + + for (Iterator> i = theDefinition.entrySet().iterator(); i.hasNext(); ) { + Map.Entry e = i.next(); + check_node_type_definition(e.getKey(), e.getValue(), theContext); + } + } + finally { + theContext.exit(); + } + } + + + /* */ + protected void check_node_type_definition(String theName, + Map theDefinition, + CheckContext 
theContext) { + theContext.enter(theName, Construct.Node); + + try { + if (!checkName(theName, theContext) || + !checkDefinition(theName, theDefinition, theContext)) { + return; + } + + checkTypeConstruct( + Construct.Node, theName, theDefinition, theContext); + + if (theDefinition.containsKey("properties")) { + check_properties( + (Map)theDefinition.get("properties"), theContext); + checkTypeConstructFacet(Construct.Node, theName, theDefinition, + Facet.properties, theContext); + } + + if (theDefinition.containsKey("attributes")) { + check_properties( + (Map)theDefinition.get("attributes"), theContext); + checkTypeConstructFacet(Construct.Node, theName, theDefinition, + Facet.attributes, theContext); + } + + //requirements + if (theDefinition.containsKey("requirements")) { + check_requirements( + (List)theDefinition.get("requirements"), theContext); + } + + //capabilities + if (theDefinition.containsKey("capabilities")) { + check_capabilities( + (Map)theDefinition.get("capabilities"), theContext); + } + + //interfaces: + Map interfaces = + (Map)theDefinition.get("interfaces"); + if (interfaces != null) { + try { + theContext.enter("interfaces"); + for (Iterator> i = + interfaces.entrySet().iterator(); i.hasNext(); ) { + Map.Entry e = i.next(); + check_type_interface_definition( + e.getKey(), e.getValue(), theContext); + } + } + finally { + theContext.exit(); + } + } + + //artifacts + + } + finally { + theContext.exit(); + } + } + + @Catalogs(path="/group_types") + protected void catalog_group_types( + Map theDefinitions, CheckContext theContext) { + theContext.enter("group_types"); + try { + catalogTypes(Construct.Group, theDefinitions, theContext); + } + finally { + theContext.exit(); + } + } + + /* */ + @Checks(path="/group_types") + protected void check_group_types( + Map theDefinition, CheckContext theContext) { + theContext.enter("group_types"); + try { + if(!checkDefinition("group_types", theDefinition, theContext)) + return; + + for (Iterator> i = 
theDefinition.entrySet().iterator(); i.hasNext(); ) { + Map.Entry e = i.next(); + check_group_type_definition(e.getKey(), e.getValue(), theContext); + } + } + finally { + theContext.exit(); + } + } + + /* */ + protected void check_group_type_definition(String theName, + Map theDefinition, + CheckContext theContext) { + theContext.enter(theName, Construct.Group); + + try { + if (!checkName(theName, theContext) || + !checkDefinition(theName, theDefinition, theContext)) { + return; + } + + checkTypeConstruct( + Construct.Group, theName, theDefinition, theContext); + + if (theDefinition.containsKey("properties")) { + check_properties( + (Map)theDefinition.get("properties"), theContext); + checkTypeConstructFacet(Construct.Group, theName, theDefinition, + Facet.properties, theContext); + } + + if (theDefinition.containsKey("targets")) { + checkTypeReference(Construct.Node, theContext, + ((List)theDefinition.get("targets")).toArray(EMPTY_STRING_ARRAY)); + } + + //interfaces + Map interfaces = + (Map)theDefinition.get("interfaces"); + if (interfaces != null) { + try { + theContext.enter("interfaces"); + for (Iterator> i = + interfaces.entrySet().iterator(); i.hasNext(); ) { + Map.Entry e = i.next(); + check_type_interface_definition( + e.getKey(), e.getValue(), theContext); + } + } + finally { + theContext.exit(); + } + } + + } + finally { + theContext.exit(); + } + } + + @Catalogs(path="/policy_types") + protected void catalog_policy_types( + Map theDefinitions, CheckContext theContext) { + theContext.enter("policy_types"); + try { + catalogTypes(Construct.Policy, theDefinitions, theContext); + } + finally { + theContext.exit(); + } + } + + /* */ + @Checks(path="/policy_types") + protected void check_policy_types( + Map theDefinition, CheckContext theContext) { + theContext.enter("policy_types"); + try { + if(!checkDefinition("policy_types", theDefinition, theContext)) + return; + + for (Iterator> i = theDefinition.entrySet().iterator(); i.hasNext(); ) { + Map.Entry e = 
i.next(); + check_policy_type_definition(e.getKey(), e.getValue(), theContext); + } + } + finally { + theContext.exit(); + } + } + + /* */ + protected void check_policy_type_definition(String theName, + Map theDefinition, + CheckContext theContext) { + theContext.enter(theName, Construct.Policy); + + try { + if (!checkName(theName, theContext) || + !checkDefinition(theName, theDefinition, theContext)) { + return; + } + + checkTypeConstruct( + Construct.Policy, theName, theDefinition, theContext); + + if (theDefinition.containsKey("properties")) { + check_properties( + (Map)theDefinition.get("properties"), theContext); + checkTypeConstructFacet(Construct.Policy, theName, theDefinition, + Facet.properties, theContext); + } + + //the targets can be known node types or group types + List targets = (List)theDefinition.get("targets"); + if (targets != null) { + if (checkDefinition("targets", targets, theContext)) { + for (String target: targets) { + if (!(this.catalog.hasType(Construct.Node, target) || + this.catalog.hasType(Construct.Group, target))) { + theContext.addError( + Message.INVALID_TYPE_REFERENCE, "targets", target, Arrays.asList(Construct.Node, Construct.Group)); + } + } + } + } + + } + finally { + theContext.exit(); + } + } + + //checking of actual constructs (capability, ..) + + /* First, interface types do not have a hierarchical organization (no + * 'derived_from' in a interface type definition). + * So, when interfaces (with a certain type) are defined in a node + * or relationship type (and they can define new? operations), what + * is there to check: + * Can operations here re-define their declaration from the interface + * type spec?? From A.5.11.3 we are to understand indicates override to be + * the default interpretation .. but they talk about sub-classing so it + * probably intended as a reference to the node or relationship type + * hierarchy and not the interface type (no hierarchy there). 
+ * Or is this a a case of augmentation where new operations can be added?? + */ + protected void check_type_interface_definition( + String theName, Map theDef, CheckContext theContext) { + theContext.enter(theName); + try { + if (!checkName(theName, theContext) || + !checkDefinition(theName, theDef, theContext)) { + return; + } + + if (!checkTypeReference(Construct.Interface, theContext, (String)theDef.get("type"))) + return; + + if (theDef.containsKey("inputs")) { + check_inputs((Map)theDef.get("inputs"), theContext); + } + + //operations: all entries except for 'type' and 'inputs' + /* + for (Iterator> i = + theDef.entrySet().iterator(); i.hasNext(); ) { + Map.Entry e = i.next(); + String ename = e.getKey(); + if ("type".equals(ename) || "inputs".equals(ename)) { + continue; + } + ?? check_operation_definition(ename, e.getValue(), theContext); + } + */ + } + finally { + theContext.exit(); + } + } + + /* */ + protected void check_capabilities(Map theDefinition, + CheckContext theContext) { + theContext.enter("capabilities"); + try { + if(!checkDefinition("capabilities", theDefinition, theContext)) + return; + + for (Iterator> i = theDefinition.entrySet().iterator(); i.hasNext(); ) { + Map.Entry e = i.next(); + check_capability_definition(e.getKey(), e.getValue(), theContext); + } + } + finally { + theContext.exit(); + } + } + + /* A capability definition appears within the context ot a node type + */ + protected void check_capability_definition(String theName, + Map theDef, + CheckContext theContext) { + theContext.enter(theName, Construct.Capability); + + try { + if (!checkName(theName, theContext) || + !checkDefinition(theName, theDef, theContext)) { + return; + } + + //check capability type + if(!checkTypeReference(Construct.Capability, theContext, (String)theDef.get("type"))) + return; + + //check properties + if (!checkFacetAugmentation( + Construct.Capability, theDef, Facet.properties, theContext)) + return; + + //check attributes + if 
(!checkFacetAugmentation( + Construct.Capability, theDef, Facet.attributes, theContext)) + return; + + //valid_source_types: should point to valid template nodes + if (theDef.containsKey("valid_source_types")) { + checkTypeReference(Construct.Node, theContext, + ((List)theDef.get("valid_source_types")).toArray(EMPTY_STRING_ARRAY)); + //per A.6.1.4 there is an additinal check to be performed here: + //"Any Node Type (names) provides as values for the valid_source_types keyname SHALL be type-compatible (i.e., derived from the same parent Node Type) with any Node Types defined using the same keyname in the parent Capability Type." + } + //occurences: were verified in range_definition + + } + finally { + theContext.exit(); + } + } + + protected void check_artifact_definition(String theName, + Map theDef, + CheckContext theContext) { + theContext.enter(theName, Construct.Artifact); + + try { + if (!checkName(theName, theContext) || + !checkDefinition(theName, theDef, theContext)) { + return; + } + //check artifact type + if(!checkTypeReference(Construct.Artifact, theContext, (String)theDef.get("type"))) + return; + } + finally { + theContext.exit(); + } + } + + protected void check_requirements(List theDefinition, + CheckContext theContext) { + theContext.enter("requirements"); + try { + if(!checkDefinition("requirements", theDefinition, theContext)) + return; + + for (Iterator i = theDefinition.iterator(); i.hasNext(); ) { + Map e = i.next(); + Iterator> ei = + (Iterator>)e.entrySet().iterator(); + Map.Entry eie = ei.next(); + check_requirement_definition(eie.getKey(), eie.getValue(), theContext); + assert ei.hasNext() == false; + } + } + finally { + theContext.exit(); + } + } + + protected void check_requirement_definition(String theName, + Map theDef, + CheckContext theContext) { + theContext.enter(theName, Construct.Requirement); + + try { + if (!checkName(theName, theContext) || + !checkDefinition(theName, theDef, theContext)) { + return; + } + //check capability 
type + String capabilityType = (String)theDef.get("capability"); + if (null != capabilityType) { + checkTypeReference(Construct.Capability, theContext, capabilityType); + } + + //check node type + String nodeType = (String)theDef.get("node"); + if (null != nodeType) { + checkTypeReference(Construct.Node, theContext, nodeType); + } + + //check relationship type + Map relationshipSpec = (Map)theDef.get("relationship"); + String relationshipType = null; + if (null != relationshipSpec) { + relationshipType = (String)relationshipSpec.get("type"); + if (relationshipType != null) { //should always be the case + checkTypeReference(Construct.Relationship,theContext,relationshipType); + } + + Map interfaces = (Map) + relationshipSpec.get("interfaces"); + if (interfaces != null) { + //augmentation (additional properties or operations) of the interfaces + //defined by the above relationship types + + //check that the interface types are known + for (Map interfaceDef : interfaces.values()) { + checkTypeReference(Construct.Interface, theContext, (String)interfaceDef.get("type")); + } + } + } + + //cross checks + + //the capability definition might come from the capability type or from the capability definition + //within the node type. We might have more than one as a node might specify multiple capabilities of the + //same type. + //the goal here is to cross check the compatibility of the valid_source_types specification in the + //target capability definition (if that definition contains a valid_source_types entry). 
+ List capabilityDefs = new LinkedList(); + //nodeType exposes capabilityType + if (nodeType != null) { + Map capabilities = + findTypeFacetByType(Construct.Node, nodeType, + Facet.capabilities, capabilityType); + if (capabilities.isEmpty()) { + theContext.addError("The node type " + nodeType + " does not appear to expose a capability of a type compatible with " + capabilityType, null); + } + else { + for (Map.Entry capability: capabilities.entrySet()) { + //this is the capability as it was defined in the node type + Map capabilityDef = capability.getValue(); + //if it defines a valid_source_types then we're working with it, + //otherwise we're working with the capability type it points to. + //The spec does not make it clear if the valid_source_types in a capability definition augments or + //overwrites the one from the capabilityType (it just says they must be compatible). + if (capabilityDef.containsKey("valid_source_types")) { + capabilityDefs.add(capabilityDef); + } + else { + capabilityDef = + catalog.getTypeDefinition(Construct.Capability, (String)capabilityDef.get("type")); + if (capabilityDef.containsKey("valid_source_types")) { + capabilityDefs.add(capabilityDef); + } + else { + //!!if there is a capability that does not have a valid_source_type than there is no reason to + //make any further verification (as there is a valid node_type/capability target for this requirement) + capabilityDefs.clear(); + break; + } + } + } + } + } + else { + Map capabilityDef = catalog.getTypeDefinition(Construct.Capability, capabilityType); + if (capabilityDef.containsKey("valid_source_types")) { + capabilityDefs.add(capabilityDef); + } + } + + //check that the node type enclosing this requirement definition + //is in the list of valid_source_types + if (!capabilityDefs.isEmpty()) { + String enclosingNodeType = + theContext.enclosingConstruct(Construct.Node).name(); + assert enclosingNodeType != null; + + if (!capabilityDefs.stream().anyMatch( + (Map capabilityDef)->{ + 
List valid_source_types = + (List)capabilityDef.get("valid_source_types"); + return valid_source_types.stream().anyMatch( + (String source_type)-> catalog.isDerivedFrom( + Construct.Node, enclosingNodeType, source_type)); + })) { + theContext.addError("Node type: " + enclosingNodeType + " not compatible with any of the valid_source_types provided in the definition of compatible capabilities", null); + + } + + /* + boolean found = false; + for (Map capabilityDef: capabilityDefs) { + + List valid_source_types = + (List)capabilityDef.get("valid_source_types"); + String enclosingNodeType = + theContext.enclosingConstruct(Construct.Node); + assert enclosingNodeType != null; + + //make sure enclosingNodeType is compatible (same or derived from) + //one valid source type + for (String source_type: valid_source_types) { + if (catalog.isDerivedFrom( + Construct.Node, enclosingNodeType, source_type)) { + found = true; + break; + } + } + } + + if (!found) { + //the message is not great as it points to the declared + //capabilityType which is not necessarly where the information + //is coming from + theContext.addError("Node type: " + enclosingNodeType + " not compatible with any of the valid_source_types " + valid_source_types + " provided in the definition of capability " + capabilityType, null); + } + */ + } + + //if we have a relationship type, check if it has a valid_target_types + //if it does, make sure that the capability type is compatible with one + //of them + if (relationshipType != null) { //should always be the case + Map relationshipTypeDef = catalog.getTypeDefinition( + Construct.Relationship, relationshipType); + if (relationshipTypeDef != null) { + List valid_target_types = + (List)relationshipTypeDef.get("valid_target_types"); + if (valid_target_types != null) { + boolean found = false; + for (String target_type: valid_target_types) { + if (catalog.isDerivedFrom( + Construct.Capability, capabilityType, target_type)) { + found = true; + break; + } + } + if 
(!found) { + theContext.addError("Capability type: " + capabilityType + " not compatible with any of the valid_target_types " + valid_target_types + " provided in the definition of relationship type " + relationshipType, null); + } + } + } + } + + //relationship declares the capabilityType in its valid_target_type set + //in A.6.9 'Relationship Type' the spec does not indicate how inheritance + //is to be applied to the valid_target_type spec: cumulative, overwrites, + //so we treat it as an overwrite. + } + finally { + theContext.exit(); + } + } + + //topology_template_definition and sub-rules + /* */ + @Checks(path="/topology_template") + protected void check_topology_template( + Map theDef, final CheckContext theContext) { + + theContext.enter("topology_template"); + + try { + theDef.entrySet().stream() + .forEach(e -> catalogs(e.getKey(), e.getValue(), theContext)); + + theDef.entrySet().stream() + .forEach(e -> checks(e.getKey(), e.getValue(), theContext)); +/* + for (Iterator> ri = theDef.entrySet().iterator(); + ri.hasNext(); ) { + Map.Entry e = ri.next(); + checks(e.getKey(), e.getValue(), theContext); + } +*/ + } + finally { + theContext.exit(); + } + } + + /* + * Once the syntax of the imports section is validated parse/validate/catalog * all the imported template information + */ + @Checks(path="/imports") + protected void check_imports(List theImports, CheckContext theContext) { + theContext.enter("imports"); + + for (ListIterator li = theImports.listIterator(); li.hasNext(); ) { + Object importEntry = li.next(), + importFile = ((Map)mapEntry(importEntry).getValue()).get("file"); + Target tgt = null; + try { + tgt = catalog.getTarget( (URI)importFile ); + } + catch (ClassCastException ccx) { + System.out.println("Import is " + importFile); + } + + if (tgt == null) { + //malfunction + theContext.addError("Checking import '" + importFile + "': failed at a previous stage", null); + return; + } + + if (tgt.getReport().hasErrors()) { + //import failed 
parsing or validation, we skip it + continue; + } + + if (tgt.getStage() == Stage.checked) { + //been here before, this target had already been processed + continue; + } + + //import should have been fully processed by now ??? + log.log(Level.FINE, "Processing import " + tgt + "."); + try { + checkTarget(tgt); + } + catch (CheckerException cx) { + theContext.addError("Failure checking import '" + tgt + "'", cx); + } + + } + theContext.exit(); + } + + /* */ + @Checks(path="/topology_template/substitution_mappings") + protected void check_substitution_mappings(Map theSub, + CheckContext theContext) { + theContext.enter("substitution_mappings"); + try { + //type is mandatory + String type = (String)theSub.get("node_type"); + if (!checkTypeReference(Construct.Node, theContext, type)) { + return; + } + + Map capabilities = (Map)theSub.get("capabilities"); + if (null != capabilities) { + for (Map.Entry ce: capabilities.entrySet()) { + //the key must be a capability of the type + if (null == findTypeFacetByName(Construct.Node, type, + Facet.capabilities, ce.getKey())) { + theContext.addError("Unknown node type capability: " + ce.getKey() + ", type " + type, null); + } + //the value is a 2 element list: first is a local node, + //second is the name of one of its capabilities + List target = ce.getValue(); + if (target.size() != 2) { + theContext.addError("Invalid capability mapping: " + target + ", expecting 2 elements", null); + continue; + } + + String targetNode = (String)target.get(0), + targetCapability = (String)target.get(1); + + Map targetNodeDef = (Map) + this.catalog.getTemplate(theContext.target(), Construct.Node, targetNode); + if (null == targetNodeDef) { + theContext.addError("Invalid capability mapping node template: " + targetNode, null); + continue; + } + + String targetNodeType = (String)targetNodeDef.get("type"); + if (null == findTypeFacetByName(Construct.Node, targetNodeType, + Facet.capabilities, targetCapability)) { + theContext.addError("Invalid 
capability mapping capability: " + targetCapability + ". No such capability found for node template " + targetNode + ", of type " + targetNodeType, null); + } + } + } + + Map requirements = (Map)theSub.get("requirements"); + if (null != requirements) { + for (Map.Entry re: requirements.entrySet()) { + //the key must be a requirement of the type + if (null == findNodeTypeRequirementByName(type, re.getKey())) { + theContext.addError("Unknown node type requirement: " + re.getKey() + ", type " + type, null); + } + + List target = re.getValue(); + if (target.size() != 2) { + theContext.addError("Invalid requirement mapping: " + target + ", expecting 2 elements", null); + continue; + } + + String targetNode = (String)target.get(0), + targetRequirement = (String)target.get(1); + + Map targetNodeDef = (Map) + this.catalog.getTemplate(theContext.target(), Construct.Node, targetNode); + if (null == targetNodeDef) { + theContext.addError("Invalid requirement mapping node template: " + targetNode, null); + continue; + } + + String targetNodeType = (String)targetNodeDef.get("type"); + if (null == findNodeTypeRequirementByName(targetNodeType,targetRequirement)) { + theContext.addError("Invalid requirement mapping requirement: " + targetRequirement + ". 
No such requirement found for node template " + targetNode + ", of type " + targetNodeType, null); + } + } + } + } + finally { + theContext.exit(); + } + } + + /* */ + @Catalogs(path="/topology_template/inputs") + protected void catalog_inputs(Map theInputs, + CheckContext theContext) { + theContext.enter("inputs"); + + try { + catalogTemplates(Construct.Data, theInputs, theContext); + } + finally { + theContext.exit(); + } + } + + /* */ + @Checks(path="/topology_template/inputs") + protected void check_inputs(Map theInputs, + CheckContext theContext) { + theContext.enter("inputs"); + + try { + if(!checkDefinition("inputs", theInputs, theContext)) + return; + + for (Iterator> i = theInputs.entrySet().iterator(); i.hasNext(); ) { + Map.Entry e = i.next(); + check_input_definition(e.getKey(), e.getValue(), theContext); + } + } + finally { + theContext.exit(); + } + } + + /* */ + protected void check_input_definition(String theName, + Map theDef, + CheckContext theContext) { + theContext.enter(theName); + try { + if (!checkName(theName, theContext) || + !checkDefinition(theName, theDef, theContext)) { + return; + } + // + if (!checkDataType(theName, theDef, theContext)) { + return; + } + //check default value + Object defaultValue = theDef.get("default"); + if (defaultValue != null) { + checkDataValuation(defaultValue, theDef, theContext); + } + } + finally { + theContext.exit(); + } + } + + @Checks(path="topology_template/outputs") + protected void check_outputs(Map theOutputs, + CheckContext theContext) { + theContext.enter("outputs"); + + try { + if(!checkDefinition("outputs", theOutputs, theContext)) + return; + + for (Iterator> i = theOutputs.entrySet().iterator(); i.hasNext(); ) { + Map.Entry e = i.next(); + check_output_definition(e.getKey(), e.getValue(), theContext); + } + } + finally { + theContext.exit(); + } + } + + protected void check_output_definition(String theName, + Map theDef, + CheckContext theContext) { + theContext.enter(theName); + try { + if 
(!checkName(theName, theContext) || + !checkDefinition(theName, theDef, theContext)) { + return; + } + //check the expression + } + finally { + theContext.exit(); + } + } + + @Checks(path="/topology_template/groups") + protected void check_groups(Map theGroups, + CheckContext theContext) { + theContext.enter("groups"); + + try { + if(!checkDefinition("groups", theGroups, theContext)) + return; + + for (Iterator> i = theGroups.entrySet().iterator(); i.hasNext(); ) { + Map.Entry e = i.next(); + check_group_definition(e.getKey(), e.getValue(), theContext); + } + } + finally { + theContext.exit(); + } + } + + protected void check_group_definition(String theName, + Map theDef, + CheckContext theContext) { + theContext.enter(theName); + try { + if (!checkName(theName, theContext) || + !checkDefinition(theName, theDef, theContext)) { + return; + } + + if (!checkTypeReference(Construct.Group, theContext, (String)theDef.get("type"))) + return; + + if (!checkFacet( + Construct.Group, theDef, Facet.properties, theContext)) + return; + + if (theDef.containsKey("targets")) { + //checkTemplateReference(Construct.Node, theContext, + // ((List)theDef.get("targets")).toArray(EMPTY_STRING_ARRAY)); + + List targetsTypes = (List) + this.catalog.getTypeDefinition(Construct.Group, + (String)theDef.get("type")) + .get("targets"); + + List targets = (List)theDef.get("targets"); + for (String target: targets) { + if (!this.catalog.hasTemplate(theContext.target(),Construct.Node, target)) { + theContext.addError("The 'targets' entry must contain a reference to a node template, '" + target + "' is not one", null); + } + else { + if (targetsTypes != null) { + String targetType = (String) + this.catalog.getTemplate(theContext.target(), Construct.Node, target).get("type"); + + boolean found = false; + for (String type: targetsTypes) { + found = this.catalog + .isDerivedFrom(Construct.Node, targetType, type); + if (found) + break; + } + + if (!found) { + theContext.addError("The 'targets' entry 
'" + target + "' is not type compatible with any of types specified in policy type targets", null); + } + } + } + } + } + + if (theDef.containsKey("interfaces")) { + } + } + finally { + theContext.exit(); + } + } + + @Checks(path="/topology_template/policies") + protected void check_policies(List> thePolicies, + CheckContext theContext) { + theContext.enter("policies"); + + try { + if(!checkDefinition("policies", thePolicies, theContext)) + return; + + for (Map policy: thePolicies) { + assert policy.size() == 1; + Map.Entry e = policy.entrySet().iterator().next(); + check_policy_definition(e.getKey(), e.getValue(), theContext); + } + } + finally { + theContext.exit(); + } + } + + protected void check_policy_definition(String theName, + Map theDef, + CheckContext theContext) { + theContext.enter(theName); + try { + if (!checkName(theName, theContext) || + !checkDefinition(theName, theDef, theContext)) { + return; + } + + if (!checkTypeReference(Construct.Policy, theContext, (String)theDef.get("type"))) + return; + + if (!checkFacet( + Construct.Policy, theDef, Facet.properties, theContext)) + return; + + //targets: must point to node or group templates (that are of a type + //specified in the policy type definition, if targets were specified + //there). 
+ if (theDef.containsKey("targets")) { + List targetsTypes = (List) + this.catalog.getTypeDefinition(Construct.Policy, + (String)theDef.get("type")) + .get("targets"); + + List targets = (List)theDef.get("targets"); + for (String target: targets) { + Construct targetConstruct = null; + + if (this.catalog.hasTemplate(theContext.target(),Construct.Group, target)) { + targetConstruct = Construct.Group; + } + else if (this.catalog.hasTemplate(theContext.target(),Construct.Node, target)) { + targetConstruct = Construct.Node; + } + else { + theContext.addError(Message.INVALID_TEMPLATE_REFERENCE, "targets", target, new Object[] {"node", "group"}); + } + + if (targetConstruct != null && + targetsTypes != null) { + //get the target type and make sure is compatible with the types + //indicated in the type spec + String targetType = (String) + this.catalog.getTemplate(theContext.target(), targetConstruct, target).get("type"); + + boolean found = false; + for (String type: targetsTypes) { + found = this.catalog + .isDerivedFrom(targetConstruct, targetType, type); + if (found) + break; + } + + if (!found) { + theContext.addError("The 'targets' " + targetConstruct + " entry '" + target + "' is not type compatible with any of types specified in policy type targets", null); + } + } + } + } + + if (theDef.containsKey("triggers")) { + List triggers = (List)theDef.get("triggers"); + //TODO + } + + } + finally { + theContext.exit(); + } + } + + /* */ + @Catalogs(path="/topology_template/node_templates") + protected void catalog_node_templates(Map theTemplates, + CheckContext theContext) { + theContext.enter("node_templates"); + + try { + catalogTemplates(Construct.Node, theTemplates, theContext); + } + finally { + theContext.exit(); + } + } + + /* */ + @Checks(path="/topology_template/node_templates") + protected void check_node_templates(Map theTemplates, + CheckContext theContext) { + theContext.enter("node_templates"); + try { + if(!checkDefinition("node_templates", theTemplates, 
theContext)) + return; + + for (Iterator> i = theTemplates.entrySet().iterator(); i.hasNext(); ) { + Map.Entry e = i.next(); + check_node_template_definition(e.getKey(), e.getValue(), theContext); + } + } + finally { + theContext.exit(); + } + } + + /* */ + protected void check_node_template_definition(String theName, + Map theNode, + CheckContext theContext) { + theContext.enter(theName, Construct.Node); + + try { + if (!checkName(theName, theContext) || + !checkDefinition(theName, theNode, theContext)) { + return; + } + + if (!checkTypeReference(Construct.Node, theContext, (String)theNode.get("type"))) + return; + + //copy + String copy = (String)theNode.get("copy"); + if (copy != null) { + if (!checkTemplateReference(Construct.Node, theContext, copy)) { + theContext.addError(Message.INVALID_TEMPLATE_REFERENCE, "copy", copy, Construct.Node); + } + else { + //the 'copy' node specification should be used to provide 'defaults' + //for this specification, we should check them + } + } + + /* check that we operate on properties and attributes within the scope of + the specified node type */ + if (!checkFacet( + Construct.Node, /*theName,*/theNode, Facet.properties, theContext)) + return; + + if (!checkFacet( + Construct.Node, /*theName,*/theNode, Facet.attributes, theContext)) + return; + + //requirement assignment seq + if (theNode.containsKey("requirements")) { + check_requirements_assignment_definition( + (List)theNode.get("requirements"), theContext); + } + + //capability assignment map: subject to augmentation + if (theNode.containsKey("capabilities")) { + check_capabilities_assignment_definition( + (Map)theNode.get("capabilities"), theContext); + } + + //interfaces + if (theNode.containsKey("interfaces")) { + check_template_interfaces_definition( + (Map)theNode.get("interfaces"), theContext); + } + + //artifacts: artifacts do not have different definition forms/syntax + //depending on the context (type or template) but they are still subject + //to 'augmentation' 
+ if (theNode.containsKey("artifacts")) { + check_template_artifacts_definition( + (Map)theNode.get("artifacts"), theContext); + } + + /* node_filter: the context to which the node filter is applied is very + * wide here as opposed to the node filter specification in a requirement + * assignment which has a more strict context (target node/capability are + * specified). + * We could check that there are nodes in this template having the + * properties/capabilities specified in this filter, i.e. the filter has + * a chance to succeed. + */ + } + finally { + theContext.exit(); + } + } + + @Checks(path="/topology_template/relationship_templates") + protected void check_relationship_templates(Map theTemplates, + CheckContext theContext) { + theContext.enter("relationship_templates"); + + for (Iterator> i = theTemplates.entrySet().iterator(); i.hasNext(); ) { + Map.Entry e = i.next(); + check_relationship_template_definition(e.getKey(), e.getValue(), theContext); + } + theContext.exit(); + } + + /* */ + protected void check_relationship_template_definition( + String theName, + Map theRelationship, + CheckContext theContext) { + theContext.enter(theName, Construct.Relationship); + try { + if (!checkName(theName, theContext) || + !checkDefinition(theName, theRelationship, theContext)) { + return; + } + + if (!checkTypeReference(Construct.Relationship, theContext, (String)theRelationship.get("type"))) + return; + + String copy = (String)theRelationship.get("copy"); + if (copy != null) { + if (!checkTemplateReference(Construct.Relationship, theContext, copy)) { + theContext.addError(Message.INVALID_TEMPLATE_REFERENCE, "copy", copy, Construct.Relationship); + } + } + + /* check that we operate on properties and attributes within the scope of + the specified relationship type */ + if (!checkFacet(Construct.Relationship, theRelationship, + Facet.properties, theContext)) + return; + + if (!checkFacet(Construct.Relationship, theRelationship, + Facet.attributes, theContext)) + 
return; + + /* interface definitions + note: augmentation is allowed here so not clear what to check .. + maybe report augmentations if so configured .. */ + + } + finally { + theContext.exit(); + } + } + + //requirements and capabilities assignment appear in a node templates + protected void check_requirements_assignment_definition( + List theRequirements, CheckContext theContext) { + theContext.enter("requirements"); + try { + if(!checkDefinition("requirements", theRequirements, theContext)) + return; + + //the node type for the node template enclosing these requirements + String nodeType = (String)catalog.getTemplate( + theContext.target(), + Construct.Node, + theContext.enclosingConstruct(Construct.Node).name()) + .get("type"); + + for(Iterator ri = theRequirements.iterator(); ri.hasNext(); ) { + Map requirement = (Map)ri.next(); + + Iterator> rai = + (Iterator>)requirement.entrySet().iterator(); + + Map.Entry requirementEntry = rai.next(); + assert !rai.hasNext(); + + String requirementName = requirementEntry.getKey(); + Map requirementDef = findNodeTypeRequirementByName( + nodeType, requirementName); + + if (requirementDef == null /*&& + !config.allowAugmentation()*/) { + theContext.addError("No requirement " + requirementName + " was defined for the node type " + nodeType, null); + continue; + } + + check_requirement_assignment_definition( + requirementName, requirementEntry.getValue(), requirementDef, theContext); + } + } + finally { + theContext.exit(); + } + } + + protected void check_requirement_assignment_definition( + String theRequirementName, + Map theAssignment, + Map theDefinition, + CheckContext theContext) { + theContext//.enter("requirement_assignment") + .enter(theRequirementName, Construct.Requirement); + + //grab the node type definition to verify compatibility + + try { + //node assignment + boolean targetNodeIsTemplate = false; + String targetNode = (String)theAssignment.get("node"); + if (targetNode == null) { + targetNode = 
(String)theDefinition.get("node"); + //targetNodeIsTemplate stays false, targetNode must be a type + } + else { + //the value must be a node template or a node type + targetNodeIsTemplate = isTemplateReference( + Construct.Node, theContext, targetNode); + if (!targetNodeIsTemplate) { + if (!isTypeReference(Construct.Node/*, theContext*/, targetNode)) { + theContext.addError(Message.INVALID_CONSTRUCT_REFERENCE, "node", targetNode, Construct.Node); + return; + } + //targetNode is a type reference + } + + //additional checks + String targetNodeDef = (String)theDefinition.get("node"); + if (targetNodeDef != null && targetNode != null) { + if (targetNodeIsTemplate) { + //if the target is node template, it must be compatible with the + //node type specification in the requirement defintion + String targetNodeType = (String) + catalog.getTemplate(theContext.target(),Construct.Node,targetNode).get("type"); + if (!catalog.isDerivedFrom( + Construct.Node, targetNodeType,targetNodeDef)) { + theContext.addError(Message.INCOMPATIBLE_REQUIREMENT_TARGET, Construct.Node, targetNodeType + " of target node " + targetNode, targetNodeDef); + return; + } + } + else { + //if the target is a node type it must be compatible (= or derived + //from) with the node type specification in the requirement definition + if (!catalog.isDerivedFrom( + Construct.Node, targetNode, targetNodeDef)) { + theContext.addError(Message.INCOMPATIBLE_REQUIREMENT_TARGET, Construct.Node, targetNode, targetNodeDef); + return; + } + } + } + } + + String targetNodeType = targetNodeIsTemplate ? 
+ (String)catalog.getTemplate(theContext.target(),Construct.Node,targetNode).get("type"): + targetNode; + + //capability assignment + boolean targetCapabilityIsType = false; + String targetCapability = (String)theAssignment.get("capability"); + if (targetCapability == null) { + targetCapability = (String)theDefinition.get("capability"); + //in a requirement definition the target capability can only be a + //capability type (and not a capability name within some target node + //type) + targetCapabilityIsType = true; + } + else { + targetCapabilityIsType = isTypeReference(Construct.Capability, targetCapability); + + //check compatibility with the target compatibility type specified + //in the requirement definition, if any + String targetCapabilityDef = (String)theDefinition.get("capability"); + if (targetCapabilityDef != null && targetCapability != null) { + if (targetCapabilityIsType) { + if (!catalog.isDerivedFrom( + Construct.Capability, targetCapability, targetCapabilityDef)) { + theContext.addError(Message.INCOMPATIBLE_REQUIREMENT_TARGET, Construct.Capability, targetCapability, targetCapabilityDef); + return; + } + } + else { + //the capability is from a target node. 
Find its definition and + //check that its type is compatible with the capability type + //from the requirement definition + + //check target capability compatibility with target node + if (targetNode == null) { + theContext.addError("The capability '" + targetCapability + "' is not a capability type, hence it has to be a capability of the node template indicated in 'node', which was not specified", null); + return; + } + if (!targetNodeIsTemplate) { + theContext.addError("The capability '" + targetCapability + "' is not a capability type, hence it has to be a capability of the node template indicated in 'node', but there you specified a node type", null); + return; + } + //check that the targetNode (its type) indeed has the + //targetCapability + + Map targetNodeCapabilityDef = + findTypeFacetByName( + Construct.Node, targetNodeType, + Facet.capabilities, targetCapability); + if (targetNodeCapabilityDef == null) { + theContext.addError("No capability '" + targetCapability + "' was specified in the node " + targetNode + " of type " + targetNodeType, null); + return; + } + + String targetNodeCapabilityType = (String)targetNodeCapabilityDef.get("type"); + + if (!catalog.isDerivedFrom(Construct.Capability, + targetNodeCapabilityType, + targetCapabilityDef)) { + theContext.addError("The required target capability type '" + targetCapabilityDef + "' is not compatible with the target capability type found in the target node type capability definition : " + targetNodeCapabilityType + ", targetNode " + targetNode + ", capability name " + targetCapability, null); + return; + } + } + } + } + + //relationship assignment + Map targetRelationship = (Map)theAssignment.get("relationship"); + if (targetRelationship != null) { + //this has to be compatible with the relationship with the same name + //from the node type + //check the type + } + + //node_filter; used jxpath to simplify the navigation somewhat + //this is too cryptic + JXPathContext jxPath = 
JXPathContext.newContext(theAssignment); + jxPath.setLenient(true); + + List propertiesFilter = + (List)jxPath.getValue("/node_filter/properties"); + if (propertiesFilter != null) { + for (Map propertyFilter: propertiesFilter) { +//System.out.println("propertiesFilter " + propertyFilter); + + if (targetNode != null) { + //if we have a target node or node template then it must have + //have these properties + for (Object propertyName: propertyFilter.keySet()) { + if (null == findTypeFacetByName(Construct.Node, + targetNodeType, + Facet.properties, + propertyName.toString())) { + theContext.addError("The node_filter property " + propertyName + " is invalid: requirement target node " + targetNode + " does not have such a property", null); + } + } + } + else if (targetCapability != null) { + /* + //if we have a target capability type (but not have a target node) + //than it must have these properties + + Not true, the filter always refer to node properties: it is the processor's/orchestrator job to match the + this requirement with a node that satisfies the filter. We cannot anticipate the values of all properties + (some might come from inputs) so we cannot scan for candidates at this point. + + + if (targetCapabilityIsType) { + for (Object propertyName: propertyFilter.keySet()) { + if (null == findTypeFacetByName(Construct.Capability, + targetCapability, + Facet.properties, + propertyName.toString())) { + theContext.addError("The node_filter property " + propertyName + " is invalid: requirement target capability " + targetCapability + " does not have such a property", null); + } + } + } + else { + //cannot be: if you point to an explicit capability then you must + //have specified a targetNode + } + */ + } + else { + //what are the properties suppose to filter on ?? 
+ } + } + } + + List capabilitiesFilter = + (List)jxPath.getValue("node_filter/capabilities"); + if (capabilitiesFilter != null) { + for (Map capabilityFilterDef: capabilitiesFilter) { + assert capabilityFilterDef.size() == 1; + Map.Entry capabilityFilterEntry = + (Map.Entry)capabilityFilterDef.entrySet().iterator().next(); + String targetFilterCapability = capabilityFilterEntry.getKey(); + Map targetFilterCapabilityDef = null; + + //if we have a targetNode capabilityName must be a capability of + //that node (type); or it can be simply capability type (but the node + //must have a capability of that type) + + String targetFilterCapabilityType = null; + if (targetNode != null) { + targetFilterCapabilityDef = + findTypeFacetByName(Construct.Node, targetNodeType, + Facet.capabilities, targetFilterCapability); + if (targetFilterCapabilityDef != null) { + targetFilterCapabilityType = + (String)targetFilterCapabilityDef/*.values().iterator().next()*/.get("type"); + } + else { + Map targetFilterCapabilities = + findTypeFacetByType(Construct.Node, targetNodeType, + Facet.capabilities, targetFilterCapability); + + if (!targetFilterCapabilities.isEmpty()) { + if (targetFilterCapabilities.size() > 1) { + log.warning("check_requirement_assignment_definition: filter check, target node type '" + targetNodeType + "' has more than one capability of type '" + targetFilterCapability + "', not supported"); + } + //pick the first entry, it represents a capability of the required type + Map.Entry capabilityEntry = targetFilterCapabilities.entrySet().iterator().next(); + targetFilterCapabilityDef = Collections.singletonMap(capabilityEntry.getKey(), + capabilityEntry.getValue()); + targetFilterCapabilityType = targetFilterCapability; + } + } + } + else { + //no node (type) specified, it can be a straight capability type + targetFilterCapabilityDef = catalog.getTypeDefinition( + Construct.Capability, targetFilterCapability); + //here comes the odd part: it can still be a just a name in 
which + //case we should look at the requirement definition, see which + //capability (type) it indicates + assert targetCapabilityIsType; //cannot be otherwise, we'd need a node + targetFilterCapabilityDef = catalog.getTypeDefinition( + Construct.Capability, targetCapability); + targetFilterCapabilityType = targetCapability; + } + + if (targetFilterCapabilityDef == null) { + theContext.addError("Capability (name or type) " + targetFilterCapability + " is invalid: not a known capability (type) " + + ((targetNodeType != null) ? (" of node type" + targetNodeType) : ""), null); + continue; + } + + for (Map propertyFilter: + (List)jxPath.getValue("/node_filter/capabilities/" + targetFilterCapability + "/properties")) { + //check that the properties are in the scope of the + //capability definition + for (Object propertyName: propertyFilter.keySet()) { + if (null == findTypeFacetByName(Construct.Capability, + targetCapability, + Facet.properties, + propertyName.toString())) { + theContext.addError("The capability filter " + targetFilterCapability + " property " + propertyName + " is invalid: target capability " + targetFilterCapabilityType + " does not have such a property", null); + } + } + } + } + } + + } + finally { + theContext//.exit() + .exit(); + } + } + + protected void check_capabilities_assignment_definition( + Map theCapabilities, CheckContext theContext) { + theContext.enter("capabilities"); + try { + if(!checkDefinition("capabilities", theCapabilities, theContext)) + return; + + //the node type for the node template enclosing these requirements + String nodeType = (String)catalog.getTemplate( + theContext.target(), + Construct.Node, + theContext.enclosingConstruct(Construct.Node).name()) + .get("type"); + + for (Iterator> ci = + theCapabilities.entrySet().iterator(); + ci.hasNext(); ) { + + Map.Entry ce = ci.next(); + + String capabilityName = ce.getKey(); + Map capabilityDef = findTypeFacetByName(Construct.Node, nodeType, + Facet.capabilities, 
capabilityName); + if (capabilityDef == null) { + theContext.addError("No capability " + capabilityName + " was defined for the node type " + nodeType, null); + continue; + } + + check_capability_assignment_definition( + capabilityName, ce.getValue(), capabilityDef,theContext); + } + } + finally { + theContext.exit(); + } + } + + protected void check_capability_assignment_definition( + String theCapabilityName, + Map theAssignment, + Map theDefinition, + CheckContext theContext) { + + theContext.enter(theCapabilityName, Construct.Capability); + try { + String capabilityType = (String)theDefinition.get("type"); + //list of property and attributes assignments + checkFacet(Construct.Capability, theAssignment, capabilityType, + Facet.properties, theContext); + checkFacet(Construct.Capability, theAssignment, capabilityType, + Facet.attributes, theContext); + } + finally { + theContext.exit(); + } + } + + /** */ + protected void check_template_interfaces_definition( + Map theInterfaces, + CheckContext theContext) { + theContext.enter("interfaces"); + try { + if(!checkDefinition("interfaces", theInterfaces, theContext)) + return; + + //the node type for the node template enclosing these requirements + String nodeType = (String)catalog.getTemplate( + theContext.target(), + Construct.Node, + theContext.enclosingConstruct(Construct.Node).name()) + .get("type"); + + for (Iterator> ii = + theInterfaces.entrySet().iterator(); + ii.hasNext(); ) { + + Map.Entry ie = ii.next(); + + String interfaceName = ie.getKey(); + Map interfaceDef = findTypeFacetByName(Construct.Node, nodeType, + Facet.interfaces, interfaceName); + + if (interfaceDef == null) { + /* this is subject to augmentation: this could be a warning but not an error */ + theContext.addError(Message.INVALID_INTERFACE_REFERENCE, nodeType, interfaceName, Construct.Node); + continue; + } + + check_template_interface_definition( + interfaceName, ie.getValue(), interfaceDef, theContext); + } + } + finally { + 
theContext.exit(); + } + } + + protected void check_template_interface_definition( + String theInterfaceName, + Map theAssignment, + Map theDefinition, + CheckContext theContext) { + + theContext.enter(theInterfaceName, Construct.Interface); + try { + //check the assignment of the common inputs +//System.out.println("Checking interface inputs for " + theInterfaceName); + checkFacet(Construct.Interface, + theAssignment, + (String)theDefinition.get("type"), + Facet.inputs, + theContext); + + //check the assignment of inputs in each operation + //unfortunately operations are not defined as a facet (grouped under a + //facet name) i.e. operations.. + +/* + Map inputsDefs = theDefinition.get("inputs"); + Map inputs = theAssignment.get("inputs"); + + if (inputs != null && !inputs.isEmpty()) { + for (Map.Entry inputEntry: input.entrySet()) { + //check the input name part of the definition + if (inputDefs != null && inputDefs.containsKey(inputEntry.getKey())) { + checkDataValuation(inputEntry.getValue(), + inputsDefs.get(inputEntry.getKey()), + theContext); + } + else { + theContext.addError("No input " + inputEntry.getKey() + " was defined for the interface " + theInterfaceName, null); + } + } + } +*/ +/* + String interfaceType = (String)theDefinition.get("type"); + //list of property and attributes assignments + checkFacet(Construct.Interface, theAssignment, interfaceType, + "inputs", theContext); +*/ + //the interface operations: can new operations be defined here?? 
+ } + finally { + theContext.exit(); + } + } + + + @Checks(path="/topology_template/artifacts") + protected void check_template_artifacts_definition( + Map theDefinition, + CheckContext theContext) { + theContext.enter("artifacts"); + try { + } + finally { + theContext.exit(); + } + } + + protected void check_template_artifact_definition( + String theArtifactName, + Map theAssignment, + Map theDefinition, + CheckContext theContext) { + + theContext.enter(theArtifactName, Construct.Artifact); + try { + + } + finally { + theContext.exit(); + } + } + + //generic checking actions, not related to validation rules + + + /* the type can be: + * a known type: predefined or user-defined + * a collection (list or map) and then check that the entry_schema points to one of the first two cases (is that it?) + */ + protected boolean checkDataType( + String theName, Map theSpec, CheckContext theContext) { + + if (!checkTypeReference(Construct.Data, theContext, (String)theSpec.get("type"))) + return false; + + String type = (String)theSpec.get("type"); + if (/*isCollectionType(type)*/ + "list".equals(type) || "map".equals(type)) { + Map entry_schema = (Map)theSpec.get("entry_schema"); + if (entry_schema == null) { + //maybe issue a warning ?? or is 'string' the default?? + return true; + } + + if (!catalog.hasType(Construct.Data,(String)entry_schema.get("type"))) { + theContext.addError("Unknown entry_schema type: " + entry_schema, null); + return false; + } + } + return true; + } + + /* + * generic checks for a type specification + */ + protected boolean checkTypeConstruct(Construct theConstruct, + String theTypeName, + Map theDef, + CheckContext theContext) { + /* There is a 'weakness' in the super-type check before: the search for the supertype is done globally and + * not strictly on the 'import' path, i.e. 
one should explore for the super-type definition the target sub-tree + * starting at the current target and not ALL the targets + */ + String parentType = (String)theDef.get("derived_from"); + if (parentType != null && !catalog.hasType(theConstruct, parentType)) { + theContext.addError( + Message.INVALID_TYPE_REFERENCE, "derived_from", parentType, theConstruct); + return false; + } + return true; + } + + /* Check that a particular facet (properties, attributes) of a construct type + * (node type, capability type, etc) is correctly (consistenly) defined + * across a type hierarchy + */ + protected boolean checkTypeConstructFacet(Construct theConstruct, + String theTypeName, + Map theTypeSpec, + Facet theFacet, + CheckContext theContext) { + Map defs = + (Map)theTypeSpec.get(theFacet.name()); + if (null == defs) { + return true; + } + + boolean res = true; + + //given that the type was cataloged there will be at least one entry + Iterator> i = + catalog.hierarchy(theConstruct, theTypeName); + if (!i.hasNext()) { + theContext.addError( + "The type " + theTypeName + " needs to be cataloged before attempting 'checkTypeConstruct'", null); + return false; + } + i.next(); //skip self + while(i.hasNext()) { + Map.Entry e = i.next(); + Map superDefs = (Map)e.getValue() + .get(theFacet.name()); + if (null == superDefs) { + continue; + } + //this computes entries that appear on both collections but with different values, i.e. 
the re-defined properties + Map> diff = Maps.difference(defs, superDefs).entriesDiffering(); + + for (Iterator>> di = diff.entrySet().iterator(); di.hasNext(); ) { + Map.Entry> de = di.next(); + MapDifference.ValueDifference dediff = de.getValue(); + log.finest( + theConstruct + " type " + theFacet + ": " + de.getKey() + " has been re-defined between the " + theConstruct + " types " + e.getKey() + " and " + theTypeName); + //for now we just check that the type is consistenly re-declared + //if (!dediff.leftValue().get("type").equals(dediff.rightValue().get("type"))) { + if (!this.catalog.isDerivedFrom(theFacet.construct(), + (String)dediff.leftValue().get("type"), + (String)dediff.rightValue().get("type"))) { + theContext.addError( + theConstruct + " type " + theFacet + ", redefiniton changed its type: "+ de.getKey() + " has been re-defined between the " + theConstruct + " types " + e.getKey() + " and " + theTypeName + " in an incompatible manner", null); + res = false; + } + } + } + + return res; + } + + /* + * Checks the validity of a certain facet of a construct + * (properties of a node) across a type hierarchy. + * For now the check is limited to a verifying that a a facet was declared + * somewhere in the construct type hierarchy (a node template property has + * been declared in the node type hierarchy). + * + * 2 versions with the more generic allowing the specification of the type + * to be done explicitly. + */ + protected boolean checkFacet(Construct theConstruct, + Map theSpec, + Facet theFacet, + CheckContext theContext) { + return checkFacet(theConstruct, theSpec, null, theFacet, theContext); + } + + /** + * We walk the hierarchy and verify the assignment of a property with respect to its definition. + * We also collect the names of those properties defined as required but for which no assignment was provided. 
+ */ + protected boolean checkFacet(Construct theConstruct, + Map theSpec, + String theSpecType, + Facet theFacet, + CheckContext theContext) { + + Map defs = (Map)theSpec.get(theFacet.name()); + if (null == defs) { + return true; + } + defs = Maps.newHashMap(defs); // + + boolean res = true; + if (theSpecType == null) { + theSpecType = (String)theSpec.get("type"); + } + if (theSpecType == null) { + theContext.addError("No specification type available", null); + return false; + } + + Map missed = new HashMap(); //keeps track of the missing required properties, the value is + //false if a default was found along the hierarchy + Iterator> i = + catalog.hierarchy(theConstruct, theSpecType); + while (i.hasNext() && !defs.isEmpty()) { + Map.Entry type = i.next(); + +//System.out.println(" **** type : " + type.getKey() ); + + Map typeDefs = (Map)type.getValue() + .get(theFacet.name()); + if (null == typeDefs) { + continue; + } + + MapDifference diff = Maps.difference(defs, typeDefs); + + //this are the ones this type and the spec have in common (same key, + //different values) + Map> facetDefs = + diff.entriesDiffering(); + //TODO: this assumes the definition of the facet is not cumulative, i.e. + //subtypes 'add' something to the definition provided by the super-types + //it considers the most specialized definition stands on its own + for (MapDifference.ValueDifference valdef: facetDefs.values()) { + checkDataValuation(valdef.leftValue(), valdef.rightValue(), theContext); + } + + //the ones that appear in the type but not in spec; ensure the type does not requires them. 
+/* + Map unassigned = diff.entriesOnlyOnRight(); + for (Map.Entry unassignedEntry: unassigned.entrySet()) { + +System.out.println(" **** unassigned -> " + unassignedEntry.getKey() + " : " + unassignedEntry.getValue()); + + if (unassignedEntry.getValue().containsKey("required")) { + Boolean required = (Boolean)unassignedEntry.getValue().get("required"); + System.out.println(" **** before " + unassignedEntry.getKey() + ", required " + required + " = " + missed.get(unassignedEntry.getKey())); + missed.compute(unassignedEntry.getKey(), + (k, v) -> v == null ? (required.booleanValue() ? (byte)1 + : (byte)0) + : (required.booleanValue() ? (byte)(v.byteValue() | 0x01) + : (byte)(v.byteValue() & 0x02))); + + + System.out.println(" **** after " + unassignedEntry.getKey() + ", required " + required + " = " + missed.get(unassignedEntry.getKey())); + } + if (unassignedEntry.getValue().containsKey("default")) { + System.out.println(" **** before " + unassignedEntry.getKey() + ", default = " + missed.get(unassignedEntry.getKey())); + missed.compute(unassignedEntry.getKey(), + (k, v) -> v == null ? (byte)2 + : (byte)(v.byteValue() | 0x02)); + System.out.println(" **** after " + unassignedEntry.getKey() + ", default = " + missed.get(unassignedEntry.getKey())); + } + } +*/ + //remove from properties all those that appear in this type: unfortunately this returns an unmodifiable map .. 
+ defs = Maps.newHashMap(diff.entriesOnlyOnLeft()); + } + + if (!defs.isEmpty()) { + theContext.addError(Message.INVALID_FACET_REFERENCE, theConstruct, theFacet, theSpecType, defs); + res = false; + } + + if (!missed.isEmpty()) { + List missedNames = + missed.entrySet() + .stream() + .filter(e -> e.getValue().byteValue() == (byte)1) + .map(e -> e.getKey()) + .collect(Collectors.toList()); + if (!missedNames.isEmpty()) { + theContext.addError(theConstruct + " " + theFacet + " missing required values for: " + missedNames, null); + res = false; + } + } + + return res; + } + + /* Augmentation occurs in cases such as the declaration of capabilities within a node type. + * In such cases the construct facets (the capabilitity's properties) can redefine (augment) the + * specification found in the construct type. + */ + protected boolean checkFacetAugmentation(Construct theConstruct, + Map theSpec, + Facet theFacet, + CheckContext theContext) { + return checkFacetAugmentation(theConstruct, theSpec, null, theFacet, theContext); + } + + protected boolean checkFacetAugmentation(Construct theConstruct, + Map theSpec, + String theSpecType, + Facet theFacet, + CheckContext theContext) { + + Map augs = (Map)theSpec.get(theFacet.name()); + if (null == augs) { + return true; + } + + boolean res = true; + if (theSpecType == null) { + theSpecType = (String)theSpec.get("type"); + } + if (theSpecType == null) { + theContext.addError("No specification type available", null); + return false; + } + + for (Iterator> ai = augs.entrySet().iterator(); ai.hasNext(); ) { + Map.Entry ae = ai.next(); + + //make sure it was declared by the type + Map facetDef = catalog.getFacetDefinition(theConstruct, theSpecType, theFacet, ae.getKey()); + if (facetDef == null) { + theContext.addError("Unknown " + theConstruct + " " + theFacet + " (not declared by the type " + theSpecType + ") were used: " + ae.getKey(), null); + res = false; + continue; + } + + //check the compatibility of the augmentation: only 
the type cannot be changed + //can the type be changed in a compatible manner ?? + if (!facetDef.get("type").equals(ae.getValue().get("type"))) { + theContext.addError(theConstruct + " " + theFacet + " " + ae.getKey() + " has a different type than its definition: " + ae.getValue().get("type") + " instead of " + facetDef.get("type"), null); + res = false; + continue; + } + + //check any valuation (here just defaults) + Object defaultValue = ae.getValue().get("default"); + if (defaultValue != null) { + checkDataValuation(defaultValue, ae.getValue(), theContext); + } + } + + return res; + } + + protected boolean catalogTypes(Construct theConstruct, Map theTypes, CheckContext theContext) { + + boolean res = true; + for (Map.Entry typeEntry: theTypes.entrySet()) { + res &= catalogType(theConstruct, typeEntry.getKey(), typeEntry.getValue(), theContext); + } + + return res; + } + + /* */ + protected boolean catalogType(Construct theConstruct, + String theName, + Map theDef, + CheckContext theContext) { + + if (!catalog.addType(theConstruct, theName, theDef)) { + theContext.addError(theConstruct + " type " + theName + " re-declaration", null); + return false; + } + log.finer(theConstruct + " type " + theName + " has been cataloged"); + + return true; + } + + + /* */ + protected boolean checkTypeReference(Construct theConstruct, + CheckContext theContext, + String... theTypeNames) { + boolean res = true; + for (String typeName: theTypeNames) { + if (!isTypeReference(theConstruct, typeName)) { + theContext.addError(Message.INVALID_TYPE_REFERENCE, "", typeName, theConstruct); + res = false; + } + } + return res; + } + + protected boolean isTypeReference(Construct theConstruct, + String theTypeName) { + return this.catalog.hasType(theConstruct, theTypeName); + } + + /* node or relationship templates */ + protected boolean checkTemplateReference(Construct theConstruct, + CheckContext theContext, + String... 
theTemplateNames) { + boolean res = true; + for (String templateName: theTemplateNames) { + if (!isTemplateReference(theConstruct, theContext, templateName)) { + theContext.addError(Message.INVALID_TEMPLATE_REFERENCE, "", templateName, theConstruct); + res = false; + } + } + return res; + } + + protected boolean catalogTemplates(Construct theConstruct, + Map theTemplates, + CheckContext theContext) { + + boolean res = true; + for (Map.Entry typeEntry: theTemplates.entrySet()) { + res &= catalogTemplate(theConstruct, typeEntry.getKey(), typeEntry.getValue(), theContext); + } + + return res; + } + + protected boolean catalogTemplate(Construct theConstruct, + String theName, + Map theDef, + CheckContext theContext) { + try { + catalog.addTemplate(theContext.target(), theConstruct, theName, theDef); + log.finer(theConstruct + " " + theName + " has been cataloged"); + } + catch(CatalogException cx) { + theContext.addError("Failed to catalog " + theConstruct + " " + theName, cx); + return false; + } + return true; + } + + protected boolean isTemplateReference(Construct theConstruct, + CheckContext theContext, + String theTemplateName) { + return this.catalog.hasTemplate(theContext.target(),theConstruct, theTemplateName); + } + + /* + * For inputs/properties/attributes/(parameters). It is the caller's + * responsability to provide the value (from a 'default', inlined, ..) + * + * @param theDef the definition of the given construct/facet as it appears in + * its enclosing type definition. 
+ * @param + */ + protected boolean checkDataValuation(Object theExpr, + Map theDef, + CheckContext theContext) { + //first check if the expression is a function, if not handle it as a value assignment + Data.Function f = Data.function(theExpr); + if (f != null) { + return f.evaluator() + .eval(theExpr, theDef, theContext); + } + else { + Data.Type type = Data.typeByName((String)theDef.get("type")); + if (type != null) { +//System.out.println("Evaluating " + theExpr + " as " + theExpr.getClass().getName() + " against " + theDef); + Data.Evaluator evaluator = null; + + evaluator = type.evaluator(); + if (evaluator == null) { + log.info("No value evaluator available for type " + type); + } + else { + if (theExpr != null) { + if (!evaluator.eval(theExpr, theDef, theContext)) { + return false; + } + } + } + + evaluator = type.constraintsEvaluator(); + if (evaluator == null) { + log.info("No constraints evaluator available for type " + type); + } + else { + if (theExpr != null) { + if (!evaluator.eval(theExpr, theDef, theContext)) { + return false; + } + } + else { + //should have a null value validator + } + } + + return true; + } + else { + theContext.addError("Expression " + theExpr + " of " + theDef + " could not be evaluated", null); + return false; + } + } + } + + /** Given the type of a certain construct (node type for example), look up + * in one of its facets (properties, capabilities, ..) for one of the given + * facet type (if looking in property, one of the given data type). + * @return a map of all facets of the given type, will be empty to signal + * none found + * + * Should we look for a facet construct of a compatible type: any type derived + * from the given facet's construct type?? 
+ */ + protected Map + findTypeFacetByType(Construct theTypeConstruct, + String theTypeName, + Facet theFacet, + String theFacetType) { + + log.logp(Level.FINER, "", "findTypeFacetByType", theTypeName + " " + theTypeConstruct + ": " + theFacetType + " " + theFacet); + Map res= new HashMap(); + Iterator> i = + catalog.hierarchy(theTypeConstruct, theTypeName); + while (i.hasNext()) { + Map.Entry typeSpec = i.next(); + log.logp(Level.FINER, "", "findTypeFacetByType", "Checking " + theTypeConstruct + " type " + typeSpec.getKey() ); + Map typeFacet = + (Map)typeSpec.getValue().get(theFacet.name()); + if (typeFacet == null) { + continue; + } + Iterator> fi = typeFacet.entrySet().iterator(); + while(fi.hasNext()) { + Map.Entry facet = fi.next(); + String facetType = (String)facet.getValue().get("type"); + log.logp(Level.FINER, "", "findTypeFacetByType", "Checking " + facet.getKey() + " type " + facetType); + + //here is the question: do we look for an exact match or .. + //now we check that the type has a capability of a type compatible + //(equal or derived from) the given capability type. 
+ if (catalog.isDerivedFrom( + theFacet.construct(), /*theFacetType, facetType*/facetType, theFacetType)) { + //res.merge(facet.getKey(), facet.getValue(), (currDef, newDef)->(merge the base class definition in the existing definition but provide the result in a new map as to avoid changing the stored defintitions)); + res.putIfAbsent(facet.getKey(), facet.getValue()); + } + } + } + log.logp(Level.FINER, "", "findTypeFacetByType", "found " + res); + + return res; + } + + /* */ + protected Map + findTypeFacetByName(Construct theTypeConstruct, + String theTypeName, + Facet theFacet, + String theFacetName) { + log.logp(Level.FINER, "", "findTypeFacetByName", theTypeConstruct + " " + theTypeName); + Iterator> i = + catalog.hierarchy(theTypeConstruct, theTypeName); + while (i.hasNext()) { + Map.Entry typeSpec = i.next(); + log.logp(Level.FINER, "", "findTypeFacetByName", "Checking " + theTypeConstruct + " type " + typeSpec.getKey() ); + Map typeFacet = + (Map)typeSpec.getValue().get(theFacet.name()); + if (typeFacet == null) { + continue; + } + Map facet = typeFacet.get(theFacetName); + if (facet != null) { + return facet; + } + } + return null; + } + + /* Requirements are the odd ball as they are structured as a sequence .. 
+ */ + protected Map findNodeTypeRequirementByName( + String theNodeType, String theRequirementName) { + log.logp(Level.FINER, "", "findNodeTypeRequirementByName", theNodeType + "/" + theRequirementName); + Iterator> i = + catalog.hierarchy(Construct.Node, theNodeType); + while (i.hasNext()) { + Map.Entry nodeType = i.next(); + log.logp(Level.FINER, "", "findNodeTypeRequirementByName", "Checking node type " + nodeType.getKey() ); + List> nodeTypeRequirements = + (List>)nodeType.getValue().get("requirements"); + if (nodeTypeRequirements == null) { + continue; + } + + for (Map requirement: nodeTypeRequirements) { + Map requirementDef = requirement.get(theRequirementName); + if (requirementDef != null) { + return requirementDef; + } + } + } + return null; + } + + /* + * + */ + public Map findNodeTemplateInterfaceOperation( + Target theTarget, String theNodeName, String theInterfaceName, String theOperationName) { + + Map nodeDefinition = (Map)catalog.getTemplate(theTarget, Construct.Node, theNodeName); + if (nodeDefinition == null) + return null; + + Map interfaces = (Map)nodeDefinition.get("interfaces"); + if (interfaces == null) + return null; + + Map interfaceDef = (Map)interfaces.get(theInterfaceName); + if (interfaceDef == null) + return null; + + return (Map)interfaceDef.get(theOperationName); + } + + public Map findNodeTypeInterfaceOperation( + String theNodeType, String theInterfaceName, String theOperationName) { + + return null; + } + + /* + * Assumes that at this time the constrints (syntax) for all names (construct + * types, constructs, facets: ) are the same. + */ + public boolean checkName(String theName, + CheckContext theContext) { + return true; + } + + /* + * Additional generics checks to be performed on any definition: construct, + * construct types, etc .. 
+ */ + public boolean checkDefinition(String theName, + Map theDefinition, + CheckContext theContext) { + if (theDefinition == null) { + theContext.addError("Missing definition for " + theName, null); + return false; + } + + if (theDefinition.isEmpty()) { + theContext.addError("Empty definition for " + theName, null); + return false; + } + + return true; + } + + public boolean checkDefinition(String theName, + List theDefinition, + CheckContext theContext) { + if (theDefinition == null) { + theContext.addError("Missing definition for " + theName, null); + return false; + } + + if (theDefinition.isEmpty()) { + theContext.addError("Empty definition for " + theName, null); + return false; + } + + return true; + } + + /* I'd rather validate each import once at it's own rule time (see next method) but unfortunately the canonicals + * are not visible 'right away' (they are applied at the end of the pre-validation but not visible in the + * post-validation of the same rule because of kwalify validator implementation). 
+ */ + @Validates(rule="service_template_definition",timing=Validates.Timing.post) + protected void validate_imports( + Object theValue, Rule theRule, Validator.ValidationContext theContext) { + + Map template = (Map)theValue; + List imports = (List)template.get("imports"); + + if (imports != null) { + for (Map importEntry: imports) { + validate_import(mapEntry(importEntry).getValue(), theRule, theContext); + } + } + } + + //@Validates(rule="import_definition",timing=Validates.Timing.post) + protected void validate_import( + Object theValue, Rule theRule, Validator.ValidationContext theContext) { + + log.entering("", "import", theContext.getPath()); + + TOSCAValidator validator = (TOSCAValidator)theContext.getValidator(); + Target tgt = validator.getTarget(); + + Map def = (Map)theValue; //importEntry.getValue(); + log.fine("Processing import " + def); + + String tfile = (String)def.get("file"); + Target tgti = this.locator.resolve(tfile); + if (tgti == null) { + theContext.addError("Failure to resolve import '" + def + "', imported from " + tgt, theRule, null, null); + return; + } + log.finer("Import " + def + " located at " + tgti.getLocation()); + + if (this.catalog.addTarget(tgti, tgt)) { + + //we've never seen this import (location) before + try { + List tgtis = parseTarget(tgti); + if (tgtis.isEmpty()) + return; //continue; + + if (tgtis.size() > 1) { + theContext.addError("Import '" + tgti + "', imported from " + tgt + ", contains multiple yaml documents" , theRule, null, null); + return; //continue; + } + + tgti = tgtis.get(0); + if (tgt.getReport().hasErrors()) { + theContext.addError("Failure parsing import '" + tgti + "',imported from " + tgt, theRule, null, null); + return; //continue; + } + + validateTarget(tgti); + if (tgt.getReport().hasErrors()) { + theContext.addError("Failure validating import '" + tgti + "',imported from " + tgt, theRule, null, null); + return; //continue; + } + } + catch (CheckerException cx) { + theContext.addError("Failure 
validating import '" + tgti + "',imported from " + tgt, theRule, cx, null); + } + } + + //replace with the actual location (also because this is what they get + //index by .. bad, this exposed catalog inner workings) + def.put("file", tgti.getLocation()); + } + + /* plenty of one entry maps around */ + private Map.Entry mapEntry(Object theMap) { + return (Map.Entry)((Map)theMap).entrySet().iterator().next(); + } + + + /* */ + protected static Catalog commonsCatalog = null; + + /* + * commons are built-in and supposed to be bulletproof so any error in here + * goes out loud. + */ + protected static Catalog commonsCatalog() { + + synchronized (Catalog.class) { + + if (commonsCatalog != null) { + return commonsCatalog; + } + + //if other templates are going to be part of the common type system + //add them to this list. order is relevant. + final String[] commons = new String[] { + "tosca/tosca-common-types.yaml" }; + + Checker commonsChecker = null; + try { + commonsChecker = new Checker(); + + for (String common: commons) { + commonsChecker.check(common, buildCatalog(false)); + Report commonsReport = commonsChecker.targets().iterator().next().getReport(); + + if (commonsReport.hasErrors()) { + throw new RuntimeException("Failed to process commons:\n" + + commonsReport); + } + } + } + catch(CheckerException cx) { + throw new RuntimeException("Failed to process commons", cx); + } + + return commonsCatalog = commonsChecker.catalog; + } + } + + public static Catalog buildCatalog() { + return buildCatalog(true); + } + + /* + */ + public static Catalog buildCatalog(boolean doCommons) { + + Catalog catalog = new Catalog(doCommons ? 
commonsCatalog() : null); + if (!doCommons) { + //add core TOSCA types + for (Data.CoreType type: Data.CoreType.class.getEnumConstants()) { + catalog.addType(Construct.Data, type.toString(), Collections.emptyMap()); + } + } + return catalog; + } + + protected void checks(String theName, + Object theTarget, + CheckContext theContext) { + + handles("checks:" + theContext.getPath(theName), theTarget, theContext); + } + + protected void catalogs(String theName, + Object theTarget, + CheckContext theContext) { + + handles("catalogs:" + theContext.getPath(theName), theTarget, theContext); + } + + protected boolean validates(Validates.Timing theTiming, + Object theTarget, + Rule theRule, + Validator.ValidationContext theContext) { + //might look odd but we need both 'handles' call to be executed + boolean validated = + handles(theTiming + "-validates:" + theRule.getName(), theTarget, theRule, theContext); + return handles(theTiming + "-validates:", theTarget, theRule, theContext) || validated; + } + + /* + * allow the handlers to return a boolean .. only do this in order to accomodate the Canonical's way of avoiding + * validation when a short form is encoutered. + * @return true if any handler returned true (if they returned something at all), false otherwise (even when no + * handlers were found) + */ + protected boolean handles(String theHandlerKey, Object... theArgs) { + + boolean handled = false; + Map entries = handlers.row(theHandlerKey); + if (entries != null) { + for (Map.Entry entry: entries.entrySet()) { + Object res = null; + try { + res = entry.getKey().invoke(entry.getValue(), theArgs); + } + catch (Exception x) { + log.log(Level.WARNING, theHandlerKey + " by " + entry.getKey() + " failed", x); + } + handled |= res == null ? 
false : (res instanceof Boolean && ((Boolean)res).booleanValue()); + } + } + return handled; + } + + /** + */ + public class TOSCAValidator extends Validator { + + //what were validating + private Target target; + + public TOSCAValidator(Target theTarget, Object theSchema) + throws SchemaException { + super(theSchema); + this.target = theTarget; + } + + public Target getTarget() { + return this.target; + } + + /* hook method called by Validator#validate() + */ + protected boolean preValidationHook(Object value, Rule rule, ValidationContext context) { + + return validates(Validates.Timing.pre, value, rule, context); + } + + /* + * Only gets invoked once the value was succesfully verified against the syntax indicated by the given rule. + */ + protected void postValidationHook(Object value, + Rule rule, + ValidationContext context) { + validates(Validates.Timing.post, value, rule, context); + } + + } + + /** + * Maintains state across the checking process. + */ + public class CheckContext { + + public class Step { + + private final Construct construct; + private final String name; + private final Object info; + + public Step(String theName, Construct theConstruct, Object theInfo) { + this.construct = theConstruct; + this.name = theName; + this.info = theInfo; + } + + public Construct construct() { return this.construct; } + public String name() { return this.name; } + public Object info() { return this.info; } + } + + + private Target target; + private ArrayList steps = new ArrayList(20); //artificial max nesting .. 
+ + + public CheckContext(Target theTarget) { + this.target = theTarget; + } + + public CheckContext enter(String theName) { + return enter(theName, null, null); + } + + public CheckContext enter(String theName, Construct theConstruct) { + return enter(theName, theConstruct, null); + } + + public CheckContext enter(String theName, Construct theConstruct, Object theInfo) { + this.steps.add(new Step(theName, theConstruct, theInfo)); + Checker.this.log.entering("check", theName, getPath()); + return this; + } + + public CheckContext exit() { + Step step = this.steps.get(this.steps.size()-1); + Checker.this.log.exiting("check", step.name(), getPath()); + this.steps.remove(this.steps.size()-1); + return this; + } + + public String getPath() { + return buildPath(null); + } + + public String getPath(String theNextElem) { + return buildPath(theNextElem); + } + + protected String buildPath(String theElem) { + StringBuffer sb = new StringBuffer(""); + for (Step s: this.steps) + sb.append(s.name()) + .append("/"); + if (theElem != null) + sb.append(theElem) + .append("/"); + + return sb.substring(0,sb.length()-1); + } + + public Step enclosingConstruct(Construct theConstruct) { + for (int i = this.steps.size()-1; i > 0; i--) { + Construct c = this.steps.get(i).construct(); + if (c != null && c.equals(theConstruct)) { + return this.steps.get(i); + } + } + return null; + } + + public Step enclosingElement(String theName) { + for (int i = this.steps.size()-1; i > 0; i--) { + String n = this.steps.get(i).name(); + if (n != null && n.equals(theName)) { + return this.steps.get(i); + } + } + return null; + } + + public Step enclosing() { + if (this.steps.size() > 0) { + return this.steps.get(this.steps.size()-1); + } + return null; + } + + public CheckContext addError(String theMessage, Throwable theCause) { + this.target.report(new TargetError("", getPath(), theMessage, theCause)); + return this; + } + + public CheckContext addError(Message theMsg, Object... 
theArgs) { + this.target.report(new TargetError("", getPath(), messages.format(theMsg, theArgs), null)); + return this; + } + + public boolean hasErrors() { + return this.target.getReport().hasErrors(); + } + + public Checker checker() { + return Checker.this; + } + + public Catalog catalog() { + return Checker.this.catalog; + } + + public Target target() { + return this.target; + } + + public String toString() { + return "CheckContext(" + this.target.getLocation() + "," + getPath() + ")"; + } + } + + public static class CheckerConfiguration { + + private boolean allowAugmentation = true; + private String defaultImportsPath = null; + private String defaultCheckerRoots = null; + + protected CheckerConfiguration() { + } + + public CheckerConfiguration allowAugmentation(boolean doAllow) { + this.allowAugmentation = doAllow; + return this; + } + + public boolean allowAugmentation() { + return this.allowAugmentation; + } + + public CheckerConfiguration defaultImportsPath(String thePath) { + this.defaultImportsPath = thePath; + return this; + } + + public String defaultImportsPath() { + return this.defaultImportsPath; + } + + } + +} + diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/CheckerException.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/CheckerException.java new file mode 100644 index 0000000..3751bb5 --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/CheckerException.java @@ -0,0 +1,30 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. + */ +package org.onap.tosca.checker; + + +/** + * A checker exception represents an error that stops the checker from + * completing its task. + */ +public class CheckerException extends Exception { + + public CheckerException(String theMsg, Throwable theCause) { + super(theMsg, theCause); + } + + public CheckerException(String theMsg) { + super(theMsg); + } + +} diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/CommonLocator.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/CommonLocator.java new file mode 100644 index 0000000..f650f9e --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/CommonLocator.java @@ -0,0 +1,156 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. 
+ */ +package org.onap.tosca.checker; + +import java.io.InputStream; +import java.io.IOException; + +import java.net.URL; +import java.net.URI; +import java.net.URISyntaxException; + +import java.nio.file.Paths; + +import java.util.Set; +import java.util.LinkedHashSet; + +import java.util.logging.Level; +import java.util.logging.Logger; + +import com.google.common.collect.Iterables; + + +public class CommonLocator implements TargetLocator { + + private static Logger log = + Logger.getLogger("com.att.research.is.tosca.yaml.TargetLocator"); + + private Set searchPaths = new LinkedHashSet(); + + /* will create a locator with 2 default search paths: the file directory + * from where the app was and the jar from which this checker (actually this + * class) was loaded */ + public CommonLocator() { + addSearchPath( + Paths.get(".").toAbsolutePath().normalize().toUri()); + } + + public CommonLocator(String... theSearchPaths) { + for (String path: theSearchPaths) { + addSearchPath(path); + } + } + + public boolean addSearchPath(URI theURI) { + + if (!theURI.isAbsolute()) { + log.log(Level.WARNING, "Search paths must be absolute uris: " + theURI); + return false; + } + + return searchPaths.add(theURI); + } + + public boolean addSearchPath(String thePath) { + URI suri = null; + try { + suri = new URI(thePath); + } + catch(URISyntaxException urisx) { + log.log(Level.WARNING, "Invalid search path: " + thePath, urisx); + return false; + } + + return addSearchPath(suri); + } + + public Iterable searchPaths() { + return Iterables.unmodifiableIterable(this.searchPaths); + } + + /** + * Takes the given path and first URI resolves it and then attempts to open + * it (a way of verifying its existence) against each search path and stops + * at the first succesful test. 
+ */ + public Target resolve(String theName) { + URI puri = null; + InputStream pis = null; + + //try absolute + try { + puri = new URI(theName); + if (puri.isAbsolute()) { + try { + pis = puri.toURL().openStream(); + } + catch (IOException iox) { + log.log(Level.WARNING, "The path " + theName + " is an absolute uri but it canont be opened", iox); + return null; + } + } + } + catch(URISyntaxException urisx) { + log.log(Level.FINER, "TargetResolver failed attempting " + puri, urisx); + //keep it silent but what are the chances .. + } + + //try relative to the search paths + for (URI suri: searchPaths) { + try { + puri = suri.resolve(theName); + log.finer("TargetResolver trying " + puri); + pis = puri.toURL().openStream(); + return new Target(theName, puri.normalize()); + } + catch (Exception x) { + log.log(Level.FINER, "TargetResolver failed attempting " + puri, x); + continue; + } + finally { + if (pis!= null) { + try { + pis.close(); + } + catch (IOException iox) { + } + } + } + } + + //try classpath + URL purl = getClass().getClassLoader().getResource(theName); + if (purl != null) { + try { + return new Target(theName, purl.toURI()); + } + catch (URISyntaxException urisx) { + } + } + + return null; + } + + public String toString() { + return "CommonLocator(" + this.searchPaths + ")"; + } + + + public static void main(String[] theArgs) { + TargetLocator tl = new CommonLocator(); + tl.addSearchPath(java.nio.file.Paths.get("").toUri()); + tl.addSearchPath("file:///"); + System.out.println( + tl.resolve(theArgs[0])); + } +} diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Construct.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Construct.java new file mode 100644 index 0000000..45108cb --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Construct.java @@ -0,0 +1,34 @@ +/* + * Copyright (c) 2017 . All rights reserved. 
+ * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. + */ +package org.onap.tosca.checker; + +/* + * What exactly is allowed to go in here is a subject of meditation :) I would have said 'elements with a type' but + * that will no cover Requirement and Workflow, or topology template top elements but won't cover others .. + * + * Properties/Attributes/Inputs/Outputs are just Data constructs under a particular name. + */ +public enum Construct { + Data, + Requirement, + Capability, + Relationship, + Artifact, + Interface, + Node, + Group, + Policy, + Workflow +} + + diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Data.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Data.java new file mode 100644 index 0000000..fc29dcf --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Data.java @@ -0,0 +1,923 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. + */ +package org.onap.tosca.checker; + +import java.lang.reflect.InvocationTargetException; + +import java.util.Collection; +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.HashMap; +import java.util.Iterator; +import java.util.EnumSet; + +import java.util.logging.Logger; +import java.util.logging.Level; + +import java.util.regex.Pattern; +import java.util.regex.PatternSyntaxException; + +import com.google.common.base.Preconditions; +import com.google.common.collect.Table; +import com.google.common.collect.HashBasedTable; + +/* + * tosca data type (primitive or user defined) evaluators and validators, used in checking valuations + */ +public class Data { + + private static Logger log = Logger.getLogger(Data.class.getName()); + + + private Data() { + } + + /* + */ + @FunctionalInterface + public static interface Evaluator { + + public boolean eval(Object theExpr, Map theDef, Checker.CheckContext theCtx); + } + + + /* data type processing */ + + private static Map typesByName = new HashMap(); + static { + //CoreType.String.toString(); + //CoreFunction.concat.toString(); + //Constraint.equal.toString(); + } + + + public static Data.Type typeByName(String theName) { + return typesByName.getOrDefault(theName, userType); + } +/* + public static Evaluator getTypeEvaluator(Type theType) { + } +*/ + + /* Needs a better name ?? RValue?? + * This is not an rvalue (C def) per se but the construct who's instances + * yield rvalues. It is a construct that yields data, not the data (yield) + * itself. 
+ */ + public static interface Type { + + public String name(); + + public Evaluator evaluator(); + + public Evaluator constraintsEvaluator(); + } + + /* generic placeholder + */ + private static Type userType = new Type() { + + public String name() { + return null; + } + + public Evaluator evaluator() { + return Data::evalUser; + } + + public Evaluator constraintsEvaluator() { + return Data::evalUserConstraints; + } + }; + + + public static enum CoreType implements Type { + + String("string", + (expr,def,ctx) -> expr != null && expr instanceof String, + Data::evalScalarConstraints), + Integer("integer", + (expr,def,ctx) -> Data.valueOf(ctx, expr, Integer.class), + Data::evalScalarConstraints), + Float("float", + (expr,def,ctx) -> Data.valueOf(ctx, expr, Double.class, Integer.class), + Data::evalScalarConstraints), + Boolean("boolean", + (expr,def,ctx) -> Data.valueOf(ctx, expr, Boolean.class), + Data::evalScalarConstraints), + Null("null", + (expr,def,ctx) -> expr.equals("null"), + null), + Timestamp("timestamp", + (expr,def,ctx) -> timestampRegex.matcher(expr.toString()).matches(), + null), + List("list", Data::evalList, Data::evalListConstraints), + Map("map", Data::evalMap, Data::evalMapConstraints), + Version("version", + (expr,def,ctx) -> versionRegex.matcher(expr.toString()).matches(), + null), + /* use a scanner and check that the upper bound is indeed greater than + * the lower bound */ + Range("range", + (expr,def,ctx) -> { return rangeRegex.matcher(expr.toString()).matches();}, + null ), + Size("scalar-unit.size", + (expr,def,ctx) -> sizeRegex.matcher(expr.toString()).matches(), + null), + Time("scalar-unit.time", + (expr,def,ctx) -> timeRegex.matcher(expr.toString()).matches(), + null), + Frequency("scalar-unit.frequency", + (expr,def,ctx) -> frequencyRegex.matcher(expr.toString()).matches(), + null); + + + private String toscaName; + private Evaluator valueEvaluator, + constraintsEvaluator; + + private CoreType(String theName, Evaluator 
theValueEvaluator, Evaluator theConstraintsEvaluator) { + this.toscaName = theName; + this.valueEvaluator = theValueEvaluator; + this.constraintsEvaluator = theConstraintsEvaluator; + + if (typesByName == null) + throw new RuntimeException("No type index available!"); + + typesByName.put(this.toscaName, this); + } + + public String toString() { + return this.toscaName; + } + + public Evaluator evaluator() { + return this.valueEvaluator; + } + + public Evaluator constraintsEvaluator() { + return this.constraintsEvaluator; + } + } + + private static Pattern timestampRegex = null, + versionRegex = null, + rangeRegex = null, + sizeRegex = null, + timeRegex = null, + frequencyRegex = null; + + static { + try { + timestampRegex = Pattern.compile( + "\\p{Digit}+"); //?? where to find the definition + + //.[.[.[-> constraints = + (List>)theDef.get("constraints"); + if (constraints == null) { + return true; + } + + //check value against constraints + boolean res = true; + for (Map constraintDef: constraints) { + Map.Entry constraintEntry = + constraintDef.entrySet().iterator().next(); + Data.Constraint constraint = constraintByName(constraintEntry.getKey()); + +// the def passed here includes all constraints, not necessary! we can pass +// simple constraintEntry.getValue() + Evaluator constraintEvaluator = getTypeConstraintEvaluator(type, constraint); + if (constraintEvaluator == null) { + log.info("No constaint evaluator available for " + type + "/" + constraint); + continue; + } + + if (!constraintEvaluator.eval(theVal, theDef, theCtx)) { + theCtx.addError("Value " + theVal + " failed constraint " + constraintEntry, null); + res = false; + } + } + return res; + } + + /* + * It assumes the specification is complete, i.e. it contains a valid + * entry_schema section. + * TODO: check constraints, i.e. 
entrySchema.get("constraints") + */ + public static boolean evalList(Object theVal, + Map theDef, + Checker.CheckContext theCtx) { + try { + return evalCollection((List)theVal, theDef, theCtx); + } + catch (ClassCastException ccx) { + theCtx.addError("Value " + theVal + " not a list", null); + return false; + } + } + + public static boolean evalMap(Object theVal, + Map theDef, + Checker.CheckContext theCtx) { + try { + return evalCollection(((Map)theVal).values(), theDef, theCtx); + } + catch (ClassCastException ccx) { + theCtx.addError("Value " + theVal + " not a map", null); + return false; + } + } + + + /** + * The elements of a collection can be of a core type or user defined type. + */ + private static boolean evalCollection(Collection theVals, + Map theDef, + Checker.CheckContext theCtx) { + +//System.out.println("evalCollection: " + theDef + ", " + theVals); + + Data.Type entryType = null; + Map entryTypeDef = (Map)theDef.get("entry_schema"); + if (null != entryTypeDef) + entryType = typeByName((String)entryTypeDef.get("type")); + +//System.out.println("evalCollection, entry definition: " + entryTypeDef); + boolean res = true; + for (Object val: theVals) { + //check if the value is not a function call + Data.Function f = Data.function(val); + if (f != null && + f.evaluator().eval(val, entryTypeDef, theCtx)) { + res = false; + } + else if (entryType != null && + !entryType.evaluator().eval(val, entryTypeDef, theCtx)) { + res= false; + //the error should hav been reported by the particular evaluator + //theCtx.addError("Value " + val + " failed evaluation", null); + } + } + return res; + } + + public static boolean evalListConstraints(Object theVal, + Map theDef, + Checker.CheckContext theCtx) { + return evalCollectionConstraints((List)theVal, theDef, theCtx); + } + + public static boolean evalMapConstraints(Object theVal, + Map theDef, + Checker.CheckContext theCtx) { + return evalCollectionConstraints(((Map)theVal).values(), theDef, theCtx); + } + + private 
static boolean evalCollectionConstraints(Collection theVals, + Map theDef, + Checker.CheckContext theCtx) { +//System.out.println("evalCollectionConstraints: " + theDef + ", " + theVals); + + //should check overall constraints + + if (theVals == null) + return true; + + Map entryTypeDef = (Map)theDef.get("entry_schema"); + if (null == entryTypeDef) + return true; + + String entryTypeName = (String)entryTypeDef.get("type"); + Data.Type entryType = typeByName(entryTypeName); + +//System.out.println("evalCollectionConstraints, entry definition: " + entryTypeDef); + + boolean res = true; + for (Object val: theVals) { + Evaluator entryEvaluator = entryType.constraintsEvaluator(); + if (entryEvaluator != null && + !entryEvaluator.eval(val, entryTypeDef, theCtx)) { + res= false; + //the constraints evaluator should have already added an error, but it also adds some context + //theCtx.addError("Value " + val + " failed evaluation", null); + } + } + return res; + } + + /* + * All required properties across the hierarchical defintion must be present + * TODO: The expr cannot contain any entry not specified in the type definition + */ + public static boolean evalUser(Object theVal, + Map theDef, + Checker.CheckContext theCtx) { +//System.out.println("evalUser: " + theDef + ", " + theVal); + + boolean res = true; + Map val = (Map)theVal; + //must be done with respect to the super-type(s) definition + Iterator props = theCtx.catalog() + .facets(Construct.Data, + Facet.properties, + (String)theDef.get("type")); + while (props.hasNext()) { + Map.Entry propEntry = props.next(); + Map propDef = (Map)propEntry.getValue(); + Object propVal = val.get(propEntry.getKey()); + +//System.out.println("evalUser: " + propVal); + + if (propVal != null) { + Data.Type propType = typeByName((String)propDef.get("type")); + + if (!propType.evaluator().eval(propVal, propDef, theCtx)) { + res= false; + //the constraints evaluator should have already added an error + //theCtx.addError("Property " + 
propEntry.getKey() + " failed evaluation for " + propVal, null); + } + } + } + return res; + } + + public static boolean evalUserConstraints(Object theVal, + Map theDef, + Checker.CheckContext theCtx) { + boolean res = true; + Map val = (Map)theVal; + Iterator props = theCtx.catalog() + .facets(Construct.Data, + Facet.properties, + (String)theDef.get("type")); + while (props.hasNext()) { + Map.Entry propEntry = props.next(); + Map propDef = (Map)propEntry.getValue(); + Object propVal = val.get(propEntry.getKey()); + + if (propVal != null) { + Data.Type propType = typeByName((String)propDef.get("type")); + + if (propType.constraintsEvaluator() != null && + !propType.constraintsEvaluator().eval(propVal, propDef, theCtx)) { + res= false; + //the constraints evaluator should have already added an error + //theCtx.addError("Property " + propEntry.getKey() + " failed evaluation for " + propVal, null); + } + } + else { + if (Boolean.TRUE == (Boolean)propDef.getOrDefault("required", Boolean.FALSE) && + !propDef.containsKey("default")) { + theCtx.addError("Property " + propEntry.getKey() + " failed 'required' constraint; definition is " + propDef, null); + res = false; + } + } + } + return res; + } + + private static boolean valueOf(Checker.CheckContext theCtx, + Object theExpr, + Class ... 
theTypes) { + for (Class type: theTypes) { + if (type.isAssignableFrom(theExpr.getClass())) { + return true; + } + } + + theCtx.addError("Expression " + theExpr + " as " + theExpr.getClass().getName() + " is not compatible with any of required types: " + Arrays.toString(theTypes), null); + return false; + } + +/* + private static boolean valueOf(Class theTarget, + String theExpr, + Checker.CheckContext theCtx) { + try { + theTarget.getMethod("valueOf", new Class[] {String.class}) + .invoke(null, theExpr); + return true; + } + catch (InvocationTargetException itx) { + theCtx.addError("Failed to parse " + theExpr + " as a " + theTarget.getName(), itx.getCause()); + return false; + } + catch (Exception x) { + theCtx.addError("Failed to valueOf " + theExpr + " as a " + theTarget.getName(), x); + return false; + } + } +*/ + + /* + * Function e(valuation) + * ? + * note to self : is there a more efficient way of retrieving a map's + * single entry? (without knowing the key) + * + * ! Function evaluators have to handle null definition (i.e. perform argument checking) so that + * we can use them in the context of collections with without entry_schemas + */ + + //just as Type but is it worth expressing this 'commonality'?? + + public static interface Function { + + public String name(); + + public Evaluator evaluator(); + } + + /* + * This is a heuristic induced from the tosca specification .. it answers the + * question of wether the given expression is a function + */ + public static Function function(Object theExpr) { + if (theExpr instanceof Map && + ((Map)theExpr).size() == 1) { + try { + return Enum.valueOf(CoreFunction.class, functionName(theExpr)); + } + catch (IllegalArgumentException iax) { + //no such function but we cannot really record an error as we only guessed the expression as being a function .. 
+ log.info("Failed attempt to interpret " + theExpr + " as a function call"); + } + } + + return null; + } + + /* + */ + public static String functionName(Object theExpr) { + return (String) + ((Map.Entry) + ((Map)theExpr).entrySet().iterator().next()) + .getKey(); + } + + /* + */ + public static Data.Function functionByName(String theName) { + return Enum.valueOf(CoreFunction.class, theName); + } + + /* + */ + public static enum CoreFunction implements Function { + + concat(Data::evalConcat), + token(Data::evalToken), + get_input(Data::evalGetInput), + get_property(Data::evalGetProperty), + get_attribute(Data::evalGetAttribute), + get_operation_output((expr,def,ctx) -> true), + get_nodes_of_type(Data::evalGetNodesOfType), + get_artifact((expr,def,ctx) -> true); + + private Evaluator evaluator; + + private CoreFunction(Evaluator theEval) { + this.evaluator = theEval; + } + + public Evaluator evaluator() { + return this.evaluator; + } + } + + private static boolean evalConcat( + Object theVal, Map theDef, Checker.CheckContext theCtx) { + return true; + } + + private static boolean evalToken( + Object theVal, Map theDef, Checker.CheckContext theCtx) { + return true; + } + + private static boolean evalGetInput( + Object theVal, Map theDef, Checker.CheckContext theCtx) { + Map val = (Map)theVal; + Map.Entry entry = (Map.Entry)val.entrySet().iterator().next(); + + if (!(entry.getValue() instanceof String)) { + theCtx.addError("get_input: argument must be a String" ,null); + return false; + } + + //check that an input with the given name exists and has a compatible type + Map inputDef = theCtx.catalog() + .getTemplate(theCtx.target(), Construct.Data, (String)entry.getValue()); + if (inputDef == null) { + theCtx.addError("get_input: no such input " + entry.getValue(), null); + return false; + } + + if (theDef == null) + return true; + + //the output must be type compatible with the input + String targetType = (String)theDef.get("type"); + if (targetType != null) { + 
String inputType = (String)inputDef.get("type"); + + if (!theCtx.catalog() + .isDerivedFrom(Construct.Data, inputType, targetType)) { + theCtx.addError("get_input: input type " + inputType + " is incompatible with the target type " + targetType, null); + return false; + } + } + + return true; + } + + /* + * Who's the smarty that decided to define optional arguments in between + * required ones ?! + * (factors the evaluation of get_attribute and get_property) + */ + private static boolean evalGetData( + Object theVal, Map theDef, + EnumSet theFacets, Checker.CheckContext theCtx) { + + Map val = (Map)theVal; + Map.Entry entry = (Map.Entry)val.entrySet().iterator().next(); + + if (!(entry.getValue() instanceof List)) { + theCtx.addError("get_property: argument must be a List" ,null); + return false; + } + + List args = (List)entry.getValue(); + if (args.size() < 2) { + theCtx.addError("'get_property' has at least 2 arguments", null); + return false; + } + + //the first argument is a node or relationship template + String tmpl = (String)args.get(0); + Construct tmplConstruct = null; + Map tmplSpec = null; + + if ("SELF".equals(tmpl)) { + tmpl = theCtx.enclosingConstruct(Construct.Node).name(); + if (tmpl == null) { + tmpl = theCtx.enclosingConstruct(Construct.Relationship).name(); + if (tmpl == null) { + theCtx.addError("'get_property' invalid SELF reference: no node or relationship template in scope at " + theCtx.getPath(), null); + return false; + } + else { + tmplConstruct = Construct.Relationship; + } + } + else { + tmplConstruct = Construct.Node; + } + tmplSpec = theCtx.catalog().getTemplate(theCtx.target(), tmplConstruct, tmpl); + } + else if ("SOURCE".equals("tmpl")) { + //we are in the scope of a relationship template and this is the source node template. 
+ tmpl = theCtx.enclosingConstruct(Construct.Relationship).name(); + if (tmpl == null) { + theCtx.addError("'get_property' invalid SOURCE reference: no relationship template in scope at " + theCtx.getPath(), null); + return false; + } + + return true; + } + else if ("TARGET".equals("tmpl")) { + //we are in the scope of a relationship template and this is the target node template. + tmpl = theCtx.enclosingConstruct(Construct.Relationship).name(); + if (tmpl == null) { + theCtx.addError("'get_property' invalid TARGET reference: no relationship template in scope at " + theCtx.getPath(), null); + return false; + } + + return true; + } + else if ("HOST".equals("tmpl")) { + tmpl = theCtx.enclosingConstruct(Construct.Node).name(); + if (tmpl == null) { + theCtx.addError("'get_property' invalid HOST reference: no node template in scope at " + theCtx.getPath(), null); + return false; + } + + return true; + } + else { + //try node template first + tmplSpec = theCtx.catalog().getTemplate(theCtx.target(), Construct.Node, tmpl); + if (tmplSpec == null) { + //try relationship + tmplSpec = theCtx.catalog().getTemplate(theCtx.target(), Construct.Relationship, tmpl); + if (tmplSpec == null) { + theCtx.addError("'get_data' invalid template reference '" + tmpl + "': no node or relationship template with this name", null); + return false; + } + else { + tmplConstruct = Construct.Relationship; + } + } + else { + tmplConstruct = Construct.Node; + } + } + + int facetNameIndex = 1; + Construct facetConstruct = tmplConstruct; //who's construct the facet is supposed to belong to + Map facetConstructSpec = null; + String facetConstructType = null; + + if (tmplConstruct.equals(Construct.Node) && + args.size() > 2) { + //the second arg might be a capability or requirement name. If it is a + //capability than the third argument becomes a property of the + //coresponding capability type. 
If it is a requirement than the + //requirement definition indicates a capability who's type has a + //property with the name indicated in the third argument .. + // + //while the spec does not make it explicit this can only take place + //if the first argument turned out to be a node template (as relationship + //templates/types do not have capabilities/requirements + String secondArg = (String)args.get(1); + if ((facetConstructSpec = theCtx.catalog().getFacetDefinition( + tmplConstruct, + (String)tmplSpec.get("type"), + Facet.capabilities, + secondArg)) != null) { + facetNameIndex = 2; + facetConstruct = Construct.Capability; + facetConstructType = (String)facetConstructSpec.get("type"); + } + else if ((facetConstructSpec = theCtx.catalog().getRequirementDefinition( + tmplConstruct, + (String)tmplSpec.get("type"), + secondArg)) != null) { + facetNameIndex = 2; + facetConstruct = Construct.Capability; + + //find the specof the capability this requirement points to + //TODO: check, can the capability reference be anything else but a capability tyep? + facetConstructType = (String)facetConstructSpec.get("capability"); + } + } + else { + //we'll attempt to handle it as a property of the node template + facetConstruct = Construct.Node; + facetConstructSpec = tmplSpec; + facetConstructType = (String)facetConstructSpec.get("type"); + } + + //validate the facet name + Map facetSpec = null; + { + String facetName = (String)args.get(facetNameIndex); + for (Facet facet: theFacets) { +//System.out.println("get_data: looking at " + facetConstruct + " " + facetConstructType + ", " + facet + " " + facetName); + facetSpec = theCtx.catalog() + .getFacetDefinition( + facetConstruct, + facetConstructType, + facet, + facetName); + if (facetSpec != null) + break; + } + + if (facetSpec == null) { +//TODO: not the greatest message if the call strated with a requirement .. 
+ theCtx.addError("'get_data' invalid reference, '" + facetConstruct + "' " + facetConstructType + " has no " + theFacets + " with name " + facetName, null); + return false; + } + } + + //the rest of the arguments have to resolve to a field of the property's + //data type; the propertySpec contains the type specification + for (int i = facetNameIndex + 1; i < args.size(); i++) { + } + + return true; + } + + /**/ + private static boolean evalGetProperty( + Object theVal, Map theDef, Checker.CheckContext theCtx) { + return evalGetData(theVal, theDef, EnumSet.of(Facet.properties), theCtx); + } + + /* + * get_property and get_attribute are identical, just operating on different + * facets, with one exception: there is an intrinsec attribute for every + * declared property. + */ + private static boolean evalGetAttribute( + Object theVal, Map theDef, Checker.CheckContext theCtx) { + return evalGetData(theVal, theDef, EnumSet.of(Facet.attributes, Facet.properties), theCtx); + } + + private static boolean evalGetNodesOfType( + Object theVal, Map theDef, Checker.CheckContext theCtx) { + + Map val = (Map)theVal; + Map.Entry entry = (Map.Entry)val.entrySet().iterator().next(); + + if (!(entry.getValue() instanceof String)) { + theCtx.addError("get_nodes_of_type: argument must be a String", null); + return false; + } + + String arg = (String)entry.getValue(); + + if (null == theCtx.catalog().getTypeDefinition(Construct.Node, arg)) { + theCtx.addError("get_nodes_of_type: no such node type " + arg, null); + return false; + } + else { + return true; + } + } + + /* */ + public static Constraint constraintByName(String theName) { + return Enum.valueOf(Constraint.class, theName); + } + + /* */ + public static Constraint constraint(Object theExpr) { + if (theExpr instanceof Map && + ((Map)theExpr).size() == 1) { + return constraintByName(constraintName(theExpr)); + } + + return null; + } + + /* */ + public static String constraintName(Object theExpr) { + return (String) + 
((Map.Entry) + ((Map)theExpr).entrySet().iterator().next()) + .getKey(); + } + + private static Object getConstraintValue(Map theDef, + Constraint theConstraint) { + List constraints = (List)theDef.get("constraints"); + if (null == constraints) + return null; + + for(Map constraint: constraints) { + Object val = constraint.get(theConstraint.toString()); + if (val != null) + return val; + } + return null; + } + + public static enum Constraint { + equal, + greater_than, + greater_or_equal, + less_than, + less_or_equal, + in_range, + valid_values, + length, + min_length, + max_length, + pattern; + } + + + /* hold the constraint evaluators for pairs of type/constraint. + * If a pair is not present than the given constraint does not apply + * to the type. + */ + private static Table typeConstraintEvaluator =null; + + public static Evaluator + getTypeConstraintEvaluator(Type theType, Constraint theConstraint) { + if (typeConstraintEvaluator == null) { + typeConstraintEvaluator = HashBasedTable.create(); + + typeConstraintEvaluator.put(CoreType.String, Constraint.equal, + (val,def,ctx) -> val.equals(getConstraintValue(def,Constraint.equal))); + typeConstraintEvaluator.put(CoreType.String, Constraint.valid_values, + (val,def,ctx) -> { + return ((List)getConstraintValue(def,Constraint.valid_values)).contains(val); + }); + typeConstraintEvaluator.put(CoreType.String, Constraint.length, + (val,def,ctx) -> ((String)val).length() == ((Number)getConstraintValue(def,Constraint.length)).intValue()); + typeConstraintEvaluator.put(CoreType.String, Constraint.min_length, + (val,def,ctx) -> ((String)val).length() >= ((Number)getConstraintValue(def,Constraint.min_length)).intValue()); + typeConstraintEvaluator.put(CoreType.String, Constraint.max_length, + (val,def,ctx) -> ((String)val).length() <= ((Number)getConstraintValue(def,Constraint.max_length)).intValue()); + typeConstraintEvaluator.put(CoreType.String, Constraint.pattern, + (val,def,ctx) -> 
Pattern.compile((String)getConstraintValue(def,Constraint.pattern)) + .matcher((String)val) + .matches()); + + typeConstraintEvaluator.put(CoreType.Integer, Constraint.equal, + (val,def,ctx) -> ((Integer)val).compareTo((Integer)getConstraintValue(def,Constraint.equal)) == 0); + typeConstraintEvaluator.put(CoreType.Integer, Constraint.greater_than, + (val,def,ctx) -> ((Integer)val).compareTo((Integer)getConstraintValue(def,Constraint.greater_than)) > 0); + typeConstraintEvaluator.put(CoreType.Integer, Constraint.greater_or_equal, + (val,def,ctx) -> ((Integer)val).compareTo((Integer)getConstraintValue(def,Constraint.greater_or_equal)) >= 0); + typeConstraintEvaluator.put(CoreType.Integer, Constraint.less_than, + (val,def,ctx) -> ((Integer)val).compareTo((Integer)getConstraintValue(def,Constraint.less_than)) < 0); + typeConstraintEvaluator.put(CoreType.Integer, Constraint.less_or_equal, + (val,def,ctx) -> ((Integer)val).compareTo((Integer)getConstraintValue(def,Constraint.less_or_equal)) <= 0); + typeConstraintEvaluator.put(CoreType.Integer, Constraint.in_range, + (val,def,ctx) -> { List range = (List)getConstraintValue(def, Constraint.in_range); + return ((Integer)val).compareTo(range.get(0)) >= 0 && + ((Integer)val).compareTo(range.get(1)) <= 0; + }); + typeConstraintEvaluator.put(CoreType.Integer, Constraint.valid_values, + (val,def,ctx) -> ((List)getConstraintValue(def, Constraint.valid_values)).contains((Integer)val)); + +//yaml parser represents yaml floats as java Double and we are even more tolerant as many double values +//get represented as ints and the parser will substitute an Integer + typeConstraintEvaluator.put(CoreType.Float, Constraint.equal, + (val,def,ctx) -> ((Number)val).doubleValue() == ((Number)getConstraintValue(def,Constraint.equal)).doubleValue()); + typeConstraintEvaluator.put(CoreType.Float, Constraint.greater_than, + (val,def,ctx) -> ((Number)val).doubleValue() > ((Number)getConstraintValue(def,Constraint.greater_than)).doubleValue()); + 
typeConstraintEvaluator.put(CoreType.Float, Constraint.greater_or_equal, + (val,def,ctx) -> ((Number)val).doubleValue() >= ((Number)getConstraintValue(def,Constraint.greater_or_equal)).doubleValue()); + typeConstraintEvaluator.put(CoreType.Float, Constraint.less_than, + (val,def,ctx) -> ((Number)val).doubleValue() < ((Number)getConstraintValue(def,Constraint.less_than)).doubleValue()); + typeConstraintEvaluator.put(CoreType.Float, Constraint.less_or_equal, + (val,def,ctx) -> ((Number)val).doubleValue() <= ((Number)getConstraintValue(def,Constraint.less_or_equal)).doubleValue()); + typeConstraintEvaluator.put(CoreType.Float, Constraint.in_range, + (val,def,ctx) -> { List range = (List)getConstraintValue(def, Constraint.in_range); + return ((Number)val).doubleValue() >= range.get(0).doubleValue() && + ((Number)val).doubleValue() <= range.get(1).doubleValue(); + }); + typeConstraintEvaluator.put(CoreType.Float, Constraint.valid_values, + (val,def,ctx) -> ((List)getConstraintValue(def, Constraint.valid_values)).contains((Number)val)); + } + + Evaluator eval = typeConstraintEvaluator.get(theType, theConstraint); + + return eval == null ? 
(expr,def,ctx) -> true + : eval; + } + + + private static boolean stringValidValues(String theVal, + List theValidValues, + Checker.CheckContext theCtx) { + if (!theValidValues.contains(theVal)) { + theCtx.addError("not a valid value: " + theVal + " not part of " + theValidValues, null); + return false; + } + + return true; + } + + public static final void main(String[] theArgs) { + Data.CoreType dt = Enum.valueOf(Data.CoreType.class, theArgs[0]); + System.out.println(theArgs[1] + " > " + dt.evaluator().eval(theArgs[1], null, null)); + } +} diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Facet.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Facet.java new file mode 100644 index 0000000..ea9fd48 --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Facet.java @@ -0,0 +1,49 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. + */ +package org.onap.tosca.checker; + +/* + * Oddballs: + * - requirements (a requirement does not have a type - i.e. is not based + * on a Construct) and can target a node, a capability or both .. When present + * as a facet of another Construct it is also the only one represented as a + * sequence so it will need special handling anyway. 
+ */ +public enum Facet { + + inputs(Construct.Data), + outputs(Construct.Data), + properties(Construct.Data), + attributes(Construct.Data), + capabilities(Construct.Capability), + //requirements(Construct.Capability),//?? + artifacts(Construct.Artifact), + interfaces(Construct.Interface); + /* + Node + Relationship + they can be considered as facets of the topology template ... + */ + + private Construct construct; + + private Facet(Construct theConstruct) { + this.construct = theConstruct; + } + + public Construct construct() { + return this.construct; + } +} + + diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Messages.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Messages.java new file mode 100644 index 0000000..98158da --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Messages.java @@ -0,0 +1,54 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. 
+ */
+package org.onap.tosca.checker;
+
+import java.text.MessageFormat;
+import java.util.ResourceBundle;
+import java.util.MissingResourceException;
+
+/*
+ * This class should be generated programmatically based on the keys available in messages.properties
+ */
+public class Messages {
+
+	private ResourceBundle messages;
+
+	public Messages() {
+		try {
+			this.messages = ResourceBundle.getBundle("org/onap/tosca/checker/messages");
+		}
+		catch (MissingResourceException mrx) {
+			throw new RuntimeException("", mrx);
+		}
+
+		//check that the Message enum is in sync with the resource bundle
+	}
+
+	public String format(Message theMessage, Object[] theArgs) {
+		String message = this.messages.getString(theMessage.name());
+		if (message == null)
+			throw new RuntimeException("Un-available message: " + theMessage);
+
+		return MessageFormat.format(message, theArgs);
+	}
+
+	public enum Message {
+		EMPTY_TEMPLATE,
+		INVALID_CONSTRUCT_REFERENCE,
+		INVALID_TYPE_REFERENCE,
+		INVALID_TEMPLATE_REFERENCE,
+		INVALID_INTERFACE_REFERENCE,
+		INVALID_FACET_REFERENCE,
+		INCOMPATIBLE_REQUIREMENT_TARGET
+	}
+}
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Paths.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Paths.java
new file mode 100644
index 0000000..2cedaca
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Paths.java
@@ -0,0 +1,96 @@
+/*
+ * Copyright (c) 2017 . All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the License for + * the specific language governing permissions and limitations under the License. + */ +package org.onap.tosca.checker; + +import java.util.Map; +import java.util.HashMap; +import java.util.Iterator; + +import org.apache.commons.jxpath.JXPathContext; +import org.apache.commons.jxpath.JXPathException; + +import org.onap.tosca.checker.annotations.Catalogs; + +/* + * Facilitates processing of catalog information through xpath expressions. + * In development. + */ +public class Paths { + + private Map paths = new HashMap(); + private JXPathContext types = JXPathContext.newContext(new HashMap()); + + @Catalogs(path="/") + public void catalog_root(Map theDefinition, Checker.CheckContext theContext) { + paths.put(theContext.target(), JXPathContext.newContext(theContext.target().getTarget())); + theDefinition + .forEach((k,v) -> { if (k.toString().endsWith("_types")) + ((Map)types.getContextBean()).merge(k, v, (v1, v2) -> { ((Map)v1).putAll((Map)v2); + return v1; + }); + }); + } + + public Object resolve(Target theTarget, String thePath) { + return paths.get(theTarget).getValue(thePath); + } + + public Object resolve(String thePath) { + return types.getValue(thePath); + } + + /* */ + public String nodeType(String theNodeType) { + return (String)resolve("/node_types/" + theNodeType); + } + + public Map nodeTypeInterface(String theNodeType, String theInterfaceName) { + return (Map)resolve("/node_types/" + theNodeType + "/interfaces/" + theInterfaceName); + } + + public String nodeTypeInterfaceType(String theNodeType, String theInterfaceName) { + return (String)resolve("/node_types/" + theNodeType + "/interfaces/" + theInterfaceName + "/type"); + } + + /* */ + public Map interfaceType(String theInterfaceType) { + return (Map)resolve("/interface_types/" + theInterfaceType); + } + + //this will also resolve 'metadata', 'derived_from', version and other common entries as operations + public Map interfaceTypeOperation(String theInterfaceType, String 
theOperationName) { + return (Map)resolve("/interface_types/" + theInterfaceType + "/" + theOperationName); + } + + /* node templates */ + public String nodeTemplateType(Target theTarget, String theNodeName) { + return (String)resolve(theTarget, "/topology_template/node_templates/" + theNodeName + "/type"); + } + + public Map nodeTemplateInterface(Target theTarget, String theNodeName, String theInterfaceName) { + return (Map)resolve(theTarget, "/topology_template/node_templates/" + theNodeName + "/interfaces/" + theInterfaceName); + } + + public Map nodeTemplateInterfaceOperation( + Target theTarget, String theNodeName, String theInterfaceName, String theOperationName) { + return (Map)resolve(theTarget, "/topology_template/node_templates/" + theNodeName + "/interfaces/" + theInterfaceName + "/" + theOperationName); + } + + + /* */ + public Map nodeTypeInterfaceOperation(String theNodeType, String theInterfaceName, String theOperationName) { + return (Map)resolve("/node_types/" + theNodeType + "/interfaces/" + theInterfaceName + "/" + theOperationName); + } + + +} diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Report.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Report.java new file mode 100644 index 0000000..bce0729 --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Report.java @@ -0,0 +1,115 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker;
+
+import java.io.IOException;
+
+import java.util.LinkedList;
+import java.util.Collections;
+
+import org.yaml.snakeyaml.error.MarkedYAMLException;
+import kwalify.ValidationException;
+
+import com.fasterxml.jackson.databind.annotation.JsonSerialize;
+import com.fasterxml.jackson.databind.JsonSerializer;
+import com.fasterxml.jackson.databind.SerializerProvider;
+import com.fasterxml.jackson.databind.ser.std.StdSerializer;
+import com.fasterxml.jackson.core.JsonGenerator;
+import com.fasterxml.jackson.core.JsonProcessingException;
+
+/**
+ * Represents a collection of errors that occurred during one of the stages
+ * of the checker: yaml parsing, yaml validation (tosca syntax), tosca checking
+ */
+/*
+ * This needs some re-thinking: while it is useful to have all the original errors, introducing
+ * the custom json conversion (just to help the service) is not great either.
+ * I was torn between this approach or creating a custom deserializer and object mapper (which
+ * would have kept all the customized serialization in the service but then the error analysis
+ * would be duplicated there too ..). 
+ */ +@JsonSerialize(contentUsing=org.onap.tosca.checker.Report.ReportEntrySerializer.class) +public class Report extends LinkedList { + + public Report() { + } + + public Report(T[] theErrors) { + Collections.addAll(this, theErrors); + } + + public boolean hasErrors() { + return !this.isEmpty(); + } + + public boolean addOnce(T theError) { + for (T e: this) { + if (e.getMessage().equals(theError.getMessage())) + return false; + } + return add(theError); + } + + public String toString() { + StringBuilder sb = new StringBuilder(this.size() + " errors"); + for (Throwable x: this) { + sb.append("\n") + .append("[") + .append(location(x)) + .append("] ") + .append(x.getMessage()); + if (x.getCause() != null) { + sb.append("\n\tCaused by:\n") + .append(x.getCause()); + } + } + sb.append("\n"); + return sb.toString(); + } + + private static String location(Throwable theError) { + if (theError instanceof MarkedYAMLException) { + MarkedYAMLException mx = (MarkedYAMLException)theError; + return "line " + mx.getProblemMark().getLine() + ", column " + mx.getProblemMark().getColumn(); + } + if (theError instanceof ValidationException) { + ValidationException vx = (ValidationException)theError; + return vx.getPath(); + } + if (theError instanceof TargetError) { + TargetError tx = (TargetError)theError; + return tx.getLocation(); + } + return "unknown"; + } + + + public static class ReportEntrySerializer extends StdSerializer { + + public ReportEntrySerializer() { + super(Throwable.class); + } + + @Override + public void serialize(Throwable theError, JsonGenerator theGenerator, SerializerProvider theProvider) + throws IOException, JsonProcessingException { + theGenerator.writeStartObject(); + theGenerator.writeStringField("location", location(theError)); + theGenerator.writeStringField("message", theError.getMessage()); + if (theError.getCause() != null) + theGenerator.writeStringField("cause", theError.getCause().toString()); + theGenerator.writeEndObject(); + } + } +} + diff 
--git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Repository.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Repository.java new file mode 100644 index 0000000..66c37af --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Repository.java @@ -0,0 +1,62 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. + */ +package org.onap.tosca.checker; + + +import java.net.URI; +import java.net.URL; +import java.net.MalformedURLException; + +import java.util.logging.Level; +import java.util.logging.Logger; + +import java.util.Map; + +/** + * Represents a 'container' of (yaml) TOSCA documents + */ +public abstract class Repository { + + protected static Logger log = + Logger.getLogger("com.att.research.is.tosca.yaml.Repository"); + + private String name, + description; + protected URI rootURI; + protected Map credential; //TOSCA type tosca.datatype.Credential + + public Repository(String theName, URI theRoot) { + this.name = theName; + this.rootURI = theRoot; + } + + public String getName() { + return this.name; + } + + public URI getRoot() { + return this.rootURI; + } + + /** optional */ + public abstract Iterable targets(); + + /** */ + public abstract Target resolve(URI theURI); + + @Override + public String toString() { + return "Repository " + this.name + " at " + this.rootURI; + } +} + diff 
--git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Stage.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Stage.java
new file mode 100644
index 0000000..416a533
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Stage.java
@@ -0,0 +1,33 @@
+/*
+ * Copyright (c) 2017 . All rights reserved.
+ * ===================================================================
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for
+ * the specific language governing permissions and limitations under the License.
+ */
+package org.onap.tosca.checker;
+
+/*
+ * The Target processing stages
+ */
+public enum Stage {
+
+	located,	/* not really used as we do not track a Target that we cannot locate */
+	parsed,		/* yaml parsing successfully completed */
+	validated,	/* syntax check successfully completed: document is compliant to yaml tosca grammar */
+	cataloged,	/* all the constructs have been cataloged */
+	checked;	/* 'semantic' checking completed */
+
+	private static final Stage[] stages = values();
+
+	public Stage next() {
+		return stages[ordinal() + 1];
+	}
}
+
+
diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Target.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Target.java
new file mode 100644
index 0000000..c21593b
--- /dev/null
+++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Target.java
@@ -0,0 +1,109 @@
+/*
+ * Copyright (c) 2017 . All rights reserved. 
+ * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. + */ +package org.onap.tosca.checker; + +import java.io.InputStream; +import java.io.InputStreamReader; +import java.io.Reader; +import java.io.BufferedReader; +import java.io.IOException; + +import java.net.URI; +import java.net.URL; +import java.net.MalformedURLException; + +import java.util.logging.Level; +import java.util.logging.Logger; + +/** + * Represents a yaml document to be parsed/validated/checked. + * A Target's scope is the checker that is processing it. 
+ */
+public class Target {
+
+	private static Logger log =
+		Logger.getLogger("com.att.research.is.tosca.yaml.Target");
+
+	private String name;
+	private URI location;
+	//while it is convenient having the information below in here I am conflicted about it
+	//as it represents processing state
+	private Object target; //this is the parsed form of the target
+	private Stage stage = Stage.located; //the processing stage this target has reached
+	private Report report = new Report(); //collects the errors related to this target
+
+	public Target(String theName, URI theLocation) {
+		this.name = theName;
+		this.location = theLocation;
+	}
+
+	public String getName() {
+		return this.name;
+	}
+
+	public URI getLocation() {
+		return this.location;
+	}
+
+	public Report getReport() {
+		return this.report;
+	}
+
+	public void report(Throwable theError) {
+		this.report.add(theError);
+	}
+
+	public void report(String theErrMsg) {
+		this.report.add(new Exception(theErrMsg));
+	}
+
+	public void setTarget(Object theTarget) {
+		this.target = theTarget;
+	}
+
+	public Object getTarget() {
+		return this.target;
+	}
+
+	public Stage getStage() {
+		return this.stage;
+	}
+
+	public void setStage(Stage theStage) {
+		if (theStage.ordinal() <= this.stage.ordinal())
+			throw new IllegalArgumentException("Stage can only be set forward.");
+
+		this.stage = theStage;
+	}
+
+	public void nextStage() {
+		setStage(this.stage.next());
+	}
+
+	/*
+	 * @return a reader for the source or null if failed
+	 */
+	public Reader open() throws IOException {
+
+		return new BufferedReader(
+							new InputStreamReader(
+								this.location.toURL().openStream()));
+	}
+
+	public String toString() {
+		//return String.format("Target %s (%.20s ...)", this.location, this.target == null ? 
"" : this.target.toString()); + return String.format("Target %s at %s", this.name, this.location); + + } +} + diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/TargetError.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/TargetError.java new file mode 100644 index 0000000..917ec3f --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/TargetError.java @@ -0,0 +1,55 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. + */ +package org.onap.tosca.checker; + + +/** + * A target error represents an error in target the resource being checked. + * We only represent it as a Throwable because the libraries that perform parsing and syntax validation + * represent their errors as such .. + */ +public class TargetError extends Throwable { + + /* + public static enum Level { + error, + warning + } + */ + + private String location; //we might need an more detailed representation + //here: it could be a YAML document jpath or + //document location (line). 
+ private String target; + + public TargetError(String theTarget, String theLocation, String theMessage, Throwable theCause) { + super(theMessage, theCause); + this.target = theTarget; + this.location = theLocation; + } + + public TargetError(String theTarget, String theLocation, String theMessage) { + this(theTarget, theLocation, theMessage, null); + } + + public String getTarget() { + return this.target; + } + + public String getLocation() { + return this.location; + } + + +} + diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/TargetLocator.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/TargetLocator.java new file mode 100644 index 0000000..920e1a6 --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/TargetLocator.java @@ -0,0 +1,34 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. 
+ */ +package org.onap.tosca.checker; + +import java.net.URI; + +/** + * Used to locate a document that needs to be processed by the checker + */ +public interface TargetLocator { + + /** */ + public boolean addSearchPath(URI theURI); + + /** */ + public boolean addSearchPath(String thePath); + + /** */ + public Iterable searchPaths(); + + /** */ + public Target resolve(String theName); + +} diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Workflows.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Workflows.java new file mode 100644 index 0000000..3f6a445 --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/Workflows.java @@ -0,0 +1,287 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. + */ +package org.onap.tosca.checker; + +import java.util.Map; +import java.util.List; +import java.util.Iterator; + +import org.onap.tosca.checker.annotations.Checks; + +/** + * Workflows checking isolated as it is of significant size. Showcases the way checking can be added to the core + * checker through annotations. 
+ */ +@Checks +public class Workflows { + + @Checks(path="/topology_template/workflows") + public void check_workflows(Map theDefinition, Checker.CheckContext theContext) { + + theContext.enter("workflows", null, theDefinition); + + try { + if(!theContext.checker().checkDefinition("workflows", theDefinition, theContext)) + return; + + for (Iterator> i = theDefinition.entrySet().iterator(); i.hasNext(); ) { + Map.Entry e = i.next(); + check_workflow_definition(e.getKey(), e.getValue(), theContext); + } + } + finally { + theContext.exit(); + } + } + + + public void check_workflow_definition(String theName, Map theDef, Checker.CheckContext theContext) { + + theContext.enter("workflow", Construct.Workflow); + + if (theDef.containsKey("inputs")) { + theContext + .checker() + .check_properties((Map)theDef.get("inputs"), theContext); + } + + if (theDef.containsKey("preconditions")) { + check_workflow_preconditions_definition((List)theDef.get("preconditions"), theContext); + } + + if (theDef.containsKey("steps")) { + check_workflow_steps_definition((Map)theDef.get("steps"), theContext); + } + + theContext.exit(); + } + + + public void check_workflow_steps_definition(Map theSteps, Checker.CheckContext theContext) { + + theContext.enter("steps"); + + try { + for (Iterator> i = theSteps.entrySet().iterator(); i.hasNext(); ) { + Map.Entry e = i.next(); + check_workflow_step_definition(e.getKey(), e.getValue(), theContext); + } + } + finally { + theContext.exit(); + } + + } + + public void check_workflow_step_definition(String theName, Map theDef, Checker.CheckContext theContext) { + + theContext.enter(theName, null, theDef); + try { + //target + check_target_definition(theDef, theContext); + + //operation_host + + //filter: sequence of constraints with workflow assertion definitions (see section 3.5.18) + //where the keys are attribute names: representing the name of an attribute defined on the assertion context + //entity (node instance, relationship instance, group instance) 
+ //we could verify that all th eattributes ae defined .. how accurate can that be + + //activities + List activities = (List)theDef.get("activities"); + if (activities != null) { + for (Map activity: activities) { + check_workflow_step_activity_definition(activity, theContext); + } + } + + List successSteps = (List)theDef.get("on_success"); + List failureSteps = (List)theDef.get("on_failure"); + if (successSteps != null || failureSteps != null) { + Map steps = (Map)theContext.catalog().getTemplate(theContext.target(), + Construct.Workflow, + theContext.enclosingConstruct(Construct.Workflow).name()) + .get("steps"); + + if (successSteps != null) { + for (Object successStep: successSteps) { + if (!steps.containsKey(successStep)) { + theContext.addError("The 'on_success' step " + successStep + " was not declared", null); + } + } + } + + if (failureSteps != null) { + for (Object failureStep: failureSteps) { + if (!steps.containsKey(failureStep)) { + theContext.addError("The 'on_failure' step " + failureStep + " was not declared", null); + } + } + } + } + } + finally { + theContext.exit(); + } + } + + public void check_workflow_preconditions_definition(List thePreconditions, Checker.CheckContext theContext) { + + theContext.enter("preconditions"); + + try { + for (Map precondition: thePreconditions) { + check_workflow_precondition_definition(precondition, theContext); + } + } + finally { + theContext.exit(); + } + } + + public void check_workflow_precondition_definition(Map theDef, Checker.CheckContext theContext) { + + Map targetDef = check_target_definition(theDef, theContext); + if (targetDef != null) { + List condDef = (List)theDef.get("condition"); + if (condDef != null) { + check_condition_clause_definition(targetDef, condDef, theContext); + } + } + } + + public void check_workflow_step_activity_definition(Map theDef, Checker.CheckContext theContext) { + if (theDef.size() != 1) + theContext.addError("An activity has only one entry", null); + + //only one entry 
expected + String delegate = (String)theDef.get("delegate"); + if (delegate != null) { + //not clear to me what this is: a reference to a declarative workflow? + } + + String state = (String)theDef.get("set_state"); + if (state != null) { + } + + String operation = (String)theDef.get("call_operation"); + if (operation != null) { + int pos = operation.lastIndexOf('.'); + if (pos < 0) { + theContext.addError("Invalid 'operation_call' statement", null); + } + else { + String interfaceName = operation.substring(0, pos), + interfaceOp = operation.substring(pos+1); + + //we expect the enclosing to be the workflow step, where the target is specified + String stepTarget = (String)((Map)theContext.enclosing().info()).get("target"); + Paths paths = theContext.checker().getHandler(Paths.class); + Map opDef = null; + + //look in the node template + if (null == (opDef = paths.nodeTemplateInterfaceOperation( + theContext.target(), + stepTarget, + interfaceName, + interfaceOp))) { + //look in the node type + if (null == (opDef = paths.nodeTypeInterfaceOperation( + paths.nodeTemplateType(theContext.target(), stepTarget), + interfaceName, + interfaceOp))) { + + //look into the interface type definition, if the node has an interface with the given name + if (null == + (opDef = paths.interfaceTypeOperation( + paths.nodeTypeInterfaceType(paths.nodeTemplateType(theContext.target(), stepTarget), + interfaceName), + interfaceOp))) { + theContext.addError("Step target node " + stepTarget + " does not have a " + interfaceName + " with operation " + interfaceOp, null); + } + } + } + + System.out.println("*** opDef: " + opDef); + } + } + + String workflow = (String)theDef.get("inline"); + if (workflow != null && !((Map)theContext.enclosingElement("workflows").info()).containsKey(workflow)) { + theContext.addError("Activity 'inline' " + workflow + " was not declared", null); + } + } + + /* it would have been great to be ab le to convey in the return value the type of construct + */ + 
protected Map check_target_definition(Map theDef, Checker.CheckContext theContext) { + + String target = (String)theDef.get("target"); + Construct targetConstruct = null; + Map targetDef = null; + + if ((targetDef = theContext.catalog().getTemplate(theContext.target(), Construct.Group, target)) != null) { + targetConstruct = Construct.Group; + } + else if ((targetDef = theContext.catalog().getTemplate(theContext.target(), Construct.Node, target)) != null) { + targetConstruct = Construct.Node; + } + else { + theContext.addError("The 'target' entry must contain a reference to a node template or group template, '" + target + "' is none of those", null); + } + + String targetRelationship = (String)theDef.get("target_relationship"); + if (targetRelationship != null) { + if (targetConstruct.equals(Construct.Node)) { + //'The optional name of a requirement of the target in case the step refers to a relationship rather than a node or group' + if (null == theContext.catalog().getRequirementDefinition(Construct.Node, + (String)targetDef.get("type"), + targetRelationship)) { + theContext.addError("The 'target' (" + target + ") has no requirement '" + targetRelationship + "', as indicated by 'targetRelationship'", null); + } + } + else { + theContext.addError("The 'target_relationship' is only applicable if the 'target' (" + target + ") is a node template (which it is not)", null); + } + } + + return targetDef; + } + + protected boolean check_condition_clause_definition(Map theTargetDefinition, + List theConditionDef, + Checker.CheckContext theContext) { + boolean res = true; + + for (Map clause: theConditionDef) { + Map.Entry clauseEntry = (Map.Entry)clause.entrySet().iterator().next(); + String clauseType = (String)clauseEntry.getKey(); + + if ("and".equals(clauseType) || "or".equals(clauseType)) { + res &= check_condition_clause_definition(theTargetDefinition, (List)clauseEntry.getValue(), theContext); + } + else if ("assert".equals(clauseType)) { + List assertions = 
(List)clauseEntry.getValue(); + for (Map assertion: assertions) { + //expected to be a one entry map + for (Object attributeName: assertion.keySet()) { + //the attribute must be defined in the given Target .. but there are also exposed built-in attributes (such as 'state') that we are not aware of + } + } + } + } + + return res; + } +} + diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/annotations/Catalogs.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/annotations/Catalogs.java new file mode 100644 index 0000000..829d756 --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/annotations/Catalogs.java @@ -0,0 +1,49 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. + */ +package org.onap.tosca.checker.annotations; + +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.ElementType; +import java.lang.annotation.Target; + + +/** + * Marks a method as a processing step during the cataloging phase. Allows for custom cataloging of (any) tosca + * construct information. + * The expected signature of a cataloging processing function is: + * param theDef of type matching the representation of the expected/processed construct + * param theContext exposes the processing state of the checker + * + *
+ * {@code + * package org.onap.tosca.myaddons; + * + * import org.onap.tosca.checker.annotations.Catalogs; + * + * public class MyToscaCatalog { + * .. + * @Catalogs(path="/node_types") + * public void catalog_node_types( + * Map theDefinitions, CheckContext theContext) { + * //doing my thing + * } + * } + *
+ * + */ +@Retention(RetentionPolicy.RUNTIME) +@Target({ElementType.METHOD}) +public @interface Catalogs { + String path() default "/"; +} diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/annotations/Checks.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/annotations/Checks.java new file mode 100644 index 0000000..856ac1b --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/annotations/Checks.java @@ -0,0 +1,42 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. + */ +package org.onap.tosca.checker.annotations; + +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.ElementType; +import java.lang.annotation.Target; + + +/** + * Marks a method as a processing step during the checking phase. Allows for additional checking to be 'plugged' in. + */ +@Retention(RetentionPolicy.RUNTIME) +@Target({ElementType.TYPE, ElementType.METHOD}) +/** + * Marks a method as participating in the consistency checking stage. 
The method is expected to have the following + * signature: + * param theDef type of expected representation of the construct's information, for example Map for + * node_types or List for requirements + * param theContext CheckContext instance exposing the checker state + * + * Note: currently not used at type level + */ +/* The iffy part: as a type annotation we do not need a path or a version specification, + as a method annotation it is mandatory (cannot be the default) + We could foresee that a version indication at type level would cover all check handlers within the type + */ +public @interface Checks { + String path() default "/"; + String[] version() default { "1.0", "1.0.0", "1.1", "1.1.0" }; +} diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/annotations/Validates.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/annotations/Validates.java new file mode 100644 index 0000000..f34a9fc --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/annotations/Validates.java @@ -0,0 +1,41 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. 
+ */ +package org.onap.tosca.checker.annotations; + +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.ElementType; +import java.lang.annotation.Target; + + +/** + * Marks a method as a processing step during the validation phase (grammar rules checking). + * The expected signature of a method so annotated is: + * param theObject the POJO currently being subject to validation + * param theRule kwalify yaml syntax rule representation + * param theContext kwalify validation context exposing the state of the validator + */ +@Retention(RetentionPolicy.RUNTIME) +@Target({ElementType.TYPE, ElementType.METHOD}) +public @interface Validates { + + public static enum Timing { + pre, + post + } + + /* empty string means 'all rules' */ + String rule(); + + Validates.Timing timing(); +} diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/annotations/package-info.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/annotations/package-info.java new file mode 100644 index 0000000..e7b268a --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/annotations/package-info.java @@ -0,0 +1,47 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. 
+ */ + +/** + * Contains the definition of the annotations used to mark methods used in processing a TOSCA template, one annotation + * for each stage (not for locate and parsed: the location process is done through org.onap.tosca.checker.Locator and + * parsing is currently built in). + * + * The checker {@link org.onap.tosca.checker.Checker Checker} scans the classpath for classes with methods annotated + * according to the annotations in this package (currently we scan packages prefixed with org.onap.tosca). + * + * At different stages the checker will delegate processing to these annotated methods. + * + * {@link org.onap.tosca.checker.annotations.Validates Validates} marks a method as part of the validation stage, i.e. + * syntax checking. It requires the indication of the grammar rule it intends to add validation to and whether it + * performs pre or post processing. + * + * {@link org.onap.tosca.checker.annotations.Checks Checks} marks a method as part of the consistency checks stage. + * A method so annotated will be invoked when the processing reaches the document path indicated in the annotation. + * A document path follows the structure of the TOSCA yaml document, for example: + * /node_types/some_node_type + * /topology_template/node_templates + * Note that current implementation works by delegation down the hierarchical structure of the document. If no processor + * is in place for /topology_templates/ or the one(s) in place do not delegate any further then a processor for + * /topology_template/node_templates will not get involved. The framework only attempts the invocation of first level + * (hierarchically speaking) checks. + * + * {@link org.onap.tosca.checker.annotations.Catalogs Catalogs} marks a method as part of the cataloging stage, i.e. + * the storage/indexing (of some sort, it does not really need to be storing) of a construct that has been validated. 
+ * Same as with {@link org.onap.tosca.checker.annotations.Checks Checks} the scope of a Cataloger is determined through + * a document path specification. + * + * A third party can provide its own processing (by using these annotations) and perform custom verifications, indexing + * for special post-processing, etc. + */ + +package org.onap.tosca.checker.annotations; diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Artifact.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Artifact.java new file mode 100644 index 0000000..8d67f0f --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Artifact.java @@ -0,0 +1,32 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. 
+ */ +package org.onap.tosca.checker.model; + +/** + * Artifact definition, spec section 3.5.6 + */ +public interface Artifact extends TOSCAObject { + + public String name(); + + public String type(); + + public String description(); + + public String file(); + + public String repository(); + + public String deploy_path(); + +} diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/ArtifactType.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/ArtifactType.java new file mode 100644 index 0000000..798a550 --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/ArtifactType.java @@ -0,0 +1,42 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. 
+ */ +package org.onap.tosca.checker.model; + +import java.util.List; + +/** + * Artifact type definition, spec section 3.6.4 + */ +public interface ArtifactType extends TOSCAObject { + + public String name(); + + public String derived_from(); + + public String description(); + + public String version(); + + public default Metadata metadata() { + return (Metadata)proxy("metadata", Metadata.class); + } + + public String mime_type(); + + public List file_ext(); + + public default Properties properties() { + return (Properties)proxy("properties", Properties.class); + } + +} diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/ArtifactTypes.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/ArtifactTypes.java new file mode 100644 index 0000000..d714cae --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/ArtifactTypes.java @@ -0,0 +1,18 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. 
+ */ +package org.onap.tosca.checker.model; + + +public interface ArtifactTypes extends TOSCAMap { + +} diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Artifacts.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Artifacts.java new file mode 100644 index 0000000..7c22eac --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Artifacts.java @@ -0,0 +1,18 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. + */ +package org.onap.tosca.checker.model; + + +public interface Artifacts extends TOSCAMap { + +} diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Attribute.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Attribute.java new file mode 100644 index 0000000..0515f78 --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Attribute.java @@ -0,0 +1,34 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. + */ +package org.onap.tosca.checker.model; + +/** + */ +public interface Attribute extends TOSCAObject { + + public String name(); + + public String type(); + + public String description(); + + public default Object _default() { + return info().get("default"); + } + + public Boolean required(); + + public String status(); + + public String entry_schema(); +} diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Attributes.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Attributes.java new file mode 100644 index 0000000..d5ee2ca --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Attributes.java @@ -0,0 +1,18 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. 
+ */ +package org.onap.tosca.checker.model; + + +public interface Attributes extends TOSCAMap { + +} diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/AttributesAssignments.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/AttributesAssignments.java new file mode 100644 index 0000000..70ac163 --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/AttributesAssignments.java @@ -0,0 +1,21 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. + */ +package org.onap.tosca.checker.model; + + +/* + * A simple representation of the attribute value assignments, spec section 3.5.11 + */ +public interface AttributesAssignments extends TOSCAObject { + +} diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Capabilities.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Capabilities.java new file mode 100644 index 0000000..31d0e45 --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Capabilities.java @@ -0,0 +1,22 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. + */ +package org.onap.tosca.checker.model; + + +/** + * Collection of type specific capability definitions (spec section 3.6.2) + */ +public interface Capabilities extends TOSCAMap { + +} + diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/CapabilitiesAssignments.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/CapabilitiesAssignments.java new file mode 100644 index 0000000..d0f4200 --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/CapabilitiesAssignments.java @@ -0,0 +1,22 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. 
+ */ +package org.onap.tosca.checker.model; + + +/** + * Collection of template specific capability assignment (spec section 3.7.2) + */ +public interface CapabilitiesAssignments extends TOSCAMap { + +} + diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Capability.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Capability.java new file mode 100644 index 0000000..72df0b7 --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Capability.java @@ -0,0 +1,61 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. + */ +package org.onap.tosca.checker.model; + +import java.util.List; + +/* + * Type specific capability definition, spec section 3.6.2 + */ +public interface Capability extends TOSCAObject { + + /** + * The required name of the Capability Type the capability definition is based upon. + */ + public String type(); + + /** + * The optional description of the Capability definition. + */ + public String description(); + + /** + * An optional list of one or more valid names of Node Types that are supported as valid sources of any + * relationship established to the declared Capability Type. + */ + public List valid_source_types(); + + /** + * The optional minimum and maximum occurrences for the capability. 
+ * By default, an exported Capability should allow at least one relationship to be formed with it with a + * maximum of UNBOUNDED relationships. + */ + public default Range occurences() { + return (Range)proxyList("occurences", Range.class); + } + + /** + * An optional list of property definitions for the Capability definition. + */ + public default Properties properties() { + return (Properties)proxy("properties", Properties.class); + } + + /** + * An optional list of attribute definitions for the Capability definition. + */ + public default Attributes attributes() { + return (Attributes)proxy("attributes", Attributes.class); + } + +} diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/CapabilityAssignment.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/CapabilityAssignment.java new file mode 100644 index 0000000..1c49dbe --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/CapabilityAssignment.java @@ -0,0 +1,39 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. 
+ */ +package org.onap.tosca.checker.model; + +import java.util.List; + +/* + * Type specific capability definition, spec section 3.7.1 + */ +public interface CapabilityAssignment extends TOSCAObject { + + /** */ + public String name(); + + /** + * An optional list of property definitions for the Capability definition. + */ + public default PropertiesAssignments properties() { + return (PropertiesAssignments)proxy("properties", PropertiesAssignments.class); + } + + /** + * An optional list of attribute definitions for the Capability definition. + */ + public default AttributesAssignments attributes() { + return (AttributesAssignments)proxy("attributes", AttributesAssignments.class); + } + +} diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/CapabilityType.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/CapabilityType.java new file mode 100644 index 0000000..64808d1 --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/CapabilityType.java @@ -0,0 +1,36 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. 
+ */ +package org.onap.tosca.checker.model; + +import java.util.List; + +public interface CapabilityType extends TOSCAObject { + + public String name(); + + public String derived_from(); + + public String description(); + + public String version(); + + public List valid_source_types(); + + public default Properties properties() { + return (Properties)proxy("properties", Properties.class); + } + + public default Attributes attributes() { + return (Attributes)proxy("attributes", Attributes.class); + } +} diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/CapabilityTypes.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/CapabilityTypes.java new file mode 100644 index 0000000..3567afe --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/CapabilityTypes.java @@ -0,0 +1,18 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. 
+ */ +package org.onap.tosca.checker.model; + + +public interface CapabilityTypes extends TOSCAMap { + +} diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Constraint.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Constraint.java new file mode 100644 index 0000000..03c8d20 --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Constraint.java @@ -0,0 +1,41 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. 
+ */ +package org.onap.tosca.checker.model; + +/** + * constraint definition, spec section 3.6.6 + */ +public interface Constraint extends TOSCAObject { + + public Constraint.Type name(); + + /* this is a one entry map so here we pick the single + */ + public default Object expression() { + return info().values().iterator().next(); + } + + public enum Type { + equal, + greater_than, + greater_or_equal, + less_than, + less_or_equal, + in_range, + valid_values, + length, + min_length, + max_length, + pattern + } +} diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Constraints.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Constraints.java new file mode 100644 index 0000000..8413cd7 --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Constraints.java @@ -0,0 +1,19 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. 
+ */ +package org.onap.tosca.checker.model; + + +public interface Constraints extends TOSCASeq { + +} + diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/DataType.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/DataType.java new file mode 100644 index 0000000..941695c --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/DataType.java @@ -0,0 +1,40 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. 
+ */ +package org.onap.tosca.checker.model; + +/** + * Data type definition, spec section 3.6.6 + */ +public interface DataType extends TOSCAObject { + + public String name(); + + public String derived_from(); + + public String description(); + + public String version(); + + public default Metadata metadata() { + return (Metadata)proxy("metadata", Metadata.class); + } + + public default Properties properties() { + return (Properties)proxy("properties", Properties.class); + } + + public default Constraints constraints() { + return (Constraints)proxy("constraints", Constraints.class); + } + +} diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/DataTypes.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/DataTypes.java new file mode 100644 index 0000000..10b343a --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/DataTypes.java @@ -0,0 +1,18 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. 
+ */ +package org.onap.tosca.checker.model; + + +public interface DataTypes extends TOSCAMap { + +} diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/EntrySchema.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/EntrySchema.java new file mode 100644 index 0000000..3aebaab --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/EntrySchema.java @@ -0,0 +1,27 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. + */ +package org.onap.tosca.checker.model; + +/** + * Used in data type, property, input and so on definitions, see spec section + */ +public interface EntrySchema extends TOSCAObject { + + public String type(); + + public String description(); + + public default Constraints constraints() { + return (Constraints)proxy("constraints", Constraints.class); + } +} diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Group.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Group.java new file mode 100644 index 0000000..9e530c1 --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Group.java @@ -0,0 +1,42 @@ +/* + * Copyright (c) 2017 . All rights reserved. 
+ * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. + */ +package org.onap.tosca.checker.model; + +import java.util.List; + +/** + * Spec section 3.7.5 + */ +public interface Group extends TOSCAObject { + + public String name(); + + public String type(); + + public String description(); + + public default Metadata metadata() { + return (Metadata)proxy("metadata", Metadata.class); + } + + public default PropertiesAssignments properties() { + return (PropertiesAssignments)proxy("properties", PropertiesAssignments.class); + } + + public default TemplateInterfaces interfaces() { + return (TemplateInterfaces)proxy("interfaces", TemplateInterfaces.class); + } + + public List members(); + +} diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/GroupType.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/GroupType.java new file mode 100644 index 0000000..acaea03 --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/GroupType.java @@ -0,0 +1,56 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. + */ +package org.onap.tosca.checker.model; + +import java.util.List; + +/** + * Group type definition, spec section 3.6.11 + */ +public interface GroupType extends TOSCAObject { + + public String name(); + + public String derived_from(); + + public String description(); + + public String version(); + + public default Metadata metadata() { + return (Metadata)proxy("metadata", Metadata.class); + } + + public List members(); + + public default Properties properties() { + return (Properties)proxy("properties", Properties.class); + } + + public default Attributes attributes() { + return (Attributes)proxy("attributes", Attributes.class); + } + + public default TypeInterfaces interfaces() { + return (TypeInterfaces)proxy("interfaces", TypeInterfaces.class); + } + + public default Requirements requirements() { + return (Requirements)proxy("requirements", Requirements.class); + } + + public default Capabilities capabilities() { + return (Capabilities)proxy("capabilities", Capabilities.class); + } + +} diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/GroupTypes.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/GroupTypes.java new file mode 100644 index 0000000..8254af1 --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/GroupTypes.java @@ -0,0 +1,18 @@ +/* + * Copyright (c) 2017 . All rights reserved. 
+ * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. + */ +package org.onap.tosca.checker.model; + + +public interface GroupTypes extends TOSCAMap { + +} diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Groups.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Groups.java new file mode 100644 index 0000000..d6f770a --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Groups.java @@ -0,0 +1,18 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. 
+ */ +package org.onap.tosca.checker.model; + + +public interface Groups extends TOSCAMap { + +} diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Import.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Import.java new file mode 100644 index 0000000..2e7edf8 --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Import.java @@ -0,0 +1,31 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. + */ +package org.onap.tosca.checker.model; + + +/** + * Import definition, see section + */ +public interface Import extends TOSCAObject { + + public String name(); + + public String file(); + + public String repository(); + + public String namespace_uri(); + + public String namespace_prefix(); + +} diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Imports.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Imports.java new file mode 100644 index 0000000..3aed669 --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Imports.java @@ -0,0 +1,19 @@ +/* + * Copyright (c) 2017 . All rights reserved. 
+ * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. + */ +package org.onap.tosca.checker.model; + + +public interface Imports extends TOSCAMap { + + +} diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Input.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Input.java new file mode 100644 index 0000000..e636d90 --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Input.java @@ -0,0 +1,43 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. 
+ */ +package org.onap.tosca.checker.model; + + +/** + * Same as property definition + */ +public interface Input extends TOSCAObject { + + public String name(); + + public String type(); + + public String description(); + + public default Object _default() { + return info().get("default"); + } + + public boolean required(); + + public Status status(); + + public default Constraints constraints() { + return (Constraints)proxy("constraints", Constraints.class); + } + + public default EntrySchema entry_schema() { + return (EntrySchema)proxy("entry_schema", EntrySchema.class); + } + +} diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Inputs.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Inputs.java new file mode 100644 index 0000000..1f19079 --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Inputs.java @@ -0,0 +1,19 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. 
+ */ +package org.onap.tosca.checker.model; + + +public interface Inputs extends TOSCAMap { + + +} diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/InterfaceType.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/InterfaceType.java new file mode 100644 index 0000000..559f24b --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/InterfaceType.java @@ -0,0 +1,58 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. + */ +package org.onap.tosca.checker.model; + +import java.util.Map; + +import com.google.common.collect.Maps; + +/** + * Interface type definition, spec section 3.6.5 + */ +public interface InterfaceType extends TOSCAObject { + + public String name(); + + public String derived_from(); + + public String description(); + + public String version(); + + public default Metadata metadata() { + return (Metadata)proxy("metadata", Metadata.class); + } + + public default Properties inputs() { + return (Properties)proxy("inputs", Properties.class); + } + + /** + * The set of operations, made up by all keys but the ones above .. 
+ */ + public default Operations operations() { + return (Operations) + TOSCAProxy.record(info(), + info -> TOSCAProxy.buildMap(null, + Maps.filterKeys((Map)info, + key -> !("derived_from".equals(key) || + "description".equals(key) || + "version".equals(key) || + "metadata".equals(key) || + "inputs".equals(key))), + Operations.class)); + + } + + +} diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/InterfaceTypes.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/InterfaceTypes.java new file mode 100644 index 0000000..4b42c8e --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/InterfaceTypes.java @@ -0,0 +1,18 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. + */ +package org.onap.tosca.checker.model; + + +public interface InterfaceTypes extends TOSCAMap { + +} diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Metadata.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Metadata.java new file mode 100644 index 0000000..8e395dc --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Metadata.java @@ -0,0 +1,20 @@ +/* + * Copyright (c) 2017 . All rights reserved. 
+ * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. + */ +package org.onap.tosca.checker.model; + +/* + * No predefined entries here, so just use the java.util.Map interface get, i.e. get("some_entry_name") + */ +public interface Metadata extends TOSCAObject { + +} diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Models.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Models.java new file mode 100644 index 0000000..868d059 --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Models.java @@ -0,0 +1,86 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. 
+ */ +package org.onap.tosca.checker.model; + +import java.io.File; +import java.util.Map; + +import org.onap.tosca.checker.Target; +import org.onap.tosca.checker.Catalog; +import org.onap.tosca.checker.Checker; + + +public class Models { + + public static ServiceTemplate service_template(Target theTarget) { + return TOSCAProxy.buildObject("", (Map)theTarget.getTarget(), ServiceTemplate.class); + } + + + + public static void main(String[] theArgs) { + + try { + Catalog cat = new Checker().check(new File(theArgs[0])); + + for (Target t: cat.targets()) { + System.err.println(t.getLocation() + "\n" + cat.importString(t) + "\n" + t.getReport()); + } + + //for (Target t: cat.sortedTargets()) { + // System.out.println(t); + //} + + ServiceTemplate tmpl = service_template(cat.topTargets().iterator().next()); + + NodeTypes node_types = tmpl.node_types(); + if (node_types != null) { + for (NodeType nt: node_types.values()) { + System.out.println(" ** node type : " + nt.name()); + Requirements reqs = nt.requirements(); + if (reqs != null) { + for (Requirement req: reqs) + System.out.println("\t ** requirement " + req.name() + "/" + req.capability()); + } + } + } + + Substitution subs = tmpl.topology_template().substitution_mappings(); + System.out.format("Substitution Mapping %s", subs.node_type()); + Substitution.Mappings maps = subs.capabilities(); + if (maps != null) { + for (Substitution.Mapping m: maps.values()) + System.out.format("\n\tCapability mapping %s : %s)", m.target(), m.mapping()); + } + maps = subs.requirements(); + if (maps != null) { + for (Substitution.Mapping m: maps.values()) + System.out.format("\n\tRequirement mapping %s : %s", m.target(), m.mapping()); + } +/* +for(NodeTemplate nt: tmpl.topology_template().node_templates().values()) { + System.out.println(" ** node : " + nt.name() + " : " + nt.type()); + PropertiesAssignments props = nt.properties(); + if (props != null) { + props.entrySet().stream().forEach(e -> { + System.out.println(" ** property : 
" + e.getKey() + " : " + e.getValue()); + }); + } +*/ + } + catch (Exception x) { + x.printStackTrace(); + } + } +} + diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/NodeFilter.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/NodeFilter.java new file mode 100644 index 0000000..27e3de2 --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/NodeFilter.java @@ -0,0 +1,28 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. + */ +package org.onap.tosca.checker.model; + +/** + * Node filter definition, spec section + */ +public interface NodeFilter extends TOSCAObject { + + public default Properties properties() { + return (Properties)proxy("properties", Properties.class); + } + + public default Capabilities capabilities() { + return (Capabilities)proxy("capabilities", Capabilities.class); + } + +} diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/NodeTemplate.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/NodeTemplate.java new file mode 100644 index 0000000..6f20e2b --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/NodeTemplate.java @@ -0,0 +1,60 @@ +/* + * Copyright (c) 2017 . All rights reserved. 
+ * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. + */ +package org.onap.tosca.checker.model; + +import java.util.List; + +/** + * See tosca spec section + */ +public interface NodeTemplate extends TOSCAObject { + + public String name(); + + public String type(); + + public String description(); + + public List directives(); + + public String copy(); + + public default Metadata metadata() { + return (Metadata)proxy("metadata", Metadata.class); + } + + public default PropertiesAssignments properties() { + return (PropertiesAssignments)proxy("properties", PropertiesAssignments.class); + } + + public default AttributesAssignments attributes() { + return (AttributesAssignments)proxy("attributes", AttributesAssignments.class); + } + + public default CapabilitiesAssignments capabilities() { + return (CapabilitiesAssignments)proxy("capabilities", CapabilitiesAssignments.class); + } + + public default RequirementsAssignments requirements() { + return (RequirementsAssignments)proxy("requirements", RequirementsAssignments.class); + } + + public default TemplateInterfaces interfaces() { + return (TemplateInterfaces)proxy("interfaces", TemplateInterfaces.class); + } + + public default Artifacts artifacts() { + return (Artifacts)proxy("artifacts", Artifacts.class); + } + +} diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/NodeTemplates.java 
b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/NodeTemplates.java new file mode 100644 index 0000000..4e4bb09 --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/NodeTemplates.java @@ -0,0 +1,18 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. + */ +package org.onap.tosca.checker.model; + + +public interface NodeTemplates extends TOSCAMap { + +} diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/NodeType.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/NodeType.java new file mode 100644 index 0000000..06d79bb --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/NodeType.java @@ -0,0 +1,55 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the License for + * the specific language governing permissions and limitations under the License. + */ +package org.onap.tosca.checker.model; + +/** + * Node type definition, spec section 3.6.9 + */ +public interface NodeType extends TOSCAObject { + + public String name(); + + public String derived_from(); + + public String description(); + + public String version(); + + public default Metadata metadata() { + return (Metadata)proxy("metadata", Metadata.class); + } + + public default Properties properties() { + return (Properties)proxy("properties", Properties.class); + } + + public default Attributes attributes() { + return (Attributes)proxy("attributes", Attributes.class); + } + + public default Requirements requirements() { + return (Requirements)proxy("requirements", Requirements.class); + } + + public default Capabilities capabilities() { + return (Capabilities)proxy("capabilities", Capabilities.class); + } + + public default TypeInterfaces interfaces() { + return (TypeInterfaces)proxy("interfaces", TypeInterfaces.class); + } + + public default Artifacts artifacts() { + return (Artifacts)proxy("artifacts", Artifacts.class); + } +} diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/NodeTypes.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/NodeTypes.java new file mode 100644 index 0000000..e61deb4 --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/NodeTypes.java @@ -0,0 +1,18 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. + */ +package org.onap.tosca.checker.model; + + +public interface NodeTypes extends TOSCAMap { + +} diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Operation.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Operation.java new file mode 100644 index 0000000..fbc66b3 --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Operation.java @@ -0,0 +1,32 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. 
+ */ +package org.onap.tosca.checker.model; + + +/** + * This is the type specific definition, as per spec section 3.5.13.2.2 + */ +public interface Operation extends TOSCAObject { + + + public String name(); + + public String description(); + + public String implementation(); + + public default Properties inputs() { + return (Properties)proxy("inputs", Properties.class); + } + +} diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Operations.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Operations.java new file mode 100644 index 0000000..726ad90 --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Operations.java @@ -0,0 +1,18 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. + */ +package org.onap.tosca.checker.model; + + +public interface Operations extends TOSCAMap { + +} diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Outputs.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Outputs.java new file mode 100644 index 0000000..2daed56 --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Outputs.java @@ -0,0 +1,19 @@ +/* + * Copyright (c) 2017 . All rights reserved. 
+ * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. + */ +package org.onap.tosca.checker.model; + + +public interface Outputs extends TOSCAMap { + + +} diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Parameter.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Parameter.java new file mode 100644 index 0000000..e1d771b --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Parameter.java @@ -0,0 +1,45 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. 
+ */ +package org.onap.tosca.checker.model; + + +/** + * Spec section 3.5.12 + */ +public interface Parameter extends TOSCAObject { + + public String name(); + + public String type(); + + public String description(); + + public Object value(); + + public default Object _default() { + return info().get("default"); + } + + public boolean required(); + + public Status status(); + + public default Constraints constraints() { + return (Constraints)proxy("constraints", Constraints.class); + } + + public default EntrySchema entry_schema() { + return (EntrySchema)proxy("entry_schema", EntrySchema.class); + } + +} diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Policies.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Policies.java new file mode 100644 index 0000000..21a6be5 --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Policies.java @@ -0,0 +1,18 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. 
+ */ +package org.onap.tosca.checker.model; + + +public interface Policies extends TOSCAMap { + +} diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Policy.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Policy.java new file mode 100644 index 0000000..4d291a9 --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Policy.java @@ -0,0 +1,42 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. 
+ */ +package org.onap.tosca.checker.model; + +import java.util.List; + +/** + * Policy type definition, spec section 3.7.6 + */ +public interface Policy extends TOSCAObject { + + public String type(); + + public String description(); + + public String version(); + + public default Metadata metadata() { + return (Metadata)proxy("metadata", Metadata.class); + } + + public List targets(); + + public default PropertiesAssignments properties() { + return (PropertiesAssignments)proxy("properties", PropertiesAssignments.class); + } + + public default PolicyType.Triggers triggers() { + return (PolicyType.Triggers)proxy("triggers", PolicyType.Triggers.class); + } + +} diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/PolicyType.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/PolicyType.java new file mode 100644 index 0000000..08a4bfa --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/PolicyType.java @@ -0,0 +1,89 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. 
+ */ +package org.onap.tosca.checker.model; + +import java.util.List; + +/** + * Policy type definition, spec section 3.6.12 + */ +public interface PolicyType extends TOSCAObject { + + public String name(); + + public String derived_from(); + + public String description(); + + public String version(); + + public default Metadata metadata() { + return (Metadata)proxy("metadata", Metadata.class); + } + + public List targets(); + + public default Properties properties() { + return (Properties)proxy("properties", Properties.class); + } + + public default Triggers triggers() { + return (Triggers)proxy("triggers", Triggers.class); + } + + public interface Triggers extends TOSCAMap { + + } + + public interface Trigger extends TOSCAObject { + + public String description(); + + public String event_type(); + + public String schedule(); + + public default Constraints constraint() { + return (Constraints)proxy("constraint", Constraints.class); + } + + public default Constraints condition() { + return (Constraints)proxy("condition", Constraints.class); + } + + public int period(); + + public int evaluations(); + + public String method(); + + public String action(); + + //target_filter + public default EventFilter target_filter() { + return (EventFilter)proxy("target_filter", EventFilter.class); + } + + } + + public interface EventFilter extends TOSCAObject { + + public String node(); + + public String requirement(); + + public String capability(); + + } + +} diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/PolicyTypes.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/PolicyTypes.java new file mode 100644 index 0000000..8ef6278 --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/PolicyTypes.java @@ -0,0 +1,18 @@ +/* + * Copyright (c) 2017 . All rights reserved. 
+ * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. + */ +package org.onap.tosca.checker.model; + + +public interface PolicyTypes extends TOSCAMap { + +} diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Properties.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Properties.java new file mode 100644 index 0000000..d2a8e85 --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Properties.java @@ -0,0 +1,18 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. 
+ */ +package org.onap.tosca.checker.model; + + +public interface Properties extends TOSCAMap { + +} diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/PropertiesAssignments.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/PropertiesAssignments.java new file mode 100644 index 0000000..dd949ff --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/PropertiesAssignments.java @@ -0,0 +1,22 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. + */ +package org.onap.tosca.checker.model; + + +/* + * A simple representation of the property value assignments, to be used through the Map interface + * Working with this more basic representation keeps all (jx)paths expressions valid + */ +public interface PropertiesAssignments extends TOSCAObject { + +} diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Property.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Property.java new file mode 100644 index 0000000..126a346 --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Property.java @@ -0,0 +1,41 @@ +/* + * Copyright (c) 2017 . All rights reserved. 
+ * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. + */ +package org.onap.tosca.checker.model; + +/** + * See spec section 3.5.8 + */ +public interface Property extends TOSCAObject { + + public String name(); + + public String type(); + + public String description(); + + public default Object _default() { + return info().get("default"); + } + + public boolean required(); + + public Status status(); + + public default Constraints constraints() { + return (Constraints)proxy("constraints", Constraints.class); + } + + public default EntrySchema entry_schema() { + return (EntrySchema)proxy("entry_schema", EntrySchema.class); + } +} diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Range.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Range.java new file mode 100644 index 0000000..ed90ed7 --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Range.java @@ -0,0 +1,32 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. + */ +package org.onap.tosca.checker.model; + +import java.util.List; + +/* + */ +public interface Range extends List { + + public default Object lower() { + return get(0); + } + + public default Object upper() { + return get(1); + } + + public default boolean isUnbounded() { + return "UNBOUNDED".equals(upper()); + } +} diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/RelationshipTemplate.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/RelationshipTemplate.java new file mode 100644 index 0000000..89e1fa0 --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/RelationshipTemplate.java @@ -0,0 +1,45 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. 
+ */ +package org.onap.tosca.checker.model; + +import java.util.List; + +/** + */ +public interface RelationshipTemplate extends TOSCAObject { + + public String name(); + + public String type(); + + public String description(); + + public String copy(); + + public default Metadata metadata() { + return (Metadata)proxy("metadata", Metadata.class); + } + + public default PropertiesAssignments properties() { + return (PropertiesAssignments)proxy("properties", PropertiesAssignments.class); + } + + public default AttributesAssignments attributes() { + return (AttributesAssignments)proxy("attributes", AttributesAssignments.class); + } + + public default TemplateInterfaces interfaces() { + return (TemplateInterfaces)proxy("interfaces", TemplateInterfaces.class); + } + +} diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/RelationshipTemplates.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/RelationshipTemplates.java new file mode 100644 index 0000000..d4f73d6 --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/RelationshipTemplates.java @@ -0,0 +1,18 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. 
+ */ +package org.onap.tosca.checker.model; + + +public interface RelationshipTemplates extends TOSCAMap { + +} diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/RelationshipType.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/RelationshipType.java new file mode 100644 index 0000000..844f176 --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/RelationshipType.java @@ -0,0 +1,48 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. 
+ */ +package org.onap.tosca.checker.model; + +import java.util.List; + +/** + * Relationship type definition, spec section 3.6.10 + */ +public interface RelationshipType extends TOSCAObject { + + public String name(); + + public String derived_from(); + + public String description(); + + public String version(); + + public default Metadata metadata() { + return (Metadata)proxy("metadata", Metadata.class); + } + + public default Properties properties() { + return (Properties)proxy("properties", Properties.class); + } + + public default Attributes attributes() { + return (Attributes)proxy("attributes", Attributes.class); + } + + public default TypeInterfaces interfaces() { + return (TypeInterfaces)proxy("interfaces", TypeInterfaces.class); + } + + public List valid_target_types(); + +} diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/RelationshipTypes.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/RelationshipTypes.java new file mode 100644 index 0000000..c5144f5 --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/RelationshipTypes.java @@ -0,0 +1,18 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. 
+ */ +package org.onap.tosca.checker.model; + + +public interface RelationshipTypes extends TOSCAMap { + +} diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Repositories.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Repositories.java new file mode 100644 index 0000000..b30bfe7 --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Repositories.java @@ -0,0 +1,19 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. + */ +package org.onap.tosca.checker.model; + + +public interface Repositories extends TOSCAMap { + +} + diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Repository.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Repository.java new file mode 100644 index 0000000..b685378 --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Repository.java @@ -0,0 +1,55 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. + */ +package org.onap.tosca.checker.model; + +import java.util.Map; + + +/* + * Repository definition (spec section 3.6.3) + */ +public interface Repository extends TOSCAObject { + + /** */ + public String name(); + + /** */ + public String description(); + + /** */ + public String url(); + + /** */ + public default Credential credential() { + return (Credential)proxy("credential", Credential.class); + } + + /** */ + public interface Credential extends TOSCAObject { + + /** */ + public String protocol(); + + /** */ + public String token_type(); + + /** */ + public String token(); + + /** */ + public String user(); + + /** */ + public Map keys(); + } +} diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Requirement.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Requirement.java new file mode 100644 index 0000000..5e3c4f8 --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Requirement.java @@ -0,0 +1,50 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the License for + * the specific language governing permissions and limitations under the License. + */ +package org.onap.tosca.checker.model; + +/* + * Requirement definition, as it appears in node type definitions (spec section 3.6.3) + */ +public interface Requirement extends TOSCAObject { + + /** */ + public String name(); + + /** */ + public String capability(); + + /** */ + public String node(); + + /** */ + public default Range occurences() { + return (Range)proxy("relationship", Range.class); + } + + public default Relationship relationship() { + return (Relationship)proxy("relationship", Relationship.class); + } + + /** + * Spec section 3.6.3.2.3 + */ + public interface Relationship extends TOSCAObject { + + public String type(); + + public default TypeInterfaces interfaces() { + return (TypeInterfaces)proxy("interfaces", TypeInterfaces.class); + } + } + +} diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/RequirementAssignment.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/RequirementAssignment.java new file mode 100644 index 0000000..7d03453 --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/RequirementAssignment.java @@ -0,0 +1,55 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. 
+ */ +package org.onap.tosca.checker.model; + +/* + * Requirement assignment as it appears in node templates. See spec section 3.7.2 + */ +public interface RequirementAssignment extends TOSCAObject { + + public String name(); + + /** + * Provide the name of either a: + * Capability definition within a target node template that can fulfill the requirement. + * Capability Type that the provider will use to select a type-compatible target node template to fulfill the requirement at runtime. + */ + public String capability(); + + /** + */ + public String node(); + + /** */ + public default NodeFilter node_filter() { + return (NodeFilter)proxy("node_filter", NodeFilter.class); + } + + /** */ + public default RelationshipAssignment relationship() { + return (RelationshipAssignment)proxy("relationship", RelationshipAssignment.class); + } + + public interface RelationshipAssignment extends TOSCAObject { + + public String type(); + + public default PropertiesAssignments properties() { + return (PropertiesAssignments)proxy("properties", PropertiesAssignments.class); + } + + public default TemplateInterfaces interfaces() { + return (TemplateInterfaces)proxy("interfaces", TemplateInterfaces.class); + } + } +} diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Requirements.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Requirements.java new file mode 100644 index 0000000..b29a256 --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Requirements.java @@ -0,0 +1,19 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. + */ +package org.onap.tosca.checker.model; + + +public interface Requirements extends TOSCASeq { + +} + diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/RequirementsAssignments.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/RequirementsAssignments.java new file mode 100644 index 0000000..a621c44 --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/RequirementsAssignments.java @@ -0,0 +1,19 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. 
+ */ +package org.onap.tosca.checker.model; + + +public interface RequirementsAssignments extends TOSCASeq { + +} + diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/ServiceTemplate.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/ServiceTemplate.java new file mode 100644 index 0000000..7eb686c --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/ServiceTemplate.java @@ -0,0 +1,70 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. 
+ */ +package org.onap.tosca.checker.model; + +/** + */ +public interface ServiceTemplate extends TOSCAObject { + + public String tosca_definitions_version(); + + public String description(); + + public default Metadata metadata() { + return (Metadata)proxy("metadata", Metadata.class); + } + + public default Imports imports() { + return (Imports)proxy("imports", Imports.class); + } + + public default Repositories repositories() { + return (Repositories)proxy("repositories", Repositories.class); + } + + public default ArtifactTypes artifact_types() { + return (ArtifactTypes)proxy("artifact_types", ArtifactTypes.class); + } + + public default DataTypes data_types() { + return (DataTypes)proxy("data_types", DataTypes.class); + } + + public default NodeTypes node_types() { + return (NodeTypes)proxy("node_types", NodeTypes.class); + } + + public default GroupTypes group_types() { + return (GroupTypes)proxy("group_types", GroupTypes.class); + } + + public default PolicyTypes policy_types() { + return (PolicyTypes)proxy("policy_types", PolicyTypes.class); + } + + public default RelationshipTypes relationship_types() { + return (RelationshipTypes)proxy("relationship_types", RelationshipTypes.class); + } + + public default CapabilityTypes capability_types() { + return (CapabilityTypes)proxy("capability_types", CapabilityTypes.class); + } + + public default InterfaceTypes interface_types() { + return (InterfaceTypes)proxy("interface_types", InterfaceTypes.class); + } + + public default TopologyTemplate topology_template() { + return (TopologyTemplate)proxy("topology_template", TopologyTemplate.class); + } +} diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Status.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Status.java new file mode 100644 index 0000000..e76bff5 --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Status.java @@ -0,0 +1,23 @@ +/* + * Copyright (c) 2017 . 
All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. + */ +package org.onap.tosca.checker.model; + + +public enum Status { + + supported, + unsupported, + experimental, + deprecated + +} diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Substitution.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Substitution.java new file mode 100644 index 0000000..c41a429 --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/Substitution.java @@ -0,0 +1,54 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. 
+ */ +package org.onap.tosca.checker.model; + +import java.util.List; + + +/** + * Spec section 3.8 + */ +public interface Substitution extends TOSCAObject { + + public String node_type(); + + public default Mappings capabilities() { + return (Mappings)proxy("capabilities", Mappings.class); + } + + public default Mappings requirements() { + return (Mappings)proxy("requirements", Mappings.class); + } + + /** */ + public interface Mappings extends TOSCAMap { + } + + /** */ + public interface Mapping extends TOSCAObject { + + /** to promote the key */ + public String name(); + + /** + */ + public default String target() { + return name(); + } + + /** */ + public default List mapping() { + return (List)info().values().iterator().next(); + } + } +} diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/TOSCAMap.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/TOSCAMap.java new file mode 100644 index 0000000..50feb44 --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/TOSCAMap.java @@ -0,0 +1,25 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. 
+ */ +package org.onap.tosca.checker.model; + +import java.util.Map; + + +/* + * used to render TOSCA constructs that are maps of names to actual construct data: + * - node types, etc + * - topology template inputs, etc + */ +public interface TOSCAMap> extends Map { + +} diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/TOSCAObject.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/TOSCAObject.java new file mode 100644 index 0000000..a0332fd --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/TOSCAObject.java @@ -0,0 +1,63 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. + */ +package org.onap.tosca.checker.model; + +import java.util.Map; +import java.util.List; +import java.util.Collections; + +import java.lang.reflect.Proxy; + +/* + * Some choice to make here: do we explcitly extend Map or not + * Note that this makes assumptions about the POJO representation of the yaml document. 
+ */ +public interface TOSCAObject> extends Map { + + /** */ + public abstract Map info(); + + /* if the key is absent this produces a null value, rather normal I'd say + */ + public default A proxy(String theKey, final Class theType) { + return (A)/*info().*/computeIfPresent(theKey, + (key, val)-> { + /* + if (val instanceof TOSCAObject || + val instanceof TOSCAMap || + val instanceof TOSCASeq) + return val; + */ + //this makes the assumption that no other proxies are at play + if (Proxy.isProxyClass(val.getClass())) + return val; + + if (val instanceof Map && TOSCAMap.class.isAssignableFrom(theType)) + return TOSCAProxy.buildMap((String)key, (Map)val, (Class)theType); + if (val instanceof List && TOSCASeq.class.isAssignableFrom(theType)) + return TOSCAProxy.buildSeq((List)val, (Class)theType); + + return TOSCAProxy.buildObject((String)key, (Map)val, theType); + }); + } + + public default A proxyList(String theKey, final Class theType) { + return (A)computeIfPresent(theKey, + (key, val)-> { + if (Proxy.isProxyClass(val.getClass())) + return val; + return TOSCAProxy.buildList((List)val, theType); + }); + } + +} diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/TOSCAProxy.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/TOSCAProxy.java new file mode 100644 index 0000000..bce8675 --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/TOSCAProxy.java @@ -0,0 +1,179 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. + */ +package org.onap.tosca.checker.model; + +import java.util.List; +import java.util.LinkedList; +import java.util.Map; +import java.util.HashMap; +import java.util.Collections; + +import java.util.stream.Collectors; + +import java.util.function.Function; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +import java.lang.reflect.Type; +import java.lang.reflect.Method; +import java.lang.reflect.Array; +import java.lang.reflect.Constructor; +import java.lang.reflect.ParameterizedType; +import java.lang.reflect.InvocationHandler; +import java.lang.reflect.InvocationTargetException; + +import java.lang.invoke.MethodHandles; + +import com.google.common.reflect.Invokable; +import com.google.common.reflect.AbstractInvocationHandler; + + +/** + */ +public class TOSCAProxy + extends AbstractInvocationHandler { + + + public static Constructor lookupHandleConstructor; + + static { + try { + lookupHandleConstructor = + MethodHandles.Lookup.class.getDeclaredConstructor(Class.class, + int.class); + + if (!lookupHandleConstructor.isAccessible()) { + lookupHandleConstructor.setAccessible(true); + } + } + catch (Exception x) { + throw new RuntimeException(x); + } + } + + protected static Class typeArgument(Class theType) { + return (Class) + ((ParameterizedType)theType.getGenericInterfaces()[0]). 
+ getActualTypeArguments()[0]; + } + + private static Map proxyRecords = new HashMap(); + + /** */ + public static Object record(Object theRef, Function theProxyBuilder) { + return proxyRecords.computeIfAbsent(theRef, theProxyBuilder); + } + + /* a version allowing for the 'decoration/wrapping' of a basic list + */ + public static T buildList(final List theInfo, Class theType) { + return (T)java.lang.reflect.Proxy.newProxyInstance( + TOSCAProxy.class.getClassLoader(), + new Class[] { theType }, + new AbstractInvocationHandler() { + protected Object handleInvocation(Object theProxy,Method theMethod,Object[] theArgs) + throws Throwable { + return theMethod.invoke(theInfo, theArgs); + } + }); + } + + /* + * This is targeted at lists of one entry maps seen in in the TOSCA spec + */ + public static T buildSeq(final List theInfo, Class theType) { + theInfo.replaceAll((value) -> { Map.Entry entry = (Map.Entry) + value.entrySet().iterator().next(); + return buildObject(entry.getKey(), entry.getValue(), typeArgument(theType)); + }); + return (T)java.lang.reflect.Proxy.newProxyInstance( + TOSCAProxy.class.getClassLoader(), + new Class[] { theType }, + new AbstractInvocationHandler() { + protected Object handleInvocation(Object theProxy,Method theMethod,Object[] theArgs) + throws Throwable { + //A Seq is nothing but a List so just propagate the call .. + return theMethod.invoke(theInfo, theArgs); + } + }); + } + + /* + * All entries in the map will become TOSCAObject instances (used for collections of constructs) + */ + public static T buildMap(String theName, Map theInfo, Class theType) { + theInfo.replaceAll((name, value) -> { + return + value instanceof Map ? 
+ buildObject(name, (Map)value, typeArgument(theType)) : + buildObject(name, Collections.singletonMap("value", value), typeArgument(theType)); + }); + return buildObject(theName, theInfo, theType); + } + + public static T buildObject(String theName, Map theInfo, Class theType) { + return (T)java.lang.reflect.Proxy.newProxyInstance( + TOSCAProxy.class.getClassLoader(), + new Class[] { theType /*, Map.class*/ }, + new TOSCAProxy(theName, theInfo)); + } +/* + public static T build(String theName, Map theInfo, Class theType) { + if (TOSCAMap.class.isAssignableFrom(theType)) + return buildMap(theName, theInfo, (Class)theType); + else + return buildObject(theName, theInfo, theType); + } +*/ + private String name; + private Map info; + + protected TOSCAProxy(String theName, Map theInfo) { + this.name = theName; + this.info = theInfo; + } + + protected Object handleInvocation( + Object theProxy,Method theMethod,Object[] theArgs) + throws Throwable { + +//System.out.println(" ** proxy looking for " + theMethod + " in " + name + "&" + info); + + //if the method was declared in Map.class, just default to 'info' (we should make this read only) + if (Map.class.equals(theMethod.getDeclaringClass())) { + return theMethod.invoke(this.info, theArgs); + } + + if (theMethod.isDefault()) { + final Class declaringClass = theMethod.getDeclaringClass(); + return lookupHandleConstructor + .newInstance(declaringClass, MethodHandles.Lookup.PRIVATE) + .unreflectSpecial(theMethod, declaringClass) + .bindTo(theProxy) + .invokeWithArguments(theArgs); + } + + if ("info".equals(theMethod.getName())) { + return this.info; + } + + if ("name".equals(theMethod.getName())) { + return this.name; + } + + return this.info.get(theMethod.getName()); + } +} diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/TOSCASeq.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/TOSCASeq.java new file mode 100644 index 0000000..e335e5a --- /dev/null +++ 
b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/TOSCASeq.java @@ -0,0 +1,24 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. + */ +package org.onap.tosca.checker.model; + +import java.util.List; + + +/* + * used to render TOSCA constructs that are list of actual constructs: + * - requirements + */ +public interface TOSCASeq> extends List { + +} diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/TemplateInterface.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/TemplateInterface.java new file mode 100644 index 0000000..628e2f7 --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/TemplateInterface.java @@ -0,0 +1,80 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the License for + * the specific language governing permissions and limitations under the License. + */ +package org.onap.tosca.checker.model; + +import java.util.List; +import java.util.Map; + +import com.google.common.collect.Maps; + + +/* + * Interface definition used in templates (node, relationship) + */ +public interface TemplateInterface extends TOSCAObject { + + public String name(); + + public default Inputs inputs() { + return (Inputs)proxy("inputs", Inputs.class); + } + + /** + * See InterfaceType for the reason for the implementation below. + * Use the template specific operation definition, as per spec section 3.5.13.2.3 + */ + public default TemplateInterface.Operations operations() { + return (Operations) + TOSCAProxy.record(info(), + info -> TOSCAProxy.buildMap(null, + Maps.filterKeys((Map)info, + key -> !("inputs".equals(key))), + Operations.class)); + } + + /** + * Is this to be viewed as an 'operation assignment' ?? + */ + public interface Operations extends TOSCAMap { + } + + /* + * Template specific operation definition, section 3.5.13.2.3 + */ + public interface Operation extends TOSCAObject { + + public String name(); + + public String description(); + + public default PropertiesAssignments inputs() { + return (PropertiesAssignments)proxy("inputs", PropertiesAssignments.class); + } + + public default Implementation implementation() { + return (Implementation)proxy("implementation", Implementation.class); + } + + /** + */ + public interface Implementation extends TOSCAObject { + + public String primary(); + + public List dependencies(); + + } + + } + +} diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/TemplateInterfaces.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/TemplateInterfaces.java new file mode 100644 index 0000000..b4b40a2 --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/TemplateInterfaces.java @@ -0,0 +1,19 @@ +/* + * Copyright (c) 
2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. + */ +package org.onap.tosca.checker.model; + + +public interface TemplateInterfaces extends TOSCAMap { + +} + diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/TopologyTemplate.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/TopologyTemplate.java new file mode 100644 index 0000000..88328ba --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/TopologyTemplate.java @@ -0,0 +1,44 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. 
+ */ +package org.onap.tosca.checker.model; + + +public interface TopologyTemplate extends TOSCAObject { + + public String description(); + + public default Inputs inputs() { + return (Inputs)proxy("inputs", Inputs.class); + } + + public default Outputs outputs() { + return (Outputs)proxy("outputs", Outputs.class); + } + + public default NodeTemplates node_templates() { + return (NodeTemplates)proxy("node_templates", NodeTemplates.class); + } + + public default RelationshipTemplates relationship_templates() { + return (RelationshipTemplates)proxy("relationship_templates", RelationshipTemplates.class); + } + + public default Groups groups() { + return (Groups)proxy("groups", Groups.class); + } + + public default Substitution substitution_mappings() { + return (Substitution)proxy("substitution_mappings", Substitution.class); + } + +} diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/TypeInterface.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/TypeInterface.java new file mode 100644 index 0000000..77836f6 --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/TypeInterface.java @@ -0,0 +1,47 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License.
+ */ +package org.onap.tosca.checker.model; + +import java.util.Map; + +import com.google.common.collect.Maps; + +/* + * Interface definition used in types (node, relationship) + */ +public interface TypeInterface extends TOSCAObject { + + public String name(); + + public String type(); + + public default Inputs inputs() { + return (Inputs)proxy("inputs", Inputs.class); + } + + public default Operations operations() { + //not as obvious because we need to pick up all the keys EXCEPT "inputs" and "type"; + //as a result we cannot pick a key to attach the proxy to (and end up in the best case creating one every time ..). + //I wish I had a more elegant solution but in order to avoid building a new proxy every time we get here + //we register the long-lasting reference to proxy association .. + + return (Operations) + TOSCAProxy.record(info(), + info -> TOSCAProxy.buildMap(null, + Maps.filterKeys((Map)info, + key -> !("type".equals(key) || + "inputs".equals(key))), + Operations.class)); + } + +} diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/TypeInterfaces.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/TypeInterfaces.java new file mode 100644 index 0000000..fb271d7 --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/TypeInterfaces.java @@ -0,0 +1,19 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the License for + * the specific language governing permissions and limitations under the License. + */ +package org.onap.tosca.checker.model; + + +public interface TypeInterfaces extends TOSCAMap { + +} + diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/package-info.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/package-info.java new file mode 100644 index 0000000..783389e --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/model/package-info.java @@ -0,0 +1,30 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. + */ + +/** + * The package contains Java representations of TOSCA constructs. + * During processing (parsing, checking, cataloging) a POJO representation of the TOSCA yaml document content + * is being used. For any post-processing the user has the option of wrapping the POJO representations in these + * domain specific representations. + * Domain specific elements are created 'as needed' as the client navigates through the template content and the + * goal here was not to change the POJO-based structure, but just to wrap it in compatible elements (in this way + * all other methods of post processing stay compatible: jxpath, etc). 
+ * The TOSCA specific representations are defined as interfaces for which implementations are built at run-time, + * implementations which delegate to the existing POJO representations resulting from the checker. + * + * The entry points are defined in {@link org.onap.tosca.checker.models.Models Models}. + * + * ToDo: more entry points needed, at least for catalog based navigation of available types. + */ + +package org.onap.tosca.checker.model; diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/package-info.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/package-info.java new file mode 100644 index 0000000..818eeab --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/package-info.java @@ -0,0 +1,142 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. + */ + +/** + * The checker provides an api/tool for the verification of TOSCA yaml files. + *

+ It provides a three stage processing of a tosca yaml file: + * - yaml verification: is the document a valid yaml document as per yaml.org/spec. In particular we're using + * the snakeyaml library for parsing the yaml document to a nested structure of java objects. Multiple yaml + * documents per storage element/yaml character stream (file, stream) are supported, see section 9 of yaml spec. + * - tosca yaml grammar validation: is the document a valid tosca yaml + * document, as per the TOSCA simple profile for yaml. We use a modified + * version of the kwalify library for this task. The grammar for TOSCA yaml + * is itself a yaml document (found in the package in + * resources/tosca-schema.yaml). There are certain limitations on how far + * this grammar can go. + * - consistency verification: we check the type hierarchies for all TOSCA + * constructs (data types, capability types, node types, etc), the definition + * of all facets of a construct (properties, attributes, etc) across the type + * hierarchies, the conformity of construct templates (node templates, ..) with + * their types, data valuations (input assignments, constants, function calls). + * + * Each stage is blocking, i.e. a stage will be performed only if the previous + * one completed successfully. Additional processing can be added to each stage by + * providing code annotated as per the {@link org.onap.tosca.checker.annotations Annotations} package. + *

+ * The verification is done across all the imported documents. The common TOSCA + * types are by default made available to all documents being processed (the + * specification is in resources/tosca-common-types.yaml). Networking related + * types can be made available by importing resources/tosca-network-types.yaml + * while the tosca nfv profile definitions are available at + * resources/tosca-nfv-types.yaml. + *

+ * The main java classes exposed by the package are the Checker, Target + * and Report. A Target represents a document processed by the Checker. While + * the Checker starts with a top Target, through import statements it can end up + * processing a number of Targets. The results of processing a Target are made + * available through a Report which currently is nothing more that a list of + * recorded errors. + * + *

+ * {@code + * Checker checker = new Checker(); + * checker.check("tests/example.yaml"); + * + * for (Target t: checker.targets()) + * System.out.println(t.getLocation() + "\n" + t.getReport()); + * } + *
+ *

+ * A {@link org.onap.tosca.checker.Report report} consolidates the problems encountered + * in the processed documents per Target. + * The errors are recorded as instances of Exception, mostly due to the fact + * snakeyaml and kwalify do report errors as exceptions. As such there are 3 + * basic types of errors to be expected in a report: YAMLException (from + * snakeyaml, related to parsing), ValidationException (from kwalify, tosca + * grammar validation), TargetException (from the checker itself). This might + * change as we're looking to unify the way errors are reported. A Report + * object has a user friendly toString function. + * + * A CheckerException thrown during the checking process is an indication of a + * malfunction in the checker itself. + *

+ * The checker handles targets as URIs. The resolution of a target consists in + * going from a string representing some path/uri to the absolute URI. + * The Targets are found through a TargetLocator which interprets a location string + * as a URI. A custom TargetLocator (capable of interpreting particular URI schemes) + * can be plugged in the Checker. Locating a target is actually the first stage of + * target processing and an intrinsic part of the process (resolving imported targets). + * The package provides a default {@link org.onap.tosca.checker.CommonLocator locator} + * which handles the common uri schemes (the jdk recognizes). + * The example above highlights the fact that even when processing starts with one target + * it could end up going over a larger set, through imports. + * + * The results of processing a TOSCA yaml document (and all related documents through + * the import relationship) are stored in a {@link org.onap.tosca.checker.Catalog catalog}. + * Catalogs are re-usable and the checker provides methods for processing targets with + * respect to an existing catalog. 'With respect to a catalog' means that all the types + * available in that catalog are visible to the targets being processed; this facilitates + * the processing of TOSCA templates organized as a schema (types specifications) and + * topology templates. + *

+ * The most common entry point is {@link org.onap.tosca.checker.Checker#check(String) check}, + * where the argument is passed to the locator and once a {@link org.onap.tosca.checker.Target target} + * has been obtained, processing continues with the other stages. + * The checker has processing entry points past the first two processing stages: an already located + * target
+ *

    + *
  • {@link org.onap.tosca.checker.Checker#check(Target) check}
  • + *
  • {@link org.onap.tosca.checker.Checker#check(Target, Catalog) check}
  • + *
+ * and after a target located and parsed
+ *
    + *
  • {@link org.onap.tosca.checker.Checker#validate(Target) check}
  • + *
  • {@link org.onap.tosca.checker.Checker#validate(Target, Catalog) check}
  • + *
+ *

+ *

+ * The build process provides an all-in-one jar to facilitate the usage as a tool: + *

+ * Other: + *

    + *
  • the checker performs during tosca grammar validation a 'normalization' + * process as the tosca yaml profile allows for short forms in the + * specification of a number of its constructs (see spec). The checker changes + * the actual structure of the parsed document such that only normalized + * (complete) forms of specification are present before the checking phase. + * (the kwalify library was extended in order to be able to specify these + * short forms in the grammar itself and process/tolerate them at validation + * time). + *
  • + *
  • the checker contains an internal catalog where the types and templates + * of different constructs are aggregated and indexed across all targets in + * order to facilitate the checking phase. Catalogs can be 'linked' and the + * resolution process delegated (the checker maintains a basic catalog with + * the core and common types and there is always a second catalog maintaining + * the information related to the current targets). + * The catalog is currently not exposed by the library. + *
  • + *
  • imports processing: the import statements present in a target are first + * 'detected' during tosca yaml grammar validation phase. At that stage all + * imports are (recursively) parsed and validated (first 2 phases). Checking + * off all imports (recursively) is done during stage 3. + *
  • + *
      + *

      + * ToDo: + * - nobody says the parsing cannot be pluggable: snakeyaml could be replaced by + * anything that can take a Reader and produce a POJO hierarchy for the next stages. + */ + +package org.onap.tosca.checker; diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/processing/JSP.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/processing/JSP.java new file mode 100644 index 0000000..4183c75 --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/processing/JSP.java @@ -0,0 +1,659 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. 
+ */ +package org.onap.tosca.checker.processing; + +import java.io.FileReader; +import java.io.IOException; +import java.io.File; + +import java.net.URI; + +import java.util.Set; +import java.util.Map; +import java.util.List; +import java.util.Arrays; +import java.util.Iterator; +import java.util.Collection; +import java.util.Collections; +import java.util.stream.Collectors; +import java.util.function.Consumer; +import java.util.function.BiFunction; +import java.util.function.Predicate; + +import javax.script.Invocable; +import javax.script.Compilable; +import javax.script.CompiledScript; +import javax.script.Bindings; +import javax.script.SimpleBindings; +import javax.script.ScriptContext; +import javax.script.SimpleScriptContext; +import javax.script.ScriptEngine; +import javax.script.ScriptEngineManager; +import javax.script.ScriptException; + +import jdk.nashorn.api.scripting.JSObject; +import jdk.nashorn.api.scripting.AbstractJSObject; + +import org.apache.commons.jxpath.JXPathContext; +import org.apache.commons.jxpath.JXPathException; + +import org.onap.tosca.checker.Target; +import org.onap.tosca.checker.Catalog; +import org.onap.tosca.checker.Report; +import org.onap.tosca.checker.Construct; +import org.onap.tosca.checker.Facet; + + +/** + * Java Script Processor + * Each script is represented by a Target and the JSP processor maintains a collection of Targets, i.e. scripts. + * A collection of targets can be used with only one JSP processor at a time (as the processor stores processor specific * compiled versions within the target). 
+ */ +public class JSP implements Processor { + + private ScriptEngine engine; + private Collection targets; + + public JSP(String[] theScripts) { + this(Arrays.stream(theScripts) + .map(s -> new Target(s, new File(s).toURI())) + .collect(Collectors.toList())); + } + + public JSP(File[] theScripts) { + this(Arrays.stream(theScripts) + .map(s -> new Target(s.getName(), s.toURI())) + .collect(Collectors.toList())); + } + + public JSP(URI[] theScripts) { + this(Arrays.stream(theScripts) + .map(s -> new Target(s.toString(), s)) + .collect(Collectors.toList())); + } + + /** + * The given collection is allowed to change while used by the JSP engine but access to it needs to be synchronized. + * The engine uses the target field of each Target to store a compiled version of each script. An external reset of + * this field (maybe in order to indicate some change in the Target) will caue a re-compilation of the Target. + */ + public JSP(Collection theTargets) { + this.targets = theTargets; + ScriptEngineManager engineManager = new ScriptEngineManager(); + this.engine = engineManager.getEngineByName("nashorn"); + } + + public Collection targets() { + return this.targets; + } + + /* pre-compiles all known targets + */ + protected void compile() throws ProcessorException { + synchronized (this.targets) { + for (Target t: this.targets) + compile(t); + } + } + + protected CompiledScript compile(Target theTarget) throws ProcessorException { + + CompiledScript cs = null; + + synchronized(theTarget) { + try { + cs = (CompiledScript)theTarget.getTarget(); + } + catch(ClassCastException ccx) { + throw new ProcessorException(theTarget, "Unexpected target content"); + } + + if (cs == null) { + try { + cs = ((Compilable)this.engine).compile(theTarget.open()); + theTarget.setTarget(cs); + } + catch (IOException iox) { + throw new ProcessorException(theTarget, "Failed to read script", iox); + } + catch (ScriptException sx) { + throw new ProcessorException(theTarget, "Failed to compile 
script", sx); + } + } + } + + return cs; + } + + public ContextBuilder process(Catalog theCatalog) { + return new ContextBuilder( + this.engine.createBindings()) + //new DelegateBindings(this.engine.getBindings(ScriptContext.ENGINE_SCOPE))) + .with("catalog", new JSCatalog(theCatalog)); + } + + /** + */ + public class ContextBuilder implements ProcessBuilder { + + private ScriptContext context; + + protected ContextBuilder(Bindings theBindings) { + this.context = new SimpleScriptContext(); + this.context.setBindings(theBindings, Process.PROCESS_SCOPE /*ScriptContext.ENGINE_SCOPE*/); + } + + public ContextBuilder withPreprocessing(BiFunction thePreprocessing) { + this.context.setAttribute("preprocessor", thePreprocessing, Process.PROCESS_SCOPE); + return this; + } + + public ContextBuilder withPostprocessing(BiFunction thePostprocessing) { + this.context.setAttribute("postprocessor", thePostprocessing, Process.PROCESS_SCOPE); + return this; + } + + public ContextBuilder with(String theName, Object theValue) { + this.context.getBindings(Process.PROCESS_SCOPE).put(theName, theValue); + return this; + } + + public ContextBuilder withOpt(String theName, Object theValue) { + if (theValue != null) + this.context.getBindings(Process.PROCESS_SCOPE).put(theName, theValue); + return this; + } + + public JSProcess process() { + return new JSProcess(this.context); + } + + } + + /** + */ + public class JSProcess implements Process { + + private Report report = new Report(); + private Iterator scripts; + private JScriptInfo scriptInfo = new JScriptInfo(); + private Target script; //script currently being evaluated + private boolean stopped = false; + private ScriptContext context; + + private JSProcess(ScriptContext theContext) { + + this.context = theContext; + this.context.getBindings(Process.PROCESS_SCOPE) + .put("stop", new Consumer() { + public void accept(String theMsg) { + JSProcess.this.stopped = true; + //log the message?? 
+ } + }); + this.context.getBindings(Process.PROCESS_SCOPE) + .put("report", new Consumer() { + public void accept(String theMsg) { + JSProcess.this.report.add(new ProcessorException(script, theMsg)); + } + }); + this.context.getBindings(Process.PROCESS_SCOPE) + .put("reportOnce", new Consumer() { + public void accept(String theMsg) { + JSProcess.this.report.addOnce(new ProcessorException(script, theMsg)); + } + }); + this.scripts = JSP.this.targets.iterator(); + } + + protected String infoName(Target theTarget) { + String name = theTarget.getName(); + return name.substring(0, name.indexOf(".")) + "_info"; + } + + public JSP processor() { + return JSP.this; + } + + public boolean hasNext() { + return !this.stopped && this.scripts.hasNext(); + } + + protected Target next() { + if (hasNext()) + return this.script = this.scripts.next(); + else + throw new RuntimeException("Process is completed"); + } + + protected boolean runProcessor(String theName) throws ProcessorException { + BiFunction proc = (BiFunction) + this.context.getAttribute(theName, Process.PROCESS_SCOPE); + if (proc != null) { + try { + return proc.apply(this.script, this.context).booleanValue(); + } + catch (Exception x) { + throw new ProcessorException(this.script, theName + "failed", x); + } + } + + return true; + } + + public Process runNext() throws ProcessorException { + Target target = next(); + synchronized(target) { + String name = infoName(target); + try { + if (runProcessor("preprocessor")) { + compile(target).eval(this.context); + runProcessor("postprocessor"); + } + } + catch (ScriptException sx) { + throw new ProcessorException(target, "Failed to execute validation script", sx); + } + } + + return this; + } + + public Process runNextSilently() { + try { + return runNext(); + } + catch (ProcessorException px) { + this.report.add(px); + } + return this; + } + + public Report run() { + while (hasNext()) + runNextSilently(); + return this.report; + } + + public void stop() { + this.stopped = 
true; + } + + public Report report() { + return this.report; + } + } + + private static class JScriptInfo implements TargetInfo { + + private JSObject info; + + protected JScriptInfo() { + } + + protected JScriptInfo setInfo(JSObject theInfo) { + this.info = theInfo; + return this; + } + + public Set entryNames() { + return this.info == null ? Collections.EMPTY_SET : this.info.keySet(); + } + + public boolean hasEntry(String theName) { + return this.info == null ? false : this.info.hasMember(theName); + } + + public Object getEntry(String theName) { + return this.info == null ? null : + this.info.hasMember(theName) ? this.info.getMember(theName) : null; + } + } + + + /* Exposes the catalog information in a more Java Script friendly manner. + */ + public static class JSCatalog { + + private Catalog catalog; + + private JSCatalog(Catalog theCatalog) { + this.catalog = theCatalog; + } + + /** */ + public JSTarget[] targets() { + return + this.catalog.targets() + .stream() + .map(t -> { return new JSTarget(t); }) + .toArray(size -> new JSTarget[size]); //or toArray(JSNode[]::new) + } + + public JSTarget[] topTargets() { + return + this.catalog.topTargets() + .stream() + .map(t -> { return new JSTarget(t); }) + .toArray(size -> new JSTarget[size]); //or toArray(JSNode[]::new) + } + + /** */ + public String[] types(String theConstruct) { + Set names = + this.catalog.getConstructTypes(Enum.valueOf(Construct.class,theConstruct)).keySet(); + return names.toArray(new String[names.size()]); + } + + /** */ + public boolean isDerivedFrom(String theConstruct, String theType, String theSuperType) { + return this.catalog.isDerivedFrom(Enum.valueOf(Construct.class,theConstruct), theType, theSuperType); + } + + /** */ + public JSObject facetDefinition(String theConstruct, String theType, String theFacet, String theName) { + return new JSElement(theName, + this.catalog.getFacetDefinition( + Enum.valueOf(Construct.class, theConstruct), theType, + Enum.valueOf(Facet.class, theFacet), 
theName)); + } + + + /** */ +/* + public JSElement[] targetNodes(Target theTarget) { + return + this.catalog.getTargetTemplates(theTarget, Construct.Node) + .entrySet() + .stream() + .map(e -> { return new JSElement(e.getKey(),e.getValue()); }) + .toArray(size -> new JSElement[size]); //or toArray(JSNode[]::new) + } +*/ + + public class JSTarget { + + private Target tgt; + private JXPathContext jxPath; + + private JSTarget(Target theTarget) { + this.tgt = theTarget; + this.jxPath = JXPathContext.newContext(this.tgt.getTarget()); + this.jxPath.setLenient(true); + } + + public String getName() { return this.tgt.getName(); } + + public JSElement resolve(String thePath) { + Object res = jxPath.getValue(thePath); + if (res instanceof Map) { + return new JSElement(thePath, (Map)res); + } + //?? + return null; + } + + public JSElement[] getInputs() { + + Map inputs = (Map)jxPath.getValue("/topology_template/inputs"); + return (inputs == null) ? + new JSElement[0] + : inputs.entrySet() + .stream() + .map(e -> { return new JSElement(e.getKey(),e.getValue()); }) + .toArray(size -> new JSElement[size]); + } + +// public JSElement[] getOutputs() { +// } + + public JSElement getMetadata() { + return new JSElement("metadata", (Map)jxPath.getValue("/metadata")); + } + + public JSElement[] getNodes() { + return + JSCatalog.this.catalog.getTargetTemplates(this.tgt, Construct.Node) + .entrySet() + .stream() + .map(e -> { return new JSElement(e.getKey(),e.getValue()); }) + .toArray(size -> new JSElement[size]); //or toArray(JSElement[]::new) + } + +// public JSElement[] getPolicies() { +// } + + } + + + /* + */ + public class JSElement extends AbstractJSObject { + + + private String name; + private Map def; + + private JSElement(String theName, Object theDef) { +//System.out.println("JSObject " + theName + ":" + theDef); + this.name = theName; + this.def = theDef == null ? Collections.emptyMap() + : (theDef instanceof Map) ? 
(Map)theDef + : Collections.singletonMap("value",theDef); + } + + public String getName() { return this.name; } + + public boolean hasMember(String theMember) { +// System.out.println("hasMember " + theMember); + return this.def.containsKey(theMember); + } + + public Object getMember(final String theMember) { + Object val = this.def.get(theMember); +// System.out.println("getMember " + theMember + ": " + val); + if (val != null) { + if (val instanceof Map) { + return new JSElement(theMember, val); + /* + return ((Map)obj).entrySet() + .stream() + .map((Map.Entry e) -> { return new JSElement(e.getKey(),e.getValue()); }) + .toArray(size -> new JSElement[size]); + */ + } + + if (val instanceof List) { + //a property value can be a list of: primitive types or maps (for a user defined type) + //requirements are exposed as a list of maps + List lval = (List)val; + if (lval.get(0) instanceof Map) { + return lval + .stream() + .map((e) -> new JSElement(theMember, e)) + .toArray(size -> new JSElement[size]); + + /* + return val + .stream() + .map((e) -> { + System.out.println("Found in " + theMember + ": " + e); + Map.Entry re = ((Map)e).entrySet().iterator().next(); + return new JSElement(re.getKey(), re.getValue()); + }) + .toArray(size -> new JSElement[size]); + */ + } + } + + return val; + } + else { + if ("name".equals(theMember)) + return this.name; + if ("toString".equals(theMember)) + return _toString; + if ("hasOwnProperty".equals(theMember)) + return _hasOwnProperty; +// System.out.println("Could not find " + theMember + " in " + this.def); + return super.getMember(theMember); + } + } + /* TODO: we do not expose 'name' in here */ + public Set keySet() { +// System.out.println("keySet: " + this.def.keySet()); + return this.def.keySet(); + } + +/* having problem with getDefaultValue in Jenkins builds */ +/* + public Object getDefaultValue(Class hint) { +// System.out.println("getDefaultValue: " + hint); + if (String.class.equals(hint)) + return this.def.toString(); 
+ + return super.getDefaultValue(hint); + } +*/ + + } + + + static final JSObject _toString = + new TracerJSObject("_toString") { + public Object call(Object thiz, Object... args) { + return ((JSElement)thiz).def.toString(); + } + + public boolean isFunction() { return true; } + }; + + static final JSObject _hasOwnProperty = + new TracerJSObject("_hasOwnProperty") { + public Object call(Object thiz, Object... args) { +// System.out.println("_hasOwnProperty: " + args.length); + return ((JSElement)thiz).def.containsKey(args[0]); + } + + public boolean isFunction() { return true; } + }; + + }//JSCatalog + + + + private static class TracerJSObject extends AbstractJSObject { + + private String mark; + + TracerJSObject(String theMark) { + this.mark = theMark; + } + + public Object call(Object thiz, Object... args) { + System.out.println(this.mark + ":call"); + return super.call(thiz, args); + } + + public Object newObject(Object... args) { + System.out.println(this.mark + ":newObject"); + return super.newObject(args); + } + + public Object eval(String s) { + System.out.println(this.mark + ":eval"); + return super.eval(s); + } + + public Object getMember(String name) { + System.out.println(this.mark + ":getMember"); + return super.getMember(name); + } + + public Object getSlot(int index) { + System.out.println(this.mark + ":getSlot"); + return super.getSlot(index); + } + + public boolean hasMember(String name) { + System.out.println(this.mark + ":hasMember"); + return super.hasMember(name); + } + + public boolean hasSlot(int slot) { + System.out.println(this.mark + ":hasSlot"); + return super.hasSlot(slot); + } + + public void removeMember(String name) { + System.out.println(this.mark + ":removeMember"); + super.removeMember(name); + } + + public void setMember(String name, Object value) { + System.out.println(this.mark + ":setMember"); + super.setMember(name,value); + } + + public void setSlot(int index, Object value) { + System.out.println(this.mark + ":setSlot"); + 
super.setSlot(index,value); + } + + public Set keySet() { + System.out.println(this.mark + "JSObject:keySet"); + return super.keySet(); + } + + public Collection values() { + System.out.println(this.mark + ":values"); + return super.values(); + } + + public boolean isInstance(Object instance) { + System.out.println(this.mark + ":isInstance"); + return super.isInstance(instance); + } + + public boolean isInstanceOf(Object clazz) { + System.out.println(this.mark + ":isInstanceOf"); + return super.isInstance(clazz); + } + + public String getClassName() { + System.out.println(this.mark + ":getClassName"); + return super.getClassName(); + } + + public boolean isFunction() { + System.out.println(this.mark + ":isFunction"); + return super.isFunction(); + } + + public boolean isStrictFunction() { + System.out.println(this.mark + ":isStrictFunction"); + return super.isStrictFunction(); + } + + public boolean isArray() { + System.out.println(this.mark + ":isArray"); + return super.isArray(); + } + + public Object getDefaultValue(Class hint) { + System.out.println(this.mark + ":getDefaultValue(" + hint + ")"); + return super.getDefaultValue(hint); + } + } + +} diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/processing/Process.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/processing/Process.java new file mode 100644 index 0000000..865de06 --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/processing/Process.java @@ -0,0 +1,45 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. + */ +package org.onap.tosca.checker.processing; + +import java.util.function.Predicate; + +import org.onap.tosca.checker.Report; + +/** + * + */ +public interface Process { + + public static final int PROCESS_SCOPE = 100; + + /** + * the processor running this process + */ + public T processor(); + + /* */ + public boolean hasNext(); + + /* */ + public Process runNext() throws ProcessorException; + + /* execute all steps to completion + */ + public Report run(); + + /* execution report + */ + public Report report(); + +} diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/processing/ProcessBuilder.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/processing/ProcessBuilder.java new file mode 100644 index 0000000..cb0b455 --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/processing/ProcessBuilder.java @@ -0,0 +1,38 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. 
+ */ +package org.onap.tosca.checker.processing; + +import org.onap.tosca.checker.Report; + + +/** + * Just in case you might want to do something with a template (set) once it was checked + */ +public interface ProcessBuilder { + + /* */ + public ProcessBuilder with(String theName, Object theValue); + + /* */ + public ProcessBuilder withOpt(String theName, Object theValue); + + /* */ + public Process process(); + + /* */ + default public Report run() { + return process() + .run(); + } + +} diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/processing/Processor.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/processing/Processor.java new file mode 100644 index 0000000..94904d2 --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/processing/Processor.java @@ -0,0 +1,25 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. 
+ */ +package org.onap.tosca.checker.processing; + +import org.onap.tosca.checker.Catalog; + + +/** + * Just in case you might want to do something with a template (set) once it was checked + */ +public interface Processor> { + + /* */ + public ProcessBuilder process(Catalog theCatalog); +} diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/processing/ProcessorException.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/processing/ProcessorException.java new file mode 100644 index 0000000..2d9cd22 --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/processing/ProcessorException.java @@ -0,0 +1,43 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. 
+ */ +package org.onap.tosca.checker.processing; + +import org.onap.tosca.checker.Target; +import org.onap.tosca.checker.CheckerException; + + +/** + */ +public class ProcessorException extends CheckerException { + + private Target target; + + public ProcessorException(Target theTarget, String theMsg, Throwable theCause) { + super(theMsg, theCause); + this.target = theTarget; + } + + public ProcessorException(Target theTarget, String theMsg) { + super(theMsg); + this.target = theTarget; + } + + public Target getTarget() { + return this.target; + } + + @Override + public String getMessage() { + return this.target + ":" + super.getMessage() + (getCause() == null ? "" : ("(" + getCause() + ")")); + } +} diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/processing/TargetInfo.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/processing/TargetInfo.java new file mode 100644 index 0000000..f330df5 --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/processing/TargetInfo.java @@ -0,0 +1,32 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. + */ +package org.onap.tosca.checker.processing; + +import java.util.Set; + + +/** + * Exposes target properties. How they are obtained/calculated not of importance here. 
+ */ +public interface TargetInfo { + + /** */ + public Set entryNames(); + + /** */ + public boolean hasEntry(String theName); + + /** */ + public Object getEntry(String theName); + +} diff --git a/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/processing/package-info.java b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/processing/package-info.java new file mode 100644 index 0000000..7a2d62f --- /dev/null +++ b/javatoscachecker/checker/src/main/java/org/onap/tosca/checker/processing/package-info.java @@ -0,0 +1,23 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. + */ + +/** + * After checking and cataloging we assume some applications will do some sort of post-processing. + * While all such processing can be done in java by navigating catalog information this package focuses + * on processing based on different 'environments' through a so called 'processor'. The package offers one + * implementation, a Java Script processor that exposes Checker catalog information to java scripts executed + * through Nashorn, the Java (8) built-in java script engine. 
+ * + */ + +package org.onap.tosca.checker.processing; diff --git a/javatoscachecker/checker/src/main/resources/org/onap/tosca/checker/messages_en_US.properties b/javatoscachecker/checker/src/main/resources/org/onap/tosca/checker/messages_en_US.properties new file mode 100644 index 0000000..f1152cc --- /dev/null +++ b/javatoscachecker/checker/src/main/resources/org/onap/tosca/checker/messages_en_US.properties @@ -0,0 +1,18 @@ +EMPTY_TEMPLATE= Empty template + +# {0}: field, {1}: field value, {2}: construct +INVALID_CONSTRUCT_REFERENCE= {0} is expected to be a reference to a {2} type or {2} template, value ''{1}'' is none of those + +INVALID_TYPE_REFERENCE= ''{0}'' value ''{1}'' is expected to be a reference to a {2} type. No {2} type with this name was declared. +#{0}: field, {1} value, {2} construct +INVALID_TEMPLATE_REFERENCE= ''{0}'' value ''{1}'' is expected to be a reference to a {2} template. No such {2} template is in scope. +INVALID_INTERFACE_REFERENCE= {2} type ''{0}'' has no interface ''{1}'' +#INCOMPATIBLE_TYPE_REFERENCE= {2} type ''{0}'' value ''{1}'' is not type compatible with any declared {2} type + +# {0}: the target construct, {1}: target, {2}: target definition +INCOMPATIBLE_REQUIREMENT_TARGET= Required target {0} type {1} is not compatible with the target {0} type found in the requirement definition {2} + +#Unknown " + theConstruct + " " + theFacet + " (not declared by the type " + theSpecType + ") were used: " + defs, + +#{0}: construct, {1}: facet, {2} type, {3} list of facets +INVALID_FACET_REFERENCE: Unknown {0} {1} (not available through the type {2}) were used: {3} \ No newline at end of file diff --git a/javatoscachecker/checker/src/main/resources/tosca/tosca-common-types.yaml b/javatoscachecker/checker/src/main/resources/tosca/tosca-common-types.yaml new file mode 100644 index 0000000..cacaa5b --- /dev/null +++ b/javatoscachecker/checker/src/main/resources/tosca/tosca-common-types.yaml @@ -0,0 +1,668 @@ +tosca_definitions_version: 
tosca_simple_yaml_1_1_0 +description: > + TOSCA simple profile common types. To be included by default in all templates. + + +data_types: + +#see 5.3.1 + tosca.datatypes.Root: + description: The TOSCA root Data Type all other TOSCA base Data Types derive from + +#from 5.3.2 + tosca.datatypes.Credential: + derived_from: tosca.datatypes.Root + properties: + protocol: + type: string + required: false + token_type: + type: string + default: password + token: + type: string + keys: + type: map + required: false + entry_schema: string + user: + type: string + required: false + +#from 5.3.3 + tosca.datatypes.TimeInterval: + derived_from: tosca.datatypes.Root + properties: + start_time: + type: timestamp + required: true + end_time: + type: timestamp + required: true + +#from 5.3.4 + tosca.datatypes.network.NetworkInfo: + derived_from: tosca.datatypes.Root + properties: + network_name: + type: string + network_id: + type: string + addresses: + type: list + entry_schema: string + +#from 5.3.5 + tosca.datatypes.network.PortInfo: + derived_from: tosca.datatypes.Root + properties: + port_name: + type: string + port_id: + type: string + network_id: + type: string + mac_address: + type: string + addresses: + type: list + entry_schema: string + +#from 5.3.6 + tosca.datatypes.network.PortDef: + derived_from: integer + constraints: + - in_range: [ 1, 65535 ] + +#from 5.3.7 + tosca.datatypes.network.PortSpec: + derived_from: tosca.datatypes.Root + properties: + protocol: + type: string + required: true + default: tcp + constraints: + - valid_values: [ udp, tcp, igmp ] + target: +#I think the intent was (same for source): +#type: tosca.datatypes.network.PortDef + type: integer + entry_schema: tosca.datatypes.network.PortDef + target_range: + type: range + constraints: + - in_range: [ 1, 65535 ] + source: + type: integer + entry_schema: tosca.datatypes.network.PortDef + source_range: + type: range + constraints: + - in_range: [ 1, 65535 ] + +capability_types: + +#from 5.5.1 + 
tosca.capabilities.Root: + description: The TOSCA root Capability Type all other TOSCA base Capability Types derive from + +#from 5.5.2 + tosca.capabilities.Node: + derived_from: tosca.capabilities.Root + +#from 5.5.3 + tosca.capabilities.Compute: + derived_from: tosca.capabilities.Root + properties: + name: + type: string + required: false + num_cpus: + type: integer + required: false + constraints: + - greater_or_equal: 1 + cpu_frequency: + type: scalar-unit.frequency + required: false + constraints: + - greater_or_equal: 0.1 GHz + disk_size: + type: scalar-unit.size + required: false + constraints: + - greater_or_equal: 0 MB + mem_size: + type: scalar-unit.size + required: false + constraints: + - greater_or_equal: 0 MB + +#from 5.5.4 + tosca.capabilities.Network: + derived_from: tosca.capabilities.Root + properties: + name: + type: string + required: false + +#from 5.5.5 + tosca.capabilities.Storage: + derived_from: tosca.capabilities.Root + properties: + name: + type: string + required: false + +#from 5.5.6 + tosca.capabilities.compute.Container: + derived_from: tosca.capabilities.Compute + + tosca.capabilities.Container: + derived_from: tosca.capabilities.Compute + +#from 5.5.7 + tosca.capabilities.Endpoint: + derived_from: tosca.capabilities.Root + properties: + protocol: + type: string + default: tcp + port: + type: tosca.datatypes.network.PortDef + required: false + secure: + type: boolean + default: false + url_path: + type: string + required: false + port_name: + type: string + required: false + network_name: + type: string + required: false + default: PRIVATE + initiator: + type: string + default: source + constraints: + - valid_values: [ source, target, peer ] + ports: + type: map + required: false + constraints: + - min_length: 1 + entry_schema: tosca.datatypes.network.PortSpec + attributes: + ip_address: + type: string + +#from 5.5.8 + tosca.capabilities.Endpoint.Public: + derived_from: tosca.capabilities.Endpoint + properties: + # Change the default 
network_name to use the first public network found + network_name: + type: string + default: PUBLIC + floating: + description: > + indicates that the public address should be allocated from a pool of floating IPs that are associated with the network. + type: boolean + default: false + status: experimental + dns_name: + description: The optional name to register with DNS + type: string + required: false + status: experimental + +#from 5.5.9 + tosca.capabilities.Endpoint.Admin: + derived_from: tosca.capabilities.Endpoint + # Change Endpoint secure indicator to true from its default of false + properties: + secure: + type: boolean + default: true + constraints: + - equal: true + +#from 5.5.10 + tosca.capabilities.Endpoint.Database: + derived_from: tosca.capabilities.Endpoint + +#from 5.5.11 + tosca.capabilities.Attachment: + derived_from: tosca.capabilities.Root + +#from 5.5.12 + tosca.capabilities.OperatingSystem: + derived_from: tosca.capabilities.Root + properties: + architecture: + type: string + required: false + type: + type: string + required: false + distribution: + type: string + required: false + version: + type: version + required: false + +#from 5.5.13 + tosca.capabilities.Scalable: + derived_from: tosca.capabilities.Root + properties: + min_instances: + type: integer + default: 1 + max_instances: + type: integer + default: 1 + default_instances: + type: integer + +#from C.3.11 + tosca.capabilities.network.Bindable: + derived_from: tosca.capabilities.Node + + +relationship_types: + +#from 5.7.1 + tosca.relationships.Root: + description: The TOSCA root Relationship Type all other TOSCA base Relationship Types derive from + attributes: + tosca_id: + type: string + tosca_name: + type: string + interfaces: + Configure: + type: tosca.interfaces.relationship.Configure + +#from 5.7.2 + tosca.relationships.DependsOn: + derived_from: tosca.relationships.Root + valid_target_types: [ tosca.capabilities.Node ] + +#from 5.7.3 + tosca.relationships.HostedOn: + 
derived_from: tosca.relationships.Root + valid_target_types: [ tosca.capabilities.compute.Container ] + +#from 5.7.4 + tosca.relationships.ConnectsTo: + derived_from: tosca.relationships.Root + valid_target_types: [ tosca.capabilities.Endpoint ] + properties: + credential: + type: tosca.datatypes.Credential + required: false + +#from 5.7.5 + tosca.relationships.AttachesTo: + derived_from: tosca.relationships.Root + valid_target_types: [ tosca.capabilities.Attachment ] + properties: + location: + type: string + constraints: + - min_length: 1 + device: + type: string + required: false + +#from 5.7.6 + tosca.relationships.RoutesTo: + derived_from: tosca.relationships.ConnectsTo + valid_target_types: [ tosca.capabilities.Endpoint ] + + +interface_types: + +#from 5.8.3 + tosca.interfaces.Root: +# derived_from: tosca.entity.Root + description: The TOSCA root Interface Type all other TOSCA base Interface Types derive from + +#from 5.8.4 + tosca.interfaces.node.lifecycle.Standard: + derived_from: tosca.interfaces.Root + create: + description: Standard lifecycle create operation. + configure: + description: Standard lifecycle configure operation. + start: + description: Standard lifecycle start operation. + stop: + description: Standard lifecycle stop operation. + delete: + description: Standard lifecycle delete operation. + +#from 5.8.5 + tosca.interfaces.relationship.Configure: + derived_from: tosca.interfaces.Root + pre_configure_source: + description: Operation to pre-configure the source endpoint. + pre_configure_target: + description: Operation to pre-configure the target endpoint. + post_configure_source: + description: Operation to post-configure the source endpoint. + post_configure_target: + description: Operation to post-configure the target endpoint. + add_target: + description: Operation to notify the source node of a target node being added via a relationship. 
+ add_source: + description: Operation to notify the target node of a source node which is now available via a relationship. + target_changed: + description: Operation to notify source some property or attribute of the target changed + remove_target: + description: Operation to remove a target node. + + +node_types: + +#from 5.9.1 + tosca.nodes.Root: + description: The TOSCA Node Type all other TOSCA base Node Types derive from + attributes: + tosca_id: + type: string + tosca_name: + type: string + state: + type: string + capabilities: + feature: + type: tosca.capabilities.Node + requirements: + - dependency: + capability: tosca.capabilities.Node + node: tosca.nodes.Root + relationship: tosca.relationships.DependsOn + occurrences: [ 0, UNBOUNDED ] + interfaces: + Standard: + type: tosca.interfaces.node.lifecycle.Standard + +#from 5.9.2 + tosca.nodes.Compute: + derived_from: tosca.nodes.Root + attributes: + private_address: + type: string + public_address: + type: string + networks: + type: map +#entry schema for attribute has a string value as per A.5.9 .. +#the standard document defines it as a map similar to the property definition .. 
+ entry_schema: tosca.datatypes.network.NetworkInfo + ports: + type: map + entry_schema: tosca.datatypes.network.PortInfo + requirements: + - local_storage: + capability: tosca.capabilities.Attachment + node: tosca.nodes.BlockStorage + relationship: tosca.relationships.AttachesTo + occurrences: [0, UNBOUNDED] + capabilities: + host: + type: tosca.capabilities.compute.Container + valid_source_types: [tosca.nodes.SoftwareComponent] + endpoint: + type: tosca.capabilities.Endpoint.Admin + os: + type: tosca.capabilities.OperatingSystem + scalable: + type: tosca.capabilities.Scalable + binding: + type: tosca.capabilities.network.Bindable + +#from 5.9.3 + tosca.nodes.SoftwareComponent: + derived_from: tosca.nodes.Root + properties: + # domain-specific software component version + component_version: + type: version + required: false + admin_credential: + type: tosca.datatypes.Credential + required: false + requirements: + - host: + capability: tosca.capabilities.compute.Container + node: tosca.nodes.Compute + relationship: tosca.relationships.HostedOn + +#from 5.9.4 + tosca.nodes.WebServer: + derived_from: tosca.nodes.SoftwareComponent + capabilities: + # Private, layer 4 endpoints + data_endpoint: tosca.capabilities.Endpoint + admin_endpoint: tosca.capabilities.Endpoint.Admin + host: + type: tosca.capabilities.compute.Container + valid_source_types: [ tosca.nodes.WebApplication ] + +#from 5.9.5 + tosca.nodes.WebApplication: + derived_from: tosca.nodes.Root + properties: + context_root: + type: string + capabilities: + app_endpoint: + type: tosca.capabilities.Endpoint + requirements: + - host: + capability: tosca.capabilities.compute.Container + node: tosca.nodes.WebServer + relationship: tosca.relationships.HostedOn + +#from 5.9.6 + tosca.nodes.DBMS: + derived_from: tosca.nodes.SoftwareComponent + properties: + root_password: + type: string + required: false + description: the optional root password for the DBMS service + port: + type: integer + required: false + 
description: the port the DBMS service will listen to for data and requests + capabilities: + host: + type: tosca.capabilities.compute.Container + valid_source_types: [ tosca.nodes.Database ] + +#from 5.9.7 + tosca.nodes.Database: + derived_from: tosca.nodes.Root + properties: + name: + type: string + description: the logical name of the database + port: + type: integer + description: the port the underlying database service will listen to for data + user: + type: string + description: the optional user account name for DB administration + required: false + password: + type: string + description: the optional password for the DB user account + required: false + requirements: + - host: + capability: tosca.capabilities.compute.Container + node: tosca.nodes.DBMS + relationship: tosca.relationships.HostedOn + capabilities: + database_endpoint: + type: tosca.capabilities.Endpoint.Database + +#from 5.9.8 + tosca.nodes.ObjectStorage: + derived_from: tosca.nodes.Root + properties: + name: + type: string + size: + type: scalar-unit.size + constraints: + - greater_or_equal: 0 GB + maxsize: + type: scalar-unit.size + constraints: + - greater_or_equal: 0 GB + capabilities: + storage_endpoint: + type: tosca.capabilities.Endpoint + +#from 5.9.9 + tosca.nodes.BlockStorage: + derived_from: tosca.nodes.Root + properties: + size: + type: scalar-unit.size + constraints: + - greater_or_equal: 1 MB + volume_id: + type: string + required: false + snapshot_id: + type: string + required: false + capabilities: + attachment: + type: tosca.capabilities.Attachment + +#from 5.9.10 + tosca.nodes.Container.Runtime: + derived_from: tosca.nodes.SoftwareComponent + capabilities: + host: + type: tosca.capabilities.compute.Container + scalable: + type: tosca.capabilities.Scalable + +#from 5.9.11 + tosca.nodes.Container.Application: + derived_from: tosca.nodes.Root + requirements: + - host: + capability: tosca.capabilities.compute.Container + # node: tosca.nodes.Container !invalid node reference! 
+        relationship: tosca.relationships.HostedOn
+
+#from 5.9.12
+  tosca.nodes.LoadBalancer:
+    derived_from: tosca.nodes.Root
+    properties:
+      # TBD
+      algorithm:
+        type: string
+        required: false
+        status: experimental
+    capabilities:
+      client:
+        type: tosca.capabilities.Endpoint.Public
+        occurrences: [0, UNBOUNDED]
+        description: the Floating (IP) clients on the public network can connect to
+    requirements:
+      - application:
+          capability: tosca.capabilities.Endpoint
+          relationship: tosca.relationships.RoutesTo
+          occurrences: [0, UNBOUNDED]
+# correction by jora: requirement definition does not allow for a description entry
+#          description: Connection to one or more load balanced applications
+
+artifact_types:
+
+#from 5.4.1
+  tosca.artifacts.Root:
+    description: The TOSCA Artifact Type all other TOSCA Artifact Types derive from
+
+#from 5.4.2
+  tosca.artifacts.File:
+    derived_from: tosca.artifacts.Root
+
+#from 5.4.3
+  tosca.artifacts.Deployment:
+    derived_from: tosca.artifacts.Root
+    description: TOSCA base type for deployment artifacts
+
+#from 5.4.3.3
+  tosca.artifacts.Deployment.Image:
+    derived_from: tosca.artifacts.Deployment
+
+#from 5.4.3.4
+  tosca.artifacts.Deployment.Image.VM:
+    derived_from: tosca.artifacts.Deployment.Image
+    description: Virtual Machine (VM) Image
+
+#from 5.4.4
+  tosca.artifacts.Implementation:
+    derived_from: tosca.artifacts.Root
+    description: TOSCA base type for implementation artifacts
+
+#from 5.4.4.3
+  tosca.artifacts.Implementation.Bash:
+    derived_from: tosca.artifacts.Implementation
+    description: Script artifact for the Unix Bash shell
+    mime_type: application/x-sh
+    file_ext: [ sh ]
+
+#from 5.4.4.4
+  tosca.artifacts.Implementation.Python:
+    derived_from: tosca.artifacts.Implementation
+    description: Artifact for the interpreted Python language
+    mime_type: application/x-python
+    file_ext: [ py ]
+
+
+#from 5.9
+group_types:
+
+  tosca.groups.Root:
+    description: The TOSCA Group Type all other TOSCA Group Types derive from
+    interfaces:
+      Standard:
+        type: tosca.interfaces.node.lifecycle.Standard
+
+#from 5.10
+policy_types:
+
+  tosca.policies.Root:
+    description: The TOSCA Policy Type all other TOSCA Policy Types derive from
+
+  tosca.policies.Placement:
+    derived_from: tosca.policies.Root
+    description: The TOSCA Policy Type definition that is used to govern placement of TOSCA nodes or groups of nodes.
+
+  tosca.policies.Scaling:
+    derived_from: tosca.policies.Root
+    description: The TOSCA Policy Type definition that is used to govern scaling of TOSCA nodes or groups of nodes.
+
+  tosca.policies.Update:
+    derived_from: tosca.policies.Root
+    description: The TOSCA Policy Type definition that is used to govern update of TOSCA nodes or groups of nodes.
+
+  tosca.policies.Performance:
+    derived_from: tosca.policies.Root
+    description: The TOSCA Policy Type definition that is used to declare performance requirements for TOSCA nodes or groups of nodes.
+
diff --git a/javatoscachecker/checker/src/main/resources/tosca/tosca-examples-types.yaml b/javatoscachecker/checker/src/main/resources/tosca/tosca-examples-types.yaml
new file mode 100644
index 0000000..5eee538
--- /dev/null
+++ b/javatoscachecker/checker/src/main/resources/tosca/tosca-examples-types.yaml
@@ -0,0 +1,117 @@
+tosca_definitions_version: tosca_simple_yaml_1_0_0
+description: >
+  Non-normative type definitions, as per section 8 of TOSCA simple profile.
+ + +artifact_types: + + tosca.artifacts.Deployment.Image.Container.Docker: + derived_from: tosca.artifacts.Deployment.Image + description: Docker Container Image + + tosca.artifacts.Deployment.Image.VM.ISO: + derived_from: tosca.artifacts.Deployment.Image.VM + description: Virtual Machine (VM) image in ISO disk format + mime_type: application/octet-stream + file_ext: [ iso ] + + tosca.artifacts.Deployment.Image.VM.QCOW2: + derived_from: tosca.artifacts.Deployment.Image.VM + description: Virtual Machine (VM) image in QCOW v2 standard disk format + mime_type: application/octet-stream + file_ext: [ qcow2 ] + + +capability_types: + + tosca.capabilities.Container.Docker: + derived_from: tosca.capabilities.Container + properties: + version: + type: list + required: false + entry_schema: version + publish_all: + type: boolean + default: false + required: false + publish_ports: + type: list + entry_schema: tosca.datatypes.network.PortSpec + required: false + expose_ports: + type: list + entry_schema: tosca.datatypes.network.PortSpec + required: false + volumes: + type: list + entry_schema: string + required: false + + +node_types: + + tosca.nodes.Database.MySQL: + derived_from: tosca.nodes.Database + requirements: + - host: + capability: tosca.capabilities.Container + node: tosca.nodes.DBMS.MySQL + + tosca.nodes.DBMS.MySQL: + derived_from: tosca.nodes.DBMS + properties: + port: + type: integer + description: reflect the default MySQL server port + default: 3306 + root_password: + type: string + # MySQL requires a root_password for configuration + # Override parent DBMS definition to make this property required + required: true + capabilities: + # Further constrain the ‘host’ capability to only allow MySQL databases + host: + type: tosca.capabilities.Container + valid_source_types: [ tosca.nodes.Database.MySQL ] + + tosca.nodes.WebServer.Apache: + derived_from: tosca.nodes.WebServer + + tosca.nodes.WebApplication.WordPress: + derived_from: tosca.nodes.WebApplication + 
properties: + admin_user: + type: string + admin_password: + type: string + db_host: + type: string + requirements: + - database_endpoint: + capability: tosca.capabilities.Endpoint.Database + node: tosca.nodes.Database + relationship: tosca.relationships.ConnectsTo + + tosca.nodes.WebServer.Nodejs: + derived_from: tosca.nodes.WebServer + properties: + # Property to supply the desired implementation in the Github repository + github_url: + required: no + type: string + description: location of the application on the github. + default: https://github.com/mmm/testnode.git + interfaces: + Standard: + type: tosca.interfaces.node.lifecycle.Standard + inputs: + github_url: + type: string + + tosca.nodes.Container.Application.Docker: + derived_from: tosca.nodes.Container.Application + requirements: + - host: + capability: tosca.capabilities.Container.Docker \ No newline at end of file diff --git a/javatoscachecker/checker/src/main/resources/tosca/tosca-network-types.yaml b/javatoscachecker/checker/src/main/resources/tosca/tosca-network-types.yaml new file mode 100644 index 0000000..e4930e0 --- /dev/null +++ b/javatoscachecker/checker/src/main/resources/tosca/tosca-network-types.yaml @@ -0,0 +1,103 @@ +tosca_definitions_version: tosca_simple_yaml_1_0_0 +description: > + TOSCA simple profile for networking. 
+ +metadata: + template_name: tosca_simple_networking + template_version: "1.0" + +#imports: +# - tosca-common-types.yaml + +node_types: + + tosca.nodes.network.Network: + derived_from: tosca.nodes.Root + properties: + ip_version: + type: integer + required: false + default: 4 + constraints: + - valid_values: [ 4, 6 ] + cidr: + type: string + required: false + start_ip: + type: string + required: false + end_ip: + type: string + required: false + gateway_ip: + type: string + required: false + network_name: + type: string + required: false + network_id: + type: string + required: false + segmentation_id: + type: string + required: false + network_type: + type: string + required: false + physical_network: + type: string + required: false + capabilities: + link: + type: tosca.capabilities.network.Linkable + + tosca.nodes.network.Port: + derived_from: tosca.nodes.Root + properties: + ip_address: + type: string + required: false + order: + type: integer + required: true + default: 0 + constraints: + - greater_or_equal: 0 + is_default: + type: boolean + required: false + default: false + ip_range_start: + type: string + required: false + ip_range_end: + type: string + required: false + requirements: + - link: + capability: tosca.capabilities.network.Linkable + relationship: tosca.relationships.network.LinksTo + - binding: + capability: tosca.capabilities.network.Bindable + relationship: tosca.relationships.network.BindsTo + + +capability_types: + + tosca.capabilities.network.Linkable: + derived_from: tosca.capabilities.Node + + # also part of common types - used in Compute node type + tosca.capabilities.network.Bindable: + derived_from: tosca.capabilities.Node + +relationship_types: + + tosca.relationships.network.LinksTo: + derived_from: tosca.relationships.DependsOn + valid_target_types: [ tosca.capabilities.network.Linkable ] + + tosca.relationships.network.BindsTo: + derived_from: tosca.relationships.DependsOn + valid_target_types: [ 
tosca.capabilities.network.Bindable ] + diff --git a/javatoscachecker/checker/src/main/resources/tosca/tosca-nfv-types.yaml b/javatoscachecker/checker/src/main/resources/tosca/tosca-nfv-types.yaml new file mode 100644 index 0000000..fd52f6b --- /dev/null +++ b/javatoscachecker/checker/src/main/resources/tosca/tosca-nfv-types.yaml @@ -0,0 +1,143 @@ +tosca_definitions_version: tosca_simple_profile_for_nfv_1_0_0 +description: > + TOSCA simple profile for Network Function Virtualization (NFV). + +metadata: + template_name: http://docs.oasis-open.org/tosca/tosca-nfv/v1.0/csd01/tosca-nfv-v1.0-csd01.pdf + template_version: "1.0" + +#imports: +# - tosca-common-types.yaml + +capability_types: + +#from 6.3 + tosca.capabilities.nfv.VirtualLinkable: + derived_from: tosca.capabilities.Root + +#from 7.2.1 + tosca.capabilities.nfv.VirtualBindable: + derived_from: tosca.capabilities.Root +# breaks the virtualbinding requirement in the node type nfv.CP +# valid_source_types: [ tosca.nodes.nfv.VDU ] + +#from 7.2.2 + tosca.capabilities.nfv.HA: + derived_from: tosca.capabilities.Root + valid_source_types: [ tosca.nodes.nfv.VDU ] + +#from 7.2.3 + tosca.capabilities.nfv.HA.ActiveActive: + derived_from: tosca.capabilities.nfv.HA + +#from 7.2.4 + tosca.capabilities.nfv.HA.ActivePassive: + derived_from: tosca.capabilities.nfv.HA + +#from 7.2.5 + tosca.capabilities.nfv.Metric: + derived_from: tosca.capabilities.Root + + +relationship_types: + +#from 6.4 + tosca.relationships.nfv.VirtualLinksTo: + derived_from: tosca.relationships.ConnectsTo + valid_target_types: [ tosca.capabilities.nfv.VirtualLinkable ] + +#from 7.3.1 + tosca.relationships.nfv.VirtualBindsTo: + derived_from: tosca.relationships.ConnectsTo + valid_target_types: [ tosca.capabilities.nfv.VirtualBindable] + +#from 7.3.2 + tosca.relationships.nfv.HA: + derived_from: tosca.relationships.Root + valid_target_types: [ tosca.capabilities.nfv.HA] + +#from 7.3.3 + tosca.relationships.nfv.Monitor: + derived_from: 
tosca.relationships.ConnectsTo + valid_target_types: [ tosca.capabilities.nfv.Metric] + + +node_types: + +#from 7.4.1 + tosca.nodes.nfv.VNF: + derived_from: tosca.nodes.Root + properties: + id: + type: string + description: ID of this VNF + vendor: + type: string + description: name of the vendor who generate this VNF + version: + type: version + description: version of the software for this VNF + requirements: + - virtualLink: + capability: tosca.capabilities.nfv.VirtualLinkable + +#from 7.4.2 + tosca.nodes.nfv.VDU: + derived_from: tosca.nodes.SoftwareComponent + capabilities: + high_availability: + type: tosca.capabilities.nfv.HA + Virtualbinding: + type: tosca.capabilities.nfv.VirtualBindable + monitoring_parameter: + type: tosca.capabilities.nfv.Metric + requirements: + - high_availability: + capability: tosca.capabilities.nfv.HA + relationship: tosca.relationships.nfv.HA + occurrences: [ 0, 1 ] + - host: + capability: tosca.capabilities.Container + node: tosca.nodes.Compute + relationship: tosca.relationships.HostedOn + +#from 7.4.3 + tosca.nodes.nfv.CP: + derived_from: tosca.nodes.Root + properties: + type: + type: string + required: false + requirements: + - virtualLink: + capability: tosca.capabilities.nfv.VirtualLinkable + - virtualbinding: + capability: tosca.capabilities.nfv.VirtualBindable + attributes: + IP_address: + type: string +#!attributes do not take required .. 
+      required: false
+
+#from 8.1
+  tosca.nodes.nfv.VL:
+    derived_from: tosca.nodes.Root
+    properties:
+      vendor:
+        type: string
+        required: true
+        description: name of the vendor who generates this VL
+    capabilities:
+      virtual_linkable:
+        type: tosca.capabilities.nfv.VirtualLinkable
+
+#from 8.2
+  tosca.nodes.nfv.VL.ELine:
+    derived_from: tosca.nodes.nfv.VL
+
+#from 8.3
+  tosca.nodes.nfv.VL.ELAN:
+    derived_from: tosca.nodes.nfv.VL
+
+#from
+  tosca.nodes.nfv.VL.ETree:
+    derived_from: tosca.nodes.nfv.VL
diff --git a/javatoscachecker/checker/src/main/resources/tosca/tosca_simple_yaml_1_0.grammar b/javatoscachecker/checker/src/main/resources/tosca/tosca_simple_yaml_1_0.grammar
new file mode 100644
index 0000000..56f1eb0
--- /dev/null
+++ b/javatoscachecker/checker/src/main/resources/tosca/tosca_simple_yaml_1_0.grammar
@@ -0,0 +1,1262 @@
+_status_values: &status_values
+  enum:
+    - supported
+    - unsupported
+    - experimental
+    - deprecated
+
+#I do not know that the lists and maps qualify as 'primitive' ..
+_primitive_types: &primitive_types
+  enum: [string,integer,float,boolean,timestamp,list,map,version,range,scalar-unit.size,scalar-unit.frequency,scalar-unit.time]
+
+#needs custom validation as we have to make sure there are 2 elements and allow for the
+#UNBOUNDED keyword as second element
+_range_definition: &range_definition
+  type: seq
+  name: range_definition
+  sequence:
+    - type: scalar
+
+#see A.5.2
+#this is where the need of verifying the size of a collection (sequence/map) came from
+#this is specified as a sequence where each entry is a map with one entry??
+_constraints_sequence: &constraints_sequence
+  name: constraints_sequence
+  short: 0
+  type: seq
+  sequence:
+    - type: map
+#      length: 1
+      mapping:
+        equal:
+          desc: "Constrains a property or parameter to a value equal to the value declared."
+ type: any + required: no + greater_than: + desc: "Constrains a property or parameter to a value greater than the value declared" + type: scalar + required: no + greater_or_equal: + desc: "Constrains a property or parameter to a value greater than or equal to the value declared." + type: scalar + required: no + less_than: + desc: "Constrains a property or parameter to a value less than the value declared" + type: scalar + required: no + less_or_equal: + desc: "Constrains a property or parameter to a value less than or equal to the value declared." + type: scalar + required: no + in_range: + desc: "Constrains a property or parameter to a value in range of (inclusive) the two values declared. +" + type: seq +# length: 2 + sequence: + - type: scalar + required: no + valid_values: + desc: "Constrains a property or parameter to a value that is in the list of declared values" + type: seq + sequence: + - type: scalar + required: no + length: + desc: "Constrains the property or parameter to a value of a given length." + type: int + required: no + min_length: + desc: "Constrains the property or parameter to a value to a minimum length" + type: scalar + required: no + max_length: + desc: "Constrains the property or parameter to a value to a maximum length" + type: scalar + required: no + pattern: + desc: "Constrains the property or parameter to a value that is allowed by the provided regular expression." + type: str + required: no + +# section A.5.3 property_filter_definition +# it is a constraints sequence that gets attached to a property .. 
+_property_filter_definition: &property_filter_definition + name: property_filter_definition + type: map + mapping: + =: + *constraints_sequence + +#section A.5.4 node_filter_definition +_node_filter_definition: &node_filter_definition + type: map + name: node_filter_definition + mapping: + properties: + desc: "property names to constraints to be applied to those properties" + required: no + type: seq + sequence: + - *property_filter_definition +# - type: map +# mapping: +# =: +# *constraints_sequence + capabilities: + desc: "" + required: no + type: seq + sequence: + - type: map + name: node_filter_capabilities_sequence + desc: "the key is a capability name or type" + mapping: + =: + name: node_filter_capabilities_entry + type: map + mapping: + properties: + desc: "the capability properties and their constraints" + name: node_filter_capabilities_properties + type: seq + sequence: + - type: map + name: node_filter_capabilities_property + mapping: + =: *constraints_sequence + +#used in property and attribute definitions +_entry_schema_definition: &entry_schema_definition + desc: "The optional key that is used to declare the name of the Datatype definition for entries of set types such as the TOSCA list or map" + name: entry_schema_definition + required: no + type: map + short: type + mapping: + "type": + desc: "collection element type" + required: yes + type: str + description: + required: no + type: str + constraints: + *constraints_sequence + +# see section A.5.5 +_artifact_definition: &artifact_definition + type: map + name: artifact_definition + short: implementation # assumes type can be inferred .. + mapping: + "type": + desc: "The required artifact type for the artifact definition" + required: yes + type: str + description: + desc: "The optional description for the artifact definition" + required: no + type: str + implementation: + desc: "The optional URI string (relative or absolute) which can be used to locate the artifacts file. 
+" + required: no + type: str + repository: + desc: "The optional name of the repository definition which contains the location of the external repository that contains the artifact" + required: no + type: str + deploy_path: + desc: "The file path the associated file would be deployed into within the target nodes container." + required: no + type: str + +# see section A.5.6 +_repository_definition: &repository_definition + type: map + name: repository_definition + short: url + mapping: + description: + desc: "The optional description for the repository." + required: no + type: str + url: + desc: "The required URL or network address used to access the repository" + required: yes + type: str + credential: + desc: "The optional Credential used to authorize access to the repository" + required: no + type: str + +#see section 3.5.7 +_import_definition: &import_definition + type: map + name: import_definition + short: file + mapping: + file: + desc: "file URI" + required: yes + type: str + repository: + desc: "symbolic name of the repository definition where the imported file can be found" + required: no + type: str + namespace_uri: + desc: "namespace URI to that will be applied to type definitions found within the imported file" + required: no + type: str + namespace_prefix: + desc: "optional namespace prefix (alias) that will be used to indicate the namespace_uri when forming a qualified name (i.e., qname) when referencing type definitions from the imported" + required: no + type: str + +#see section A.5.7 +_property_definition: &property_definition + type: map + name: property_definition + mapping: + "type": + type: str + required: yes +#not as easy, it can be an user defined data type +# <<: *primitive_types + description: + type: str + required: no + constraints: + desc: "The optional list of sequenced constraint clauses for the Data Type." 
+ required: no + <<: *constraints_sequence + default: + type: any + required: no + "required": + type: bool + required: no + status: + type: str + required: no + <<: *status_values + entry_schema: + <<: *entry_schema_definition +# desc: "used to declare the name of the Datatype definition for entries of set types such as the TOSCA list or map." +# type: str +# required: no + +#see section A.5.8 +#_property_assignment_definition: &property_assignment_definition + +#see A.5.9 +_attribute_definition: &attribute_definition + type: map + name: attribute_definition + mapping: + "type": + type: str + required: yes +# <<: *primitive_types + description: + type: str + required: no + default: + type: any + required: no + status: + desc: "The optional status of the attribute relative to the specification or implementation" + type: str + required: no + <<: *status_values + entry_schema: + <<: *entry_schema_definition + +#see section A.5.10 +#here again, we must support the short form which is the most common +_attribute_assignment_definition: &attribute_assignment_definition + type: map + name: attribute_assignment_definition + mapping: + description: + desc: "The optional description of the attribute." + required: no + type: str + value: +#actually 'value | value_expression' + desc: "represent the type-compatible value to assign to the named attribute. Attribute values may be provided as the result from the evaluation of an expression or a function" + required: yes + type: any + + +# see spec section A.5.11 + +# see spec section A.5.11.1: variant to be used in node or relationship type definitions +_type_operation_definition: &type_operation_definition + type: map + name: type_operation_definition + short: implementation + mapping: + description: + desc: "The optional description string for the associated named operation." 
+ required: no + type: str + implementation: + desc: "The optional implementation artifact name (e.g., a script file name within a TOSCA CSAR file)" + required: no + type: str + inputs: + desc: "" + required: no + type: map + mapping: + =: + desc: "a property value or an expression providing a input value" + name: property_assignment + type: any + +# from A.5.11.2 +_template_operation_definition: &template_operation_definition + type: map + name: template_operation_definition + short: implementation + mapping: + description: + desc: "The optional description string for the associated named operation." + required: no + type: str + implementation: + desc: "The optional implementation artifact name (e.g., a script file name within a TOSCA CSAR file)" + name: template_operation_implementation_definition + required: no + short: primary + type: map + mapping: + primary: + desc: "The optional implementation artifact name (e.g., the primary script file name within a TOSCA CSAR file). " + required: no + type: str + dependencies: + desc: "The optional list of one or more dependent or secondary implementation artifact name which are referenced by the primary implementation artifact (e.g., a library the script installs or a secondary script)" + required: no + type: seq + sequence: + - type: str + inputs: + desc: "" + required: no + type: map + mapping: + =: + desc: "a property value or an expression providing a input value" + name: property_assignment + type: any + + +# see section A.5.12, specifically A.5.12.2.1 : definition to be used in node or relationship type definition +_type_interface_definition: &type_interface_definition + type: map + name: type_interface_definition + mapping: + "type": + desc: "represents the required name of the Interface Type for the interface definition +" + required: yes + type: str + inputs: + desc: "The optional list of input property definitions available to all defined operations" + type: map + mapping: + =: + *property_definition + =: + 
*type_operation_definition + +# see section A.5.12.2.2, extended notation to be used in node or relationship template definitions +_template_interface_definition: &template_interface_definition + type: map + name: template_interface_definition + mapping: + inputs: + desc: "The optional list of input property definitions available to all defined operations" + type: map + mapping: + =: + desc: "a property value or an expression providing a property value" + name: property_assignment + type: any + =: + *template_operation_definition + + +# A.6 section: type specific definitions + +# see section A.6.1 +_capability_definition: &capability_definition + type: map + name: capability_definition + short: type + mapping: + "type": + desc: "The required name of the Capability Type the capability definition is based upon" + required: yes + type: str + description: + desc: "The optional description of the Capability definition" + required: no + type: str + properties: + desc: "" + required: no + type: map + mapping: + =: + *property_definition + attributes: + desc: "An optional list of property definitions for the Capability definition" + required: no + type: map + mapping: + =: + *attribute_definition + valid_source_types: + desc: "" + required: no + type: seq + sequence: + - type: str + occurrences: + desc: "The optional minimum and maximum occurrences for the capability." + required: no + <<: *range_definition + +# see section A.6.2 +# +_requirement_definition: &requirement_definition + type: map + name: requirement_definition + short: capability #as per A.6.2.2.1 + mapping: + capability: + desc: "The required reserved keyname used that can be used to provide the name of a valid Capability Type that can fulfil the requirement" + required: yes + type: str + node: + desc: "The optional reserved keyname used to provide the name of a valid Node Type that contains the capability definition that can be used to fulfil the requirement. 
" + required: no + type: str + relationship: +# and from section A.6.2.1, this one is an oddball + desc: "The optional reserved keyname used to provide the name of a valid Relationship Type to construct when fulfilling the requirement." + required: no + name: requirement_relationship_definition + short: type + type: map + mapping: + type: + desc: "The optional reserved keyname used to provide the name of the Relationship Type for the requirement definitions relationship keyname. +" + required: yes + type: str + interfaces: + #not clear which interface definition is to be used here + desc: "allows augmentation (additional properties and operations) of the interfaces defined by the relationship type indicated above" + required: no + type: map + mapping: + =: + *type_interface_definition + occurrences: + desc: "The optional minimum and maximum occurrences for the requirement." + required: no + <<: *range_definition + +# see section A.6.3 +_artifact_type_definition: &artifact_type_definition + type: map + name: artifact_type_definition + mapping: + derived_from: + desc: "An optional parent Artifact Type name the Artifact Type derives from" + required: no + type: str + description: + desc: "An optional description for the Artifact Type." + required: no + type: str + mime_type: + desc: "The required mime type property for the Artifact Type." 
+ required: no + type: str + file_ext: + desc: "The required file extension property for the Artifact Type" + required: no + type: seq + sequence: + - type: str + properties: + desc: "An optional list of property definitions for the Artifact Type" + required: no + type: map + mapping: + =: + *property_definition + +#see spec section #A.6.4 +_interface_type_definition: &interface_type_definition + type: map + name: interface_type_definition + mapping: + inputs: + desc: "The optional list of input property definitions available to all defined operations" + type: map + mapping: + =: + type: str + desc: "property_name to property_value(_expression) mapping" + =: + *type_operation_definition + +# A.6.5 +_data_type_definition: &data_type_definition + type: map + name: data_type_definition + mapping: + derived_from: + desc: "The optional key used when a datatype is derived from an existing TOSCA Data Type. +" + required: no + type: str + description: + desc: "The optional description for the Data Type. +" + required: no + type: str + constraints: + desc: "The optional list of sequenced constraint clauses for the Data Type." + <<: *constraints_sequence + properties: + desc: "The optional list property definitions that comprise the schema for a complex Data Type in TOSCA" + type: map + mapping: + =: + *property_definition + +# see section A.6.6 +_capability_type_definition: &capability_type_definition + type: map + name: capability_type_definition + mapping: + derived_from: + desc: "An optional parent capability type name this new Capability Type derives from." + required: no + type: str + description: + desc: "An optional description for the Capability Type" + required: no + type: str + properties: + desc: "An optional list of property definitions for the Capability Type." 
+ required: no + type: map + mapping: + =: + *property_definition + attributes: + desc: "An optional list of attribute definitions for the Capability Type" + required: no + type: map + mapping: + =: + *attribute_definition + valid_source_types: + desc: "An optional list of one or more valid names of Node Types that are supported as valid sources of any relationship established to the declared Capability Type" + required: no + type: seq + sequence: + - type: str + +# section A.6.7 requirement definition: TOSCA YAML profile relies on capability types to +# define requirements + +# see section A.6.9 +_relationship_type_definition: &relationship_type_definition + type: map + name: relationship_type_definition + mapping: + derived_from: + desc: "An optional parent Relationship Type name the Relationship Type derives from" + required: no + type: str + description: + desc: "An optional description for the Relationship Type." + required: no + type: str + properties: + desc: "An optional list of property definitions for the Relationship Type" + required: no + type: map + mapping: + =: + *property_definition + attributes: + desc: "An optional list of attribute definitions for the Relationship Type" + required: no + type: map + mapping: + =: + *attribute_definition + interfaces: + desc: "An optional list of interface definitions interfaces supported by the Relationship Type" + required: no + type: map + mapping: + =: + *type_interface_definition + valid_target_types: + desc: "An optional list of one or more names of Capability Types that are valid targets for this relationship. 
" + required: no + type: seq + sequence: + - type: str + +#see section 3.6.10 +_group_type_definition: &group_type_definition + type: map + name: group_type_definition + mapping: + derived_from: + desc: "An optional parent Group Type name this new Group Type derives from" + required: no + type: str + version: + desc: "An optional version for the Group Type definition" + required: no + type: str + description: + desc: "An optional description for the Group Type" + required: no + type: str + properties: + desc: "An optional list of property definitions for the Group Type." + required: no + type: map + mapping: + =: + *property_definition + targets: + desc: "An optional list of one or more names of Node Types that are valid +(allowed) as members of the Group Type." + required: no + type: seq + sequence: + - type: str + interfaces: + desc: "An optional list of interface definitions supported by the Group Type" + required: no + type: map + mapping: + =: + *type_interface_definition + +#see section 3.6.11 +_policy_type_definition: &policy_type_definition + type: map + name: policy_type_definition + mapping: + derived_from: + desc: "An optional parent Policy Type name this new Policy Type derives from" + required: no + type: str + version: + desc: "An optional version for the Policy Type definition" + required: no + type: str + description: + desc: "An optional description for the Policy Type" + required: no + type: str + properties: + desc: "An optional list of property definitions for the Policy Type." 
+ required: no + type: map + mapping: + =: + *property_definition + targets: + desc: "An optional list of valid Node Types or Group Types the Policy Type +can be applied to" + required: no + type: seq + sequence: + - type: str + +# see section A.6.8 +_node_type_definition: &node_type_definition + type: map + name: node_type_definition + mapping: + derived_from: + desc: "An optional parent Node Type name this new Node Type derives from" + required: no + type: str + description: + desc: "An optional description for the Node Type" + required: no + type: str + properties: + desc: "An optional list of property definitions for the Node Type." + required: no + type: map + mapping: + =: + *property_definition + attributes: + desc: "An optional list of attribute definitions for the Node Type. +" + required: no + type: map + mapping: + =: + *attribute_definition + requirements: + desc: "An optional sequenced list of requirement definitions for the Node Type. +" + required: no + type: seq + sequence: + - type: map + mapping: + =: + *requirement_definition + capabilities: + desc: "An optional list of capability definitions for the Node Type" + required: no + type: map + mapping: + =: + *capability_definition + interfaces: + desc: "" + required: no + type: map + mapping: + =: + *type_interface_definition + artifacts: + desc: "An optional list of named artifact definitions for the Node Type" + required: no + type: map + mapping: + =: + *artifact_definition + +# A.7 Template specific definitions + +# see section A.7.1 +_capability_assignment_definition: &capability_assignment_definition + type: map + name: capability_assignment_definition + mapping: + properties: + # list of property assignments + desc: "An optional list of property definitions for the Capability definition" + required: no + type: map + mapping: + =: + desc: "a property value or an expression providing a property value" + name: property_assignment + type: any + attributes: + # list of attribute assignments + 
desc: "An optional list of attribute definitions for the Capability definition" + required: no + type: map + mapping: + =: + desc: "" + name: attribute_assignment + type: any + +# see section A.7.2 +_requirement_assignment_definition: &requirement_assignment_definition + type: map + name: requirement_assignment_definition + short: node + mapping: + capability: + desc: " used to provide the name of either a: Capability definition within a target node template that can fulfill the requirement or Capability Type that the provider will use to select a type-compatible target node template to fulfill the requirement at runtime." + required: no + type: str + node: +#why is this a reference to a node type and not to a node template?? + desc: "used to identify the target node of a relationship: Node Template name that can fulfil the target node requirement or Node Type name that the provider will use to select a type-compatible node template to fulfil the requirement at runtime" + required: no + type: str + relationship: + desc: "" + required: no +#fins a better name name: relationship_definition + type: map + short: type + mapping: + "type": + desc: "The optional reserved keyname used to provide the name of the Relationship Type for the requirement assignments relationship keyname" + required: no + type: str + properties: + desc: "" + required: no + type: map + mapping: + =: + desc: "a property value or an expression providing a property value" + name: property_assignment + type: any + interfaces: + desc: "from A.5.12.2.2, right?" + required: no + type: map + mapping: + =: + *template_interface_definition + node_filter: + desc: "The optional filter definition that TOSCA orchestrators or providers would use to select a type-compatible target node that can fulfill the associated abstract requirement at runtime." 
+ required: no + <<: *node_filter_definition + +# see section A.7.3 +_node_template_definition: &node_template_definition + type: map + name: node_template_definition + mapping: + "type": + desc: "The required name of the Node Type the Node Template is based upon" + required: yes + type: str + description: + desc: "An optional description for the Node Template" + required: no + type: str + directives: + desc: "An optional list of directive values to provide processing instructions to orchestrators and tooling." + required: no + type: seq + sequence: + - type: str + properties: +#custom check needs to be added: the value or expression providing the property value +#needs to be compatible with the property definition + desc: "An optional list of property value assignments for the Node Template." + required: no + type: map + mapping: + =: + type: any + name: property_assignment + desc: "a property value or an expression providing a property value" + attributes: + desc: "An optional list of attribute value assignments for the Node Template" + required: no + type: map + mapping: + =: + *attribute_assignment_definition + requirements: + desc: "An optional sequenced list of requirement assignments for the Node Template." + required: no + type: seq + sequence: + - type: map + mapping: + =: + *requirement_assignment_definition + capabilities: + desc: "An optional list of capability assignments for the Node Template." + required: no + type: map + mapping: + =: + *capability_assignment_definition + interfaces: + desc: "An optional list of named interface definitions for the Node Template" + required: no + type: map + mapping: + =: + *template_interface_definition + artifacts: + desc: "An optional list of named artifact definitions for the Node Template. +" + required: no + type: map + mapping: + =: + *artifact_definition + node_filter: + desc: "The optional filter definition that TOSCA orchestrators would use to select the correct target node. 
This keyname is only valid if the directive has the value of 'selectable' set." + required: no + <<: *node_filter_definition + copy: + desc: "The optional (symbolic) name of another node template to copy into (all keynames and values) and use as a basis for this node template." + required: no + type: str + +# see section A.7.4 +_relationship_template_definition: &relationship_template_definition + type: map + name: relationship_template_definition + mapping: + "type": + desc: "The required name of the Relationship Type the Relationship Template is based upon" + required: yes + type: str + alias: + desc: "The optional name of a different Relationship Template definition whose values are (effectively) copied into the definition for this Relationship Template (prior to any other overrides)." + required: no + type: str + description: + desc: "An optional description for the Relationship Template" + required: no + type: str + properties: + desc: "An optional list of property assignments for the Relationship Template." + required: no + name: properties_assignment_validation + type: map + mapping: + =: + type: any +#scalar + desc: "an expression providing a property value" + attributes: + desc: "An optional list of attribute value assignments for the Relationship Template" + required: no + name: attributes_assignment_validation + type: map + mapping: + =: + type: scalar + desc: "an expression providing an attribute value" + interfaces: + desc: "An optional list of named interface definitions for the Relationship Template ('augmentation' is allowed here)" + required: no + type: map + mapping: + =: + *template_interface_definition + copy: + desc: "The optional (symbolic) name of another relationship template to copy into (all keynames and values) and use as a basis for this relationship template." 
+ required: no + type: str + + +# see section 3.7.5 +_group_definition: &group_definition + type: map + name: group_definition + mapping: + "type": + desc: "The required name of the group type the group definition is based upon" + required: yes + type: str + description: + desc: "The optional description for the group definition" + required: no + properties: + desc: " represents the optional list of property assignments for the group definition that provide values for properties defined in its declared Group Type" + required: no + type: map + mapping: + =: + type: any + name: property_assignment + targets: + desc: "contains the required list of one or more node template names (within the same topology template) that are members of this logical group" + required: yes + type: seq + sequence: + - type: str + interfaces: + desc: "represents the optional list of interface definitions for the group definition that augment those provided by its declared Group Type" + required: no + type: map + mapping: + =: + *template_interface_definition + +# see section 3.7.6 +_policy_template_definition: &policy_template_definition + type: map + name: policy_definition + mapping: + "type": + desc: "The required name of the policy type the policy definition is based upon" + required: yes + type: str + description: + desc: "The optional description for the policy definition" + required: no + properties: + desc: "represents the optional list of property assignments for the policy definition that provide values for properties defined in its declared Policy Type" + required: no + type: map + mapping: + =: + type: any + name: property_assignment + targets: + desc: "represents the optional list of names of node templates or groups that the policy is to applied to" + required: no + type: seq + sequence: + - type: str + +# see section 3.8 Topology Template definition: defines the topology template of a cloud application. 
+# described as a a reusable grammar as it can be a part of a service template definition +_topology_template_definition: &topology_template_definition + type: map + name: topology_template_definition + mapping: + description: + desc: "a description of the topology template" + required: no + type: str + inputs: + desc: "definition of input parameters for the topology template" + name: inputs + required: no + type: map + mapping: + =: + *property_definition + node_templates: + desc: "definition of the node templates of the topology" + name: node_templates + required: no + type: map + mapping: + =: + *node_template_definition + relationship_templates: + desc: "definition of the relationship templates of the topology" + required: no + name: relationship_templates + type: map + mapping: + =: + *relationship_template_definition + outputs: + desc: "definition of output parameters for the topology template" + name: outputs + required: no + type: map + mapping: + =: + *attribute_assignment_definition + groups: + desc: "An optional list of Group definitions whose members are node templates defined within this same Topology Template" + name: groups + required: no + type: map + mapping: + =: + *group_definition + policies: + # see 8.2.3, initially the list is not described as sequenced but then the grammar shows it as such !? + desc: "An optional sequenced?? list of Policy definitions for the Topology Template." + name: policies + required: no + type: seq + sequence: + - type: map + mapping: + =: + *policy_template_definition + substitution_mappings: +# one possible short-coming that is visible here is that the definition of the capability +# and requirements mappings are given in the spec only with the short/inline version of a +# YAML list/sequence, which cannot be enforced here .. 
+ desc: " a description of the topology template" + name: substitution_mappings + required: no + type: map + mapping: + node_type: + desc: "node type name" + required: yes + type: str + capabilities: + desc: "map_of_capability_mappings_to_expose" + type: map + mapping: + =: + type: seq + sequence: + - type: str + requirements: + desc: "map_of_requirement_mapping_to_expose" + type: map + mapping: + =: + type: seq + sequence: + - type: str + + +# see A.9 Service Template definition: A TOSCA Service Template (YAML) document contains +# element definitions of building blocks for cloud application, or complete models of cloud applications. + +type: map +name: service_template_definition +mapping: + tosca_definitions_version: + desc: "Required TOSCA Definitions version string" + required: yes + type: str + enum: [tosca_simple_yaml_1_0, tosca_simple_yaml_1_0_0] + + tosca_default_namespace: + desc: "Optional. default namespace (for type schema)" + required: no + type: str + + metadata: + desc: "Optional metadata keyname: value pairs" + name: metadata + required: no + type: map + mapping: + template_name: + desc: "Optional name of this service template" + required: no + type: str + template_author: + desc: "Optional author of this service template" + required: no + type: str + template_version: + desc: "Optional version of this service template" + required: no + type: str + =: + desc: "User defined entry" + required: no + type: str + +#to add, the spec says: "Optional list of domain or profile specific metadata keynames" + + description: + desc: "Optional description of the definitions inside the file" + required: no + type: str + + imports: + desc: "ordered list of import statements for importing other definitions files" + name: imports + required: no + type: seq + sequence: + - type: map + mapping: + =: + *import_definition + + dsl_definitions: + desc: "list of YAML alias anchors (or macros)" + name: dsl_definitions + required: no + type: map + mapping: + =: + desc: "some 
piece of valid yaml that makes the anchor/alias definition" + type: any + required: no + + repositories: + desc: "list of external repository definitions which host TOSCA artifacts" + name: repositories + required: no + type: map + mapping: + =: + *repository_definition + + data_types: + desc: "list of TOSCA datatype definitions" + name: data_types + required: no + type: map + mapping: + =: + *data_type_definition + + node_types: + desc: "list of node type definitions" + name: node_types + required: no + type: map + mapping: + =: + *node_type_definition + + capability_types: + desc: "list of capability type definitions" + name: capability_types + required: no + type: map + mapping: + =: + *capability_type_definition + + relationship_types: + desc: "list of relationship type definitions" + name: relationship_types + required: no + type: map + mapping: + =: + *relationship_type_definition + + artifact_types: + desc: "list of artifact type definitions" + name: artifact_types + required: no + type: map + mapping: + =: + *artifact_type_definition + + interface_types: + desc: "list of interface type definitions" + name: interface_types + required: no + type: map + mapping: + =: + *interface_type_definition + + group_types: + desc: "list of group type definitions" + name: group_types + required: no + type: map + mapping: + =: + *group_type_definition + + policy_types: + desc: "list of policy type definitions" + name: policy_types + required: no + type: map + mapping: + =: + *policy_type_definition + + topology_template: + desc: "topology template definition of the cloud application or service" + required: no + <<: *topology_template_definition diff --git a/javatoscachecker/checker/src/main/resources/tosca/tosca_simple_yaml_1_1.grammar b/javatoscachecker/checker/src/main/resources/tosca/tosca_simple_yaml_1_1.grammar new file mode 100644 index 0000000..e199680 --- /dev/null +++ b/javatoscachecker/checker/src/main/resources/tosca/tosca_simple_yaml_1_1.grammar @@ -0,0 +1,1646 
@@ +_status_values: &status_values + enum: + - supported + - unsupported + - experimental + - deprecated + +#I do not know that the lists and maps qualify as 'primitive' .. +_primitive_types: &primitive_types + enum: [string,integer,float,boolean,timestamp,list,map,version,range,scalar-unit.size,scalar_unit.frequency,scalar_unit.time] + +#needs custom validation as we have to make sure there are 2 elements and allow for the +#UNBOUNDED keyword as second element +_range_definition: &range_definition + desc: "Section used to declare additional metadata information" + required: no + type: seq + size: 2 + name: range_definition + sequence: + - type: scalar + +_version_definition: &version_definition + desc: "An optional TOSCA version number" + required: no + type: str + name: version_definition +# pattern: .[.[.[- value mapping became valid + required: false + type: string + endTimeField: + description: time when the key --> value mapping ceased to be valid + required: false + type: string + + +####################################################################### +#### Things which describe how a data source can be accessed + + + + dataDelivery: + description: the means of connecting to a data source. very incomplete. + derived_from: dcaeCatalogNodes + properties: + deliveryMechanism: + description: the general method by which data is delivered from a source. + required: true + type: string + constraints: + - valid_values: [stream, file, database, table ] + polling: + description: push vs. pull + required: true + type: string + constraints: + - valid_values: [ push, pull] + capabilities: + dataDelivery: + type: dcaeCatalog.capabilities.dataDelivery + + streamDataDelivery: + description: a data source that is a streaming source. + derived_from: dataDelivery + properties: + deliveryMechanism: + description: the general method by which data is delivered from a source. + required: true + type: string + constraints: + - equal: stream + polling: + description: push vs. 
pull + required: true + type: string + constraints: + - equal: push + + + genericCDAPDelivery: + description: a streaming data source delivered via the CDAP generic + derived_from: streamDataDelivery + + + +# A UEB stream is kafka plus security TBD + dmaapMessageDelivery: + description: a streaming data source delivered via a kafka queue + derived_from: streamDataDelivery + properties: + servers: + description: the servers to contact to access the kafka queue. + required: true + type: list + entry_schema: + type: string + constraints: + topic: + description: the stream identifier (topic in kafka terminology) + required: true + type: string + numPartitions: + description: the number of partitions for the kafka topic + required: false + default: 1 + type: integer + PartitioningMethod: + description: the mechanism that assigns records to partitions + required: false + type: string + partitionSubscriptions: + description: if specified, the set of partitions that contain a particular substream + required: false + type: list + entry_schema: + type: integer + + + dmaapDataRouterDelivery: + description: a data source that is delivered via files. + derived_from: dataDelivery + properties: + deliveryMechanism: + description: the general method by which data is delivered from a source. + required: true + type: string + constraints: + - valid_values: [file] + fileName: + description: a name (or naming scheme) of the file + required: true + type: string + + fileDataDelivery: + description: a data source that is delivered via files. + derived_from: dataDelivery + properties: + deliveryMechanism: + description: the general method by which data is delivered from a source. 
+ required: true + type: string + constraints: + - valid_values: [file] + fileName: + description: a name (or naming scheme) of the file + required: true + type: string + + pulledFileDelivery: + description: a data source that is delivered via files pulled from an external source + derived_from: fileDataDelivery + properties: + uri: + description: the thing to access to get a new file + required: true + type: string + trigger: + description: a description of the thing that triggers pulling a new file. + required: true + type: string + + + +#################################### +### Description of a data source (data output) + + dataSource: + description: a generic data source description, link to schema, properties, delivery + derived_from: dcaeCatalogNodes +# properties: + requirements: + - dataFormat: + capability: dcaeCatalog.capabilities.dataFormat + - dataDelivery: + capability: dcaeCatalog.capabilities.dataDelivery + - dataProperties: + capability: dcaeCatalog.capabilities.dataProperties + capabilities: + dataSource: + type: dcaeCatalog.capabilities.dataSource + occurrences: [0, UNBOUNDED] + +########################## +### Description of a data requirement (data input) + dataRequirement: + description: a generic data source description, link to schema, properties, delivery + derived_from: dcaeCatalogNodes +# properties: + requirements: + - dataFormat: + capability: dcaeCatalog.capabilities.dataFormat + occurrences: [1, UNBOUNDED] + - dataProperties: + capability: dcaeCatalog.capabilities.dataProperties + - dataDelivery: + capability: dcaeCatalog.capabilities.dataDelivery + capabilities: + dataRequirement: + type: dcaeCatalog.capabilities.dataRequirement + occurrences: [0, UNBOUNDED] + + + + + +#################################### +### Description of a processingModuleParameters (module parameters excluding input/output descriptions +### allows any baseDataFormat derived input + processingModuleParameters: + description: + derived_from: baseDataFormat + 
properties: + dataType: + required: true + type: string + capabilities: + moduleParameters: + type: dcaeCatalog.capabilities.processingModuleParameters + + +################################ +### Module Dependencies + + instantiationDependency: + description: code dependencies (possible transitive) + derived_from: dcaeCatalogNodes + properties: + dependencyType: + required: false + type: string + constraints: + - valid_values: [R, Python, CDAP] + dependencyValue: + required: true + type: string + capabilities: + dataProperties: + type: dcaeCatalog.capabilities.instantiationDependency + occurrences: [0, UNBOUNDED] + requirements: + - dependencies: + capability: dcaeCatalog.capabilities.instantiationDependency + occurrences: [0, UNBOUNDED] + +#################################### +### Description of a processingModuleDeployment - deployment attributes + + processingModuleDeployment: + description: deployment attributes + derived_from: dcaeCatalogNodes + properties: + language: + required: true + type: string + version: + type: string + required: true + deploymentType: + required: true + type: string + capabilities: + moduleDeployment: + type: dcaeCatalog.capabilities.processingModuleDeployment + occurrences: [0, UNBOUNDED] + requirements: + - dependencies: + capability: dcaeCatalog.capabilities.instantiationDependency + occurrences: [0, UNBOUNDED] + +#################################### +### Description of a processingModuleMetaData - search tags + + processingModuleMetadata: + description: metadata tags + derived_from: dcaeCatalogNodes + properties: + metaDataList: + required: true + type: list + entry_schema: + type: string + constraints: + capabilities: + metadata: + type: dcaeCatalog.capabilities.processingModuleMetadata + occurrences: [0, UNBOUNDED] + + +#################################### +### Description of a processingModule + + processingModule: + description: full processing module description + derived_from: dcaeCatalogNodes + properties: + 
moduleDescription: + required: true + type: string + requirements: + - processingModuleparameters: + capability: dcaeCatalog.capabilities.processingModuleParameters + occurrences: [0, UNBOUNDED] + - processingModuleDeployment: + capability: dcaeCatalog.capabilities.processingModuleDeployment + occurrences: [1, UNBOUNDED] + - processingModuleMetadata: + capability: dcaeCatalog.capabilities.processingModuleMetadata + occurrences: [1, UNBOUNDED] + - dataRequirementsList: + capability: dcaeCatalog.capabilities.dataRequirement + occurrences: [1, UNBOUNDED] + - dataSourcesList: + capability: dcaeCatalog.capabilities.dataSource + occurrences: [1, UNBOUNDED] + +capability_types: + dcaeCatalog.capabilities.dataFormat: + derived_from: tosca.capabilities.Root + dcaeCatalog.capabilities.dataProperties: + derived_from: tosca.capabilities.Root + dcaeCatalog.capabilities.dataDelivery: + derived_from: tosca.capabilities.Root + dcaeCatalog.capabilities.instantiationDependency: + derived_from: tosca.capabilities.Root + dcaeCatalog.capabilities.processingModuleMetadata: + derived_from: tosca.capabilities.Root + dcaeCatalog.capabilities.processingModuleParameters: + derived_from: tosca.capabilities.Root + dcaeCatalog.capabilities.processingModuleDeployment: + derived_from: tosca.capabilities.Root + dcaeCatalog.capabilities.dataRequirement: + derived_from: tosca.capabilities.Root + dcaeCatalog.capabilities.dataSource: + derived_from: tosca.capabilities.Root + + diff --git a/javatoscachecker/checker/src/test/tosca/mini_ceil_tosca.yaml b/javatoscachecker/checker/src/test/tosca/mini_ceil_tosca.yaml new file mode 100644 index 0000000..9879746 --- /dev/null +++ b/javatoscachecker/checker/src/test/tosca/mini_ceil_tosca.yaml @@ -0,0 +1,108 @@ +tosca_definitions_version: tosca_simple_yaml_1_0_0 + +imports: + - dcae_uservice_tosca.yaml + +topology_template: + + node_templates: + + HostComputeStats_Ceilometer: + type: delimitedAsciiDataFormat + properties: + name: HostComputeStats_Ceilometer + 
UUID: HostComputeStats_Ceilometer + version: 1.0.0 + delimiter: "|" + fieldList: + - + name: DATETIME_FTP_COLLECTION + type: date_time +# subType: 1.0 + subType: "yyyymmddhhmmss" + - + name: DATETIME_OSS + type: date_time + subType: "yyyymmddhhmmss" + - + name: DATETIMEUTC + type: date_time + subType: "yyyymmddhhmmss" + - + name: DATETIMEZONE + type: date_time + subType: "yyyymmddhhmmss zzz" + - + name: EMS_NAME + type: string + comment: "Element Manager System Name?" + - + name: NE_VERSION + type: string + comment: "Network Element Version????" + - + name: PERIOD + type: int + comment: "Sample period in seconds" + - + name: SUSPECTFLAG + type: string + comment: "Data suspect? Null means ok?" + - + name: SERVICE + type: string + - + name: resourceid + type: string + - + name: hardware.cpu.load.1min + type: float + comment: "CPU load in in the last one minute" + - + name: hardware.cpu.load.5min + type: float + comment: "CPU load in in the last five minutes" + - + name: hardware.cpu.load.15min + type: float + comment: "CPU load in in the last fifteen minutes" + - + name: hardware.cpu.memory.total + type: float + comment: "Total memory size in KB" + - + name: hardware.cpu.memory.avail + type: float + comment: "Memory available in KB" + - + name: hardware.cpu.swap.total + type: float + comment: "Total swap size in KB" + - + name: hardware.cpu.swap.avail + type: float + comment: "Swap available in KB" + - + name: hardware.network.ip.incoming.datagrams + type: float + comment: "Cumulative incoming datagrams" + - + name: hardware.network.ip.outgoing.datagrams + type: float + comment: "Cumulative outgoing datagrams" + - + name: hardware.network.ip.incoming.blocks + type: float + comment: "Cumulative incoming blocks" + - + name: hardware.network.ip.outgoing.blocks + type: float + comment: "Cumulative outgoing blocks" + - + name: hardware.system_states.cpu.idle + type: float + comment: "CPU Percent idle" + formatType: fixed_format + formatSubType: delimitedAscii + fullList: 
true + numFields: 22 diff --git a/javatoscachecker/checker/src/test/tosca/ucpe_schema.yml b/javatoscachecker/checker/src/test/tosca/ucpe_schema.yml new file mode 100644 index 0000000..5c5ac5a --- /dev/null +++ b/javatoscachecker/checker/src/test/tosca/ucpe_schema.yml @@ -0,0 +1,403 @@ +tosca_definitions_version: tosca_simple_yaml_1_0_0 +description: CCD uCPE specific data types + +metadata: + template_name: uCPE_schema + template_version: "0.1" + +imports: + - tosca/tosca-network-types.yaml + +data_types: + + routing-rule: + properties: + logical-interface-name: + type: string + ip-version: + type: string + mtu: + type: integer + is-unnumbered: + type: boolean + unnumbered-local-address: + type: string + unnumbered-remote-address: + type: string + cos-model: + type: string + pe-shaping: + type: string + bgp-v4-neighbor-address: + type: string + bgp-v4-peer-as: + type: string + bgp-v4-md5-key: + type: string + bgp-v4-my-as: + type: string + bgp-v6-neighbor-address: + type: string + bgp-v6-peer-as: + type: string + bgp-v6-md5-key: + type: string + bgp-v6-my-as: + type: string + bgp-v4-keep-alive-timer: + type: string + bgp-v6-keep-alive-timer: + type: string + bgp-v4-hold-timer: + type: string + bgp-v6-hold-timer: + type: string + v4-isp-lan-ip-blocks: + type: list + entry_schema: string + v6-isp-lan-ip-blocks: + type: list + entry_schema: string + +node_types: + + com.att.d2.resource.ucpe.networkInterface: + derived_from: tosca.nodes.Root + properties: + service_port_name: + type: string + physical_port_name: + type: string + physical_speed: + type: integer + physical_speed_units: + type: string + default: "Gbps" + requirements: + - host: + capability: com.att.d2.capabilities.hostedOn +# node: com.att.d2.resource.uCPE + capabilities: + link: tosca.capabilities.network.Linkable + +#do not know to what degree we still need the two specalized versions below + com.att.d2.resource.ucpe.lan_networkInterface: + derived_from: com.att.d2.resource.ucpe.networkInterface + 
requirements: + - host: + capability: com.att.d2.capabilities.hostedOn + node: com.att.d2.resource.uCPE + + com.att.d2.resource.ucpe.wan_networkInterface: + derived_from: com.att.d2.resource.ucpe.networkInterface + properties: + internet_connection_type: + type: string + default: "AVPN" + constraints: + - valid_values: ["AVPN", "AVPNIVLAN", "3RDPARTYINTERNET", "MIS", "GMIS", "ASE", "ASEOD", "3RDPARTYETHERNET", "3RDPARTYMPLS", "HSIA-E"] + requirements: + - host: + capability: com.att.d2.capabilities.hostedOn + node: com.att.d2.resource.uCPE + + +#the participation of a network interface in a VLAN needs to carry a 'tag' attribute (tagged/untagged values) +#previously we attached this attribute to the Port +# com.att.d2.resource.ucpe.lan_port: +# properties: +# tag: +# type: string +# default: "tagged" +# constraints: +# - valid_values: [tagged, untagged] +# +# now it has to go in the relationship between the networkInterface and the VLAN, i.e. LinksTo +# + + com.att.d2.resource.ucpe.vlan: +# can't use as a base type anymore as it has a Linkable capability +# derived_from: tosca.nodes.network.Network + properties: + vlan_id: + type: string + requirements: + - link: + capability: tosca.capabilities.network.Linkable + node: com.att.d2.resource.ucpe.networkInterface + relationship: tosca.relationships.network.LinksTo + - binding: + capability: tosca.capabilities.network.Bindable + node: com.att.d2.resource.ucpe.VNF + relationship: tosca.relationships.network.BindsTo + + + #virtual network functions to be hosted by a uCPE + com.att.d2.resource.ucpe.VNF: + derived_from: tosca.nodes.Root + properties: + att-part-number: + type: string + constraints: + - max_length: 255 + + vendor-name: + type: string + constraints: + - max_length: 255 + vendor-part-number: + type: string + constraints: + - max_length: 255 + vendor-model: + type: string + constraints: + - max_length: 255 + vendor-model-description: + type: string + constraints: + - max_length: 255 + + vcpu-default: + type: 
integer + constraints: + - less_or_equal: 16 + vcpu-min: + type: integer + constraints: + - less_or_equal: 16 + vcpu-max: + type: integer + constraints: + - less_or_equal: 16 + + vmemory-default: + type: integer + vmemory-units: + type: string + default: "GB" + vmemory-min: + type: integer + constraints: + - less_or_equal: 16 + vmemory-max: + type: integer + constraints: + - less_or_equal: 16 + + vdisk-default: + type: integer + vdisk-units: + type: string + default: "GB" + vdisk-min: + type: integer + constraints: + - less_or_equal: 16 + vdisk-max: + type: integer + constraints: + - less_or_equal: 128 + +#what is this guy?? + vnf-type: + type: string + + software-version: + type: version + software-version-state: + type: integer + software-file-name: + type: string + constraints: + - max_length: 255 + + vnf-feature: + type: list + entry_schema: string + + vnf-instance-id: + type: string + constraints: + - max_length: 255 + + management-option: + type: string + default: ATT + constraints: + - valid_values: ["CUSTOMER", "ATT"] + + requirements: + - host: + capability: com.att.d2.capabilities.hostedOn + + capabilities: + binding: + type: tosca.capabilities.network.Bindable + occurrences: [1,UNBOUNDED] + + + com.att.d2.resource.uCPE: # base node type for uCPE + derived_from: tosca.nodes.Root + properties: + att-part-number: + type: string + vendor-name: + type: string + required: true + vendor-model: + type: string + required: true + + total-vcpu: + type: integer + description: number of vCPUs + total-memory: + type: integer + description: GB + total-disk: + type: integer + description: GB + + base-system-image-file-name: + type: string + linux-host-vendor: + type: string + linux-host-os-version: + type: version + base-system-software: + type: string + jdm-vcpu: + type: integer + jdm-memory: + type: integer + description: GB + jdm-disk: + type: integer + description: GB + jdm-version: + type: string + jcp-vcpu: + type: integer + jcp-memory: + type: integer + 
description: GB + jcp-disk: + type: integer + description: GB + jcp-version: + type: version + + capabilities: + vnf_hosting: + type: com.att.d2.capabilities.hostedOn + valid_source_types: [com.att.d2.resource.ucpe.VNF] + description: Provides hosting capability for VNFs + WAN_connectivity: + type: com.att.d2.capabilities.hostedOn + valid_source_types: [com.att.d2.resource.ucpe.wan_networkInterface] + description: external WAN1 n/w interface + occurrences: [1,2] + LAN_connectivity: + type: com.att.d2.capabilities.hostedOn + valid_source_types: [com.att.d2.resource.ucpe.lan_networkInterface] + description: external LAN n/w interface + occurrences: [1,8] + +# specific VNFs + + com.att.d2.resource.vRouter: + derived_from: com.att.d2.resource.ucpe.VNF + properties: + management-v6-address: + type: string + nm-lan-v6-address: + type: string + nm-lan-v6-prefix-length: + type: string + management-v4-address: + type: string + nm-lan-v4-address: + type: string + nm-lan-v4-prefix-length: + type: string + + routing-instance-name: + type: string + routing-instances: + type: map + entry_schema: + type: routing-rule + + com.att.d2.resource.vWANx: + derived_from: com.att.d2.resource.ucpe.VNF + properties: +# att-part-number: +# type: string +# default: "VCX-SUB-00255-U" + vendor-name: + type: string + default: "VendorX" + vendor-model: + type: string + default: "WANx-VM00" + + vcpu-default: + type: integer + default: 1 + vcpu-min: + type: integer + default: 1 + vcpu-max: + type: integer + default: 1 + + vmemory-default: + type: integer + default: 1 + vmemory-min: + type: integer + default: 1 + vmemory-max: + type: integer + default: 1 + + vdisk-default: + type: integer + default: 20 + vdisk-min: + type: integer + default: 2 + vdisk-max: + type: integer + default: 20 + + vnf-type: + type: string + default: "Advanced WANx" + software-version: + description: "The WANx software version. The value must match the AT&T part number." 
+ type: version + default: 9.2.0 + + software-file-name: + type: string + default: "vnfImageFilename" + + vnf-instance-id: + type: string + description: "The WANx hostname." + + com.att.d2.resource.vFW: + derived_from: com.att.d2.resource.ucpe.VNF + + + +############################ +# Customized capability types definitions +############################ +capability_types: + + com.att.d2.capabilities.hostedOn: + derived_from: tosca.capabilities.Root + diff --git a/javatoscachecker/checker/src/test/tosca/workflow_1.yaml b/javatoscachecker/checker/src/test/tosca/workflow_1.yaml new file mode 100644 index 0000000..fda6a27 --- /dev/null +++ b/javatoscachecker/checker/src/test/tosca/workflow_1.yaml @@ -0,0 +1,46 @@ +tosca_definitions_version: tosca_simple_yaml_1_1_0 + +imports: + - example: tosca/tosca-examples-types.yaml + +topology_template: + + inputs: + software_version: + type: version + default: 0.1 + management_option: + type: string + default: ATT + constraints: + - valid_values: [ ATT, CUSTOMER ] + + node_templates: + my_server: + type: tosca.nodes.Compute + mysql: + type: tosca.nodes.DBMS.MySQL + requirements: + - host: my_server + interfaces: + tosca.interfaces.nodes.custom.Backup: +# operations: + backup: backup.sh + workflows: + backup: + description: Performs a snapshot of the MySQL data. + preconditions: + - target: my_server + condition: + - assert: + - state: [{equal: available}] + - target: mysql + condition: + - assert: + - state: [{valid_values: [started, available]}] + - my_attribute: [{equal: ready }] + steps: + my_step: + target: mysql + activities: + - call_operation: tosca.interfaces.nodes.custom.Backup.backup diff --git a/javatoscachecker/kwalify/LICENSE b/javatoscachecker/kwalify/LICENSE new file mode 100644 index 0000000..369d782 --- /dev/null +++ b/javatoscachecker/kwalify/LICENSE @@ -0,0 +1,20 @@ +copyright(c) 2005 kuwata-lab all rights reserved. 
+ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
+ diff --git a/javatoscachecker/kwalify/pom.xml b/javatoscachecker/kwalify/pom.xml new file mode 100644 index 0000000..7acfa80 --- /dev/null +++ b/javatoscachecker/kwalify/pom.xml @@ -0,0 +1,104 @@ + + 4.0.0 + + + org.onap.tosca + checker + 0.0.1-SNAPSHOT + + kwalify + jar + kwalify + + + src/main/java + + + maven-compiler-plugin + 3.1 + + 1.8 + 1.8 + ${project.build.sourceEncoding} + + + + org.apache.maven.plugins + maven-dependency-plugin + 2.10 + + + copy-dependencies + package + + copy-dependencies + + + ${project.build.directory}/deps + false + false + true + + + + + + org.codehaus.mojo + buildnumber-maven-plugin + 1.4 + + + validate + + create + + + + + false + false + + + + org.apache.maven.plugins + maven-jar-plugin + 2.1 + + + + true + + + ${buildNumber} + + + + + + + + com.blackducksoftware.integration + hub-maven-plugin + 2.0.0 + false + + ${project.name} + ${project.basedir} + false + + + + create-bdio-file + package + + build-bom + + + + + + + + + + diff --git a/javatoscachecker/kwalify/src/main/java/kwalify/BaseException.java b/javatoscachecker/kwalify/src/main/java/kwalify/BaseException.java new file mode 100644 index 0000000..c2cc83b --- /dev/null +++ b/javatoscachecker/kwalify/src/main/java/kwalify/BaseException.java @@ -0,0 +1,51 @@ +/* + * @(#)BaseException.java $Rev: 3 $ $Release: 0.5.1 $ + * + * copyright(c) 2005 kuwata-lab all rights reserved. + */ + +package kwalify; + +/** + * base class of ValidationException and SchemaException. + * + * @revision $Rev: 3 $ + * @release $Release: 0.5.1 $ + */ +public abstract class BaseException extends KwalifyRuntimeException implements Comparable { + + String _ypath; + Object _value; + Rule _rule; + String _errorSymbol; + int _linenum = -1; + + public BaseException(String message, String ypath, Object value, Rule rule, String errorSymbol) { + super(message); + _ypath = ypath; + _value = value; + _rule = rule; + _errorSymbol = errorSymbol; + } + + public String getPath() { return _ypath.equals("") ? 
"/" : _ypath; } + //public void setPath(String ypath) { _ypath = ypath; } + + public Object getValue() { return _value; } + //public void setValue(Object value) { _value = value; } + + public Rule getRule() { return _rule; } + // + //public void setRule(Rule rule) { _rule = rule; } + + public String getErrorSymbol() { return _errorSymbol; } + //public void setErrorSymbol(String errorSymbol) { _errorSymbol = errorSymbol; } + + public int getLineNumber() { return _linenum; } + public void setLineNumber(int linenum) { _linenum = linenum; } + + public int compareTo(Object obj) { + int n = ((ValidationException)obj).getLineNumber(); + return _linenum - n; + } +} diff --git a/javatoscachecker/kwalify/src/main/java/kwalify/CommandOptionException.java b/javatoscachecker/kwalify/src/main/java/kwalify/CommandOptionException.java new file mode 100644 index 0000000..e35be85 --- /dev/null +++ b/javatoscachecker/kwalify/src/main/java/kwalify/CommandOptionException.java @@ -0,0 +1,33 @@ +/* + * @(#)CommandOptionException.java $Rev: 4 $ $Release: 0.5.1 $ + * + * copyright(c) 2005 kuwata-lab all rights reserved. 
+ */ + +package kwalify; + +/** + * exception class thrown if command-line option is wrong + * + * @revision $Rev: 4 $ + * @release $Release: 0.5.1 $ + */ +public class CommandOptionException extends KwalifyException { + private static final long serialVersionUID = 6433387612335104714L; + + private String _error_symbol = null; + private char _option; + + public CommandOptionException(String message, char option, String error_symbol) { + super(message); + _option = option; + _error_symbol = error_symbol; + } + + public String getErrorSymbol() { return _error_symbol; } + public void setErrorSymbol(String error_symbol) { _error_symbol = error_symbol; } + + public char getOption() { return _option; } + public void setOption(char option) { _option = option; } + +} diff --git a/javatoscachecker/kwalify/src/main/java/kwalify/Defaultable.java b/javatoscachecker/kwalify/src/main/java/kwalify/Defaultable.java new file mode 100644 index 0000000..7e7c692 --- /dev/null +++ b/javatoscachecker/kwalify/src/main/java/kwalify/Defaultable.java @@ -0,0 +1,18 @@ +/* + * @(#)Defaultable.java $Rev: 3 $ $Release: 0.5.1 $ + * + * copyright(c) 2005 kuwata-lab all rights reserved. + */ + +package kwalify; + +/** + * interface to have default value + * + * @revision $Rev: 3 $ + * @release $Release: 0.5.1 $ + */ +public interface Defaultable { + public Object getDefault(); + public void setDefault(Object value); +} diff --git a/javatoscachecker/kwalify/src/main/java/kwalify/DefaultableHashMap.java b/javatoscachecker/kwalify/src/main/java/kwalify/DefaultableHashMap.java new file mode 100644 index 0000000..0009205 --- /dev/null +++ b/javatoscachecker/kwalify/src/main/java/kwalify/DefaultableHashMap.java @@ -0,0 +1,48 @@ +/* + * @(#)DefaultableHashMap.java $Rev: 4 $ $Release: 0.5.1 $ + * + * copyright(c) 2005 kuwata-lab all rights reserved. 
+ */ + +package kwalify; + +import java.util.Map; +import java.util.HashMap; + +/** + * hash map which can have default value + * + * @revision $Rev: 4 $ + * @release $Release: 0.5.1 $ + */ +public class DefaultableHashMap extends HashMap implements Defaultable { + + private static final long serialVersionUID = -5224819562023897380L; + + private Object _default = null; + + public DefaultableHashMap() { + super(); + } + + public DefaultableHashMap(int initialCapacity) { + super(initialCapacity); + } + + public DefaultableHashMap(int initialCapacity, float loadFactor) { + super(initialCapacity, loadFactor); + } + + public DefaultableHashMap(Map m) { + super(m); + } + + public Object getDefault() { return _default; } + + public void setDefault(Object value) { _default = value; } + + public Object get(Object key) { + return containsKey(key) ? super.get(key) : _default; + } + +} diff --git a/javatoscachecker/kwalify/src/main/java/kwalify/InvalidPathException.java b/javatoscachecker/kwalify/src/main/java/kwalify/InvalidPathException.java new file mode 100644 index 0000000..94eeca2 --- /dev/null +++ b/javatoscachecker/kwalify/src/main/java/kwalify/InvalidPathException.java @@ -0,0 +1,23 @@ +/* + * @(#)InvalidPathException.java $Rev: 4 $ $Release: 0.5.1 $ + * + * copyright(c) 2005 kuwata-lab all rights reserved. 
+ */ + +package kwalify; + +/** + * exception class thrown by YamlParser#setErrorsLineNumber() when path is wrong + * + * @revision $Rev: 4 $ + * @release $Release: 0.5.1 $ + */ +public class InvalidPathException extends KwalifyRuntimeException { + private static final long serialVersionUID = -4601461998104850880L; + + //private int _linenum; + + public InvalidPathException(String message) { + super(message); + } +} diff --git a/javatoscachecker/kwalify/src/main/java/kwalify/InvalidTypeException.java b/javatoscachecker/kwalify/src/main/java/kwalify/InvalidTypeException.java new file mode 100644 index 0000000..fe60ca0 --- /dev/null +++ b/javatoscachecker/kwalify/src/main/java/kwalify/InvalidTypeException.java @@ -0,0 +1,21 @@ +/* + * @(#)InvalidTypeException.java $Rev: 4 $ $Release: 0.5.1 $ + * + * copyright(c) 2005 kuwata-lab all rights reserved. + */ + +package kwalify; + +/** + * exception class thrown by Util.compareValues() when comparing different type values. + * + * @revision $Rev: 4 $ + * @release $Release: 0.5.1 $ + */ +public class InvalidTypeException extends KwalifyRuntimeException { + private static final long serialVersionUID = -6937887618526171845L; + + public InvalidTypeException(String message) { + super(message); + } +} diff --git a/javatoscachecker/kwalify/src/main/java/kwalify/KwalifyException.java b/javatoscachecker/kwalify/src/main/java/kwalify/KwalifyException.java new file mode 100644 index 0000000..976a263 --- /dev/null +++ b/javatoscachecker/kwalify/src/main/java/kwalify/KwalifyException.java @@ -0,0 +1,20 @@ +/* + * @(#)KwalifyException.java $Rev: 3 $ $Release: 0.5.1 $ + * + * copyright(c) 2005 kuwata-lab all rights reserved. 
+ */ + +package kwalify; + +/** + * base exception class of all exception in Kwalify + * + * @revision $Rev: 3 $ + * @release $Release: 0.5.1 $ + * @see KwalifyRuntimeException + */ +public abstract class KwalifyException extends Exception { + public KwalifyException(String message) { + super(message); + } +} diff --git a/javatoscachecker/kwalify/src/main/java/kwalify/KwalifyRuntimeException.java b/javatoscachecker/kwalify/src/main/java/kwalify/KwalifyRuntimeException.java new file mode 100644 index 0000000..75e4764 --- /dev/null +++ b/javatoscachecker/kwalify/src/main/java/kwalify/KwalifyRuntimeException.java @@ -0,0 +1,19 @@ +/* + * @(#)KwalifyRuntimeException.java $Rev: 3 $ $Release: 0.5.1 $ + * + * copyright(c) 2005 kuwata-lab all rights reserved. + */ + +package kwalify; + +/** + * base class of all runtime exception class in Kwalify + * + * @revision $Rev: 3 $ + * @release $Release: 0.5.1 $ + */ +public abstract class KwalifyRuntimeException extends RuntimeException { + public KwalifyRuntimeException(String message) { + super(message); + } +} diff --git a/javatoscachecker/kwalify/src/main/java/kwalify/Main.java b/javatoscachecker/kwalify/src/main/java/kwalify/Main.java new file mode 100644 index 0000000..d2c39e2 --- /dev/null +++ b/javatoscachecker/kwalify/src/main/java/kwalify/Main.java @@ -0,0 +1,305 @@ +/* + * @(#)Main.java $Rev: 4 $ $Release: 0.5.1 $ + * + * copyright(c) 2005 kuwata-lab all rights reserved. 
+ */ + +package kwalify; + +import java.util.List; +import java.util.Map; +import java.util.HashMap; +import java.util.Iterator; +import java.util.Collections; +import java.util.regex.Matcher; +import java.io.IOException; + +/** + * class for main program + * + * @revision $Rev: 4 $ + * @release $Release: 0.5.1 $ + */ +public class Main { + + private String _command; + private boolean _flag_help = false; // print help + private boolean _flag_version = false; // print version + private boolean _flag_silent = false; // suppress messages + private boolean _flag_meta = false; // meta validation + private boolean _flag_untabify = false; // expand tab charactor to spaces + private boolean _flag_emacs = false; // show errors in emacs style + private boolean _flag_linenum = false; // show line number on where errors happened + private boolean _flag_debug = false; // internal use only + private String _schema_filename = null; // schema filename + private Map _properties = new HashMap(); + + + boolean isDebug() { return _flag_debug; } + + + public String inspect() { + StringBuffer sb = new StringBuffer(); + sb.append("command : ").append(_command ).append('\n'); + sb.append("flag_help : ").append(_flag_help ).append('\n'); + sb.append("flag_version : ").append(_flag_version ).append('\n'); + sb.append("flag_silent : ").append(_flag_silent ).append('\n'); + sb.append("flag_meta : ").append(_flag_meta ).append('\n'); + sb.append("flag_untabify : ").append(_flag_untabify ).append('\n'); + sb.append("flag_emacs : ").append(_flag_emacs ).append('\n'); + sb.append("flag_linenum : ").append(_flag_linenum ).append('\n'); + sb.append("flag_debug : ").append(_flag_debug ).append('\n'); + sb.append("schema_filename : ").append(_schema_filename).append('\n'); + sb.append("properties:\n"); + for (Iterator it = _properties.keySet().iterator(); it.hasNext(); ) { + Object key = it.next(); + Object val = _properties.get(key); + sb.append(" ").append(key).append(": 
").append(val).append('\n'); + } + return sb.toString(); + } + + + private static final String REVISION = "$Release: 0.5.1 $"; + private static final String HELP = "" + + "Usage1: %s [-hvstlE] -f schema.yaml doc.yaml [doc2.yaml ...]\n" + + "Usage2: %s [-hvstlE] -m schema.yaml [schema2.yaml ...]\n" + + " -h, --help : help\n" + + " -v : version\n" + + " -s : silent\n" + + " -f schema.yaml : schema definition file\n" + + " -m : meta-validation mode\n" + + " -t : expand tab character automatically\n" + + " -l : show linenumber when errored (experimental)\n" + + " -E : show errors in emacs-style (implies '-l')\n" + ; + + + public Main(String command) { + _command = command; + } + + + public String execute(String[] args) throws IOException, CommandOptionException, SyntaxException { + // parse command-line options + String[] filenames = parseOptions(args); + + // help or version + StringBuffer sb = new StringBuffer(); + if (_flag_version) { + sb.append(version()).append('\n'); + } + if (_flag_help) { + sb.append(help()); + } + if (sb.length() > 0) { + return sb.toString(); + } + + // main + String s = null; + if (_flag_meta) { + s = metaValidate(filenames); + } else if (_schema_filename == null) { + throw optionError("command.option.noaction", '\0'); + } else if (_flag_debug) { + s = inspectSchemaFile(_schema_filename); + } else { + s = validate(filenames, _schema_filename); + } + + // + return s; + } + + + private String[] parseOptions(String[] args) throws CommandOptionException { + Object[] ret = null; + try { + ret = Util.parseCommandOptions(args, "hvsmtlED", "f", null); + } catch (CommandOptionException ex) { + String error_symbol = ex.getErrorSymbol(); + if (error_symbol.equals("command.option.noarg")) { + switch (ex.getOption()) { + case 'f': error_symbol = "command.option.noschema"; break; + default: + assert false; + } + } + throw optionError(error_symbol, ex.getOption()); + } + // + Map options = (Map)ret[0]; + Map properties = (Map)ret[1]; + String[] filenames 
= (String[])ret[2]; + // + _flag_help = options.get("h") != null; + _flag_version = options.get("v") != null; + _flag_silent = options.get("s") != null; + _flag_meta = options.get("m") != null; + _flag_untabify = options.get("t") != null; + _flag_emacs = options.get("E") != null; + _flag_linenum = options.get("l") != null || _flag_emacs; + _flag_debug = options.get("D") != null; + _schema_filename = (String)options.get("f"); + // + // + _properties = properties; + if (_properties.get("help") != null) { + _flag_help = true; + } + // + return filenames; + } + + + private String validate(String[] filenames, String schema_filename) throws IOException, SyntaxException { + String str = Util.readFile(schema_filename); + if (_flag_untabify) { + str = Util.untabify(str); + } + YamlParser parser = new YamlParser(str); + Object schema = parser.parse(); + Validator validator = new Validator(schema); + String s = validateFiles(validator, filenames); + return s; + } + + + private String validateFiles(Validator validator, String[] filenames) throws IOException, SyntaxException { + if (filenames.length == 0) { + filenames = new String[] { null }; + } + StringBuffer sb = new StringBuffer(); + for (int j = 0; j < filenames.length; j++) { + String filename = filenames[j]; + String str = null; + if (filename == null) { + str = Util.readInputStream(System.in); + filename = "(stdin)"; + } else { + str = Util.readFile(filename); + } + if (_flag_untabify) { + str = Util.untabify(str); + } + YamlParser parser = new YamlParser(str); + int i = 0; + while (parser.hasNext()) { + Object doc = parser.parse(); + validateDocument(sb, validator, doc, filename, i, parser); + i++; + } + } + return sb.toString(); + } + + + private void validateDocument(StringBuffer sb, Validator validator, Object doc, String filename, int i, YamlParser parser) { + if (doc == null) { + Object[] args = { filename, new Integer(i) }; + String msg = Messages.buildMessage("validation.empty", null, args); + 
sb.append(msg).append('\n'); + return; + } + List errors = validator.validate(doc); + Object[] args = { filename, new Integer(i) }; + if (errors == null || errors.size() == 0) { + if (! _flag_silent) { + String msg = Messages.buildMessage("validation.valid", args); + sb.append(msg).append('\n'); + } + } else { + String msg = Messages.buildMessage("validation.invalid", args); + sb.append(msg).append('\n'); + if (_flag_linenum) { + assert parser != null; + parser.setErrorsLineNumber(errors); + Collections.sort(errors); + } + for (Iterator it = errors.iterator(); it.hasNext(); ) { + ValidationException error = (ValidationException)it.next(); + if (_flag_emacs) { + assert _flag_linenum; + sb.append(filename).append(":").append(error.getLineNumber()).append(":"); + } else if (_flag_linenum) { + sb.append(" - (line ").append(error.getLineNumber()).append(")"); + } else { + sb.append(" -"); + } + sb.append(" [").append(error.getPath()).append("] ").append(error.getMessage()).append('\n'); + } + } + } + + + private String metaValidate(String[] filenames) throws IOException, SyntaxException { + Validator meta_validator = MetaValidator.instance(); + String s = validateFiles(meta_validator, filenames); + return s; + } + + + private String inspectSchemaFile(String schema_filename) throws IOException, SyntaxException { + String filename = schema_filename; + String content = filename != null ? 
Util.readFile(filename) : Util.readInputStream(System.in); + YamlParser parser = new YamlParser(content); + Object schema = parser.parse(); + if (schema == null) { + return null; + } + Validator validator = new Validator(schema); // SchemaException is thrown when schema is wrong + String s = validator.getRule().inspect(); + if (s.charAt(s.length() - 1) != '\n') { + s = s + '\n'; + } + return s; + } + + + private static CommandOptionException optionError(String error_symbol, char option) { + Object[] args = { Character.toString(option) }; + String message = Messages.buildMessage(error_symbol, null, args); + return new CommandOptionException(message, option, error_symbol); + } + + + private String version() { + Matcher m = Util.matcher(REVISION, "[.\\d]+"); + m.find(); + String version = m.group(0); + return version; + } + + + private String help() { + String help_msg = Messages.buildMessage("command.help", null, new Object[] { _command, _command }); + //String help = HELP.replaceAll("%s", _command); + return help_msg; + } + + + public static void main(String[] args) throws Exception { + int status = 0; + Main main = null; + try { + main = new Main("kwalify-java"); + String result = main.execute(args); + if (result != null) { + System.out.println(result); + } + } catch (Exception ex) { + if (main != null && main.isDebug()) { + throw ex; + } + if ( ex instanceof CommandOptionException + || ex instanceof SyntaxException + || ex instanceof IOException) { + System.err.println("ERROR: " + ex.getMessage()); + status = 1; + } + } + System.exit(status); + } + +} diff --git a/javatoscachecker/kwalify/src/main/java/kwalify/Messages.java b/javatoscachecker/kwalify/src/main/java/kwalify/Messages.java new file mode 100644 index 0000000..b77f04b --- /dev/null +++ b/javatoscachecker/kwalify/src/main/java/kwalify/Messages.java @@ -0,0 +1,51 @@ +/* + * @(#)Messages.java $Rev: 4 $ $Release: 0.5.1 $ + * + * copyright(c) 2005 kuwata-lab all rights reserved. 
+ */ + +package kwalify; + +import java.util.ResourceBundle; +//import java.util.Locale; + +/** + * set of utility methods around messages. + * + * @revision $Rev: 4 $ + * @release $Release: 0.5.1 $ + */ +public class Messages { + + private static final String __basename = "kwalify.messages"; + private static ResourceBundle __messages = ResourceBundle.getBundle(__basename); + //private static ResourceBundle __messages = ResourceBundle.getBundle(__basename, Locale.getDefault()); + + public static String message(String key) { + return __messages.getString(key); + } + + public static String buildMessage(String key, Object[] args) { + return buildMessage(key, null, args); + } + + public static String buildMessage(String key, Object value, Object[] args) { + String msg = message(key); + assert msg != null; + if (args != null) { + for (int i = 0; i < args.length; i++) { // don't use MessageFormat + msg = msg.replaceFirst("%[sd]", escape(args[i])); + } + } + if (value != null && !Types.isCollection(value)) { + msg = "'" + escape(value) + "': " + msg; + } + return msg; + } + + private static String escape(Object obj) { + //return obj.toString().replaceAll("\\", "\\\\").replace("\n", "\\n"); // J2SK1.4 doesn't support String#replace(CharSequence, CharSequence)! + return obj.toString().replaceAll("\\\\", "\\\\\\\\").replaceAll("\\n", "\\\\n"); + } + +} diff --git a/javatoscachecker/kwalify/src/main/java/kwalify/MetaValidator.java b/javatoscachecker/kwalify/src/main/java/kwalify/MetaValidator.java new file mode 100644 index 0000000..9ce05bd --- /dev/null +++ b/javatoscachecker/kwalify/src/main/java/kwalify/MetaValidator.java @@ -0,0 +1,475 @@ +/* + * @(#)MetaValidator.java $Rev: 4 $ $Release: 0.5.1 $ + * + * copyright(c) 2005 kuwata-lab all rights reserved. 
+ */ + +package kwalify; + +import java.util.Map; +import java.util.List; +import java.util.Iterator; +import java.util.regex.Pattern; +import java.util.regex.Matcher; +import java.util.regex.PatternSyntaxException; + +/** + * meta validator to validate schema definition + * + * @revision $Rev: 4 $ + * @release $Release: 0.5.1 $ + */ +public class MetaValidator extends Validator { + + public static final String META_SCHEMA = "" + + "name: MAIN\n" + + "type: map\n" + + "required: yes\n" + + "mapping: &main-rule\n" + + " \"name\":\n" + + " type: str\n" + + " \"desc\":\n" + + " type: str\n" + + " \"type\":\n" + + " type: str\n" + + " #required: yes\n" + + " enum:\n" + + " - seq\n" + + " #- sequence\n" + + " #- list\n" + + " - map\n" + + " #- mapping\n" + + " #- hash\n" + + " - str\n" + + " #- string\n" + + " - int\n" + + " #- integer\n" + + " - float\n" + + " - number\n" + + " #- numeric\n" + + " - bool\n" + + " #- boolean\n" + + " - text\n" + + " - date\n" + + " - time\n" + + " - timestamp\n" + + " #- object\n" + + " - any\n" + + " - scalar\n" + + " #- collection\n" + + " \"required\":\n" + + " type: bool\n" + + " \"enum\":\n" + + " type: seq\n" + + " sequence:\n" + + " - type: scalar\n" + + " unique: yes\n" + + " \"pattern\":\n" + + " type: str\n" + + " \"assert\":\n" + + " type: str\n" + + " pattern: /\\bval\\b/\n" + + " \"range\":\n" + + " type: map\n" + + " mapping:\n" + + " \"max\":\n" + + " type: scalar\n" + + " \"min\":\n" + + " type: scalar\n" + + " \"max-ex\":\n" + + " type: scalar\n" + + " \"min-ex\":\n" + + " type: scalar\n" + + " \"length\":\n" + + " type: map\n" + + " mapping:\n" + + " \"max\":\n" + + " type: int\n" + + " \"min\":\n" + + " type: int\n" + + " \"max-ex\":\n" + + " type: int\n" + + " \"min-ex\":\n" + + " type: int\n" + + " \"ident\":\n" + + " type: bool\n" + + " \"unique\":\n" + + " type: bool\n" + + " \"sequence\":\n" + + " name: SEQUENCE\n" + + " type: seq\n" + + " sequence:\n" + + " - type: map\n" + + " mapping: *main-rule\n" + + " name: 
MAIN\n" + + " #required: yes\n" + + " \"mapping\":\n" + + " name: MAPPING\n" + + " type: map\n" + + " mapping:\n" + + " =:\n" + + " type: map\n" + + " mapping: *main-rule\n" + + " name: MAIN\n" + + " #required: yes\n" + ; + + + /** + * + * ex. + *
+     *  Validator meta_validator = MetaValidator.instance();
+     *  Map schema = (Map)YamlUtil.loadFile("schema.yaml");
      +     *  List errors = meta_validator.validate(schema);
      +     *  if (errors != null && errors.size() > 0) {
      +     *    for (Iterator it = errors.iterator(); it.hasNext(); ) {
      +     *      ValidationException error = (ValidationException)it.next();
      +     *      System.err.println(" - [" + error.getPath() + "] " + error.getMessage());
      +     *    }
      +     *  }
      +     * 
      + */ + + private static Validator __instance; + + public static Validator instance() { + // should not use double checked pattern? + // but it would work well because __instance is read-only. + if (__instance == null) { + synchronized (MetaValidator.class) { + if (__instance == null) { + try { + Map schema = (Map)YamlUtil.load(META_SCHEMA); + __instance = new MetaValidator(schema); + } catch (SyntaxException ex) { + assert false; + } + } + } + } + return __instance; + } + + private MetaValidator(Map schema) { + super(schema); + } + + public void postValidationHook(Object value, Rule rule, ValidationContext theContext) { + if (value == null) { + return; // realy? + } + if (! "MAIN".equals(rule.getName())) { + return; + } + // + assert value instanceof Map; + Map map = (Map)value; + String type = (String)map.get("type"); + if (type == null) { + type = Types.getDefaultType(); + } + //Class type_class = Types.typeClass(type); + //if (type_class == null) { + // theContext.addError(validationError("type.unknown", rule, path + "/type", type, null)); + //} + // + //String pattern; + //if ((pattern = (String)map.get("pattern")) != null) { + if (map.containsKey("pattern")) { + String pattern = (String)map.get("pattern"); + Matcher m = Util.matcher(pattern, "\\A\\/(.*)\\/([mi]?[mi]?)\\z"); + String pat = m.find() ? m.group(1) : pattern; + try { + Pattern.compile(pat); + } catch (PatternSyntaxException ex) { + theContext.addError("pattern.syntaxerr", rule, "pattern", pattern, null); + } + } + // + //List enum_list; + //if ((enum_list = (List)map.get("enum")) != null) { + if (map.containsKey("enum")) { + List enum_list = (List)map.get("enum"); + if (Types.isCollectionType(type)) { + theContext.addError("enum.notscalar", rule, "enum:", (Object[])null); + } else { + for (Iterator it = enum_list.iterator(); it.hasNext(); ) { + Object elem = it.next(); + if (! 
Types.isCorrectType(elem, type)) { + theContext.addError("enum.type.unmatch", rule, "enum", elem, new Object[] { Types.typeName(type) }); + } + } + } + } + // + //String assert_str; + //if ((assert_str = (String)map.get("assert")) != null) { + if (map.containsKey("assert")) { + System.err.println("*** warning: sorry, 'assert:' is not supported in current version of Kwalify-java."); + //String assert_str = (String)map.get("assert"); + //if (! Util.matches(assert_str, "\\bval\\b")) { + // theContext.addError(validationError("assert.noval", rule, path + "/assert", assert_str, null); + //} + //try { + // Expression.parse(assert_str); + //} catch (InvalidExpressionException ex) { + // theContext.addError(validationError("assert.syntaxerr", rule, path + "/assert", assert_str, null)); + //} + } + // + //Map range; + //if ((range = (Map)map.get("range")) != null) { + if (map.containsKey("range")) { + Map range = (Map)map.get("range"); + //if (! (range instanceof Map)) { + // theContext.addError(validtionError("range.notmap", rule, path + "/range", range, null)); + //} else + if (Types.isCollectionType(type) || type.equals("bool") || type.equals("any")) { + theContext.addError("range.notscalar", rule, "range:", null, null); + } else { + for (Iterator it = range.keySet().iterator(); it.hasNext(); ) { + String k = (String)it.next(); + Object v = range.get(k); + if (! 
Types.isCorrectType(v, type)) { + theContext.addError("range.type.unmatch", rule, "range/" + k, v, new Object[] { Types.typeName(type) }); + } + } + } + if (range.containsKey("max") && range.containsKey("max-ex")) { + theContext.addError("range.twomax", rule, "range", null, null); + } + if (range.containsKey("min") && range.containsKey("min-ex")) { + theContext.addError("range.twomin", rule, "range", null, null); + } + Object max = range.get("max"); + Object min = range.get("min"); + Object max_ex = range.get("max-ex"); + Object min_ex = range.get("min-ex"); + Object[] args = null; + //String error_symbol = null; + if (max != null) { + if (min != null && Util.compareValues(max, min) < 0) { + args = new Object[] { max, min }; + theContext.addError("range.maxltmin", rule, "range", null, args); + } else if (min_ex != null && Util.compareValues(max, min_ex) <= 0) { + args = new Object[] { max, min_ex }; + theContext.addError("range.maxleminex", rule, "range", null, args); + } + } else if (max_ex != null) { + if (min != null && Util.compareValues(max_ex, min) <= 0) { + args = new Object[] { max_ex, min }; + theContext.addError("range.maxexlemin", rule, "range", null, args); + } else if (min_ex != null && Util.compareValues(max_ex, min_ex) <= 0) { + args = new Object[] { max_ex, min_ex }; + theContext.addError("range.maxexleminex", rule, "range", null, args); + } + } + } + // + //Map length; + //if ((length = (Map)map.get("length")) != null) { + if (map.containsKey("length")) { + Map length = (Map)map.get("length"); + //if (! (length instanceof Map)) { + // theContext.addError(validtionError("length.notmap", rule, path + "/length", length, null)); + //} else + if (! (type.equals("str") || type.equals("text"))) { + theContext.addError("length.nottext", rule, "length:", (Object[])null); + } + //for (Iterator it = length.keySet().iterator(); it.hasNext(); ) { + // String k = (String)it.next(); + // Object v = length.get(k); + // if (k == null || ! 
(k.equals("max") || k.equals("min") || k.equals("max-ex") || k.equals("min-ex"))) { + // theContext.addError(validationError("length.undefined", rule, path + "/length/" + k, "" + k + ":", null)); + // } else if (! (v instanceof Integer)) { + // theContext.addError(validationError("length.notint", rule, path + "/length/" + k, v, null)); + // } + //} + if (length.containsKey("max") && length.containsKey("max-ex")) { + theContext.addError("length.twomax", rule, "length", (Object[])null); + } + if (length.containsKey("min") && length.containsKey("min-ex")) { + theContext.addError("length.twomin", rule, "length", (Object[])null); + } + Integer max = (Integer)length.get("max"); + Integer min = (Integer)length.get("min"); + Integer max_ex = (Integer)length.get("max-ex"); + Integer min_ex = (Integer)length.get("min-ex"); + Object[] args = null; + //String error_symbol = null; + if (max != null) { + if (min != null && max.compareTo(min) < 0) { + args = new Object[] { max, min }; + theContext.addError("length.maxltmin", rule, "length", null, args); + } else if (min_ex != null && max.compareTo(min_ex) <= 0) { + args = new Object[] { max, min_ex }; + theContext.addError("length.maxleminex", rule, "length", null, args); + } + } else if (max_ex != null) { + if (min != null && max_ex.compareTo(min) <= 0) { + args = new Object[] { max_ex, min }; + theContext.addError("length.maxexlemin", rule, "length", null, args); + } else if (min_ex != null && max_ex.compareTo(min_ex) <= 0) { + args = new Object[] { max_ex, min_ex }; + theContext.addError("length.maxexleminex", rule, "length", null, args); + } + } + } + // + //Boolean unique; + //if ((unique = (Boolean)map.get("unique")) != null) { + if (map.containsKey("unique")) { + Boolean unique = (Boolean)map.get("unique"); + if (unique.booleanValue() == true && Types.isCollectionType(type)) { + theContext.addError("unique.notscalar", rule, "unique:", (Object[])null); + } + if (theContext.getPath().length() == 0) { + 
theContext.addError("unique.onroot", rule, "", "unique:", null); + } + } + // + //Boolean ident; + //if ((ident = (Boolean)map.get("ident")) != null) { + if (map.containsKey("ident")) { + Boolean ident = (Boolean)map.get("ident"); + if (ident.booleanValue() == true && Types.isCollectionType(type)) { + theContext.addError("ident.notscalar", rule, "ident:", (Object[])null); + } + if (theContext.getPath().length() == 0) { + theContext.addError("ident.onroot", rule, "/", "ident:", (Object[])null); + } + } + // + //List seq; + //if ((seq = (List)map.get("sequence")) != null) { + if (map.containsKey("sequence")) { + List seq = (List)map.get("sequence"); + //if (! (seq instanceof List)) { + // theContext.addError(validationError("sequence.notseq", rule, path + "/sequence", seq, null)); + //} else + if (seq == null || seq.size() == 0) { + theContext.addError("sequence.noelem", rule, "sequence", seq, null); + } else if (seq.size() > 1) { + theContext.addError("sequence.toomany", rule, "sequence", seq, null); + } else { + Object item = seq.get(0); + assert item instanceof Map; + Map m = (Map)item; + Boolean ident2 = (Boolean)m.get("ident"); + if (ident2 != null && ident2.booleanValue() == true && ! "map".equals(m.get("type"))) { + theContext.addError("ident.notmap", null, "sequence/0", "ident:", null); + } + } + } + // + //Map mapping; + //if ((mapping = (Map)map.get("mapping")) != null) { + if (map.containsKey("mapping")) { + Map mapping = (Map)map.get("mapping"); + //if (mapping != null && ! (mapping instanceof Map)) { + // theContext.addError(validationError("mapping.notmap", rule, path + "/mapping", mapping, null)); + //} else + Object default_value = null; + if (mapping != null && mapping instanceof Defaultable) { + default_value = ((Defaultable)mapping).getDefault(); + } + if (mapping == null || (mapping.size() == 0 && default_value == null)) { + theContext.addError("mapping.noelem", rule, "mapping", mapping, null); + } + } + // + if (type.equals("seq")) { + if (! 
map.containsKey("sequence")) { + theContext.addError("seq.nosequence", rule, null, (Object[])null); + } + //if (map.containsKey("enum")) { + // theContext.addError(validationError("seq.conflict", rule, path, "enum:", null)); + //} + if (map.containsKey("pattern")) { + theContext.addError("seq.conflict", rule, "pattern:", (Object[])null); + } + if (map.containsKey("mapping")) { + theContext.addError("seq.conflict", rule, "mapping:", (Object[])null); + } + //if (map.containsKey("range")) { + // theContext.addError(validationError("seq.conflict", rule, path, "range:", null)); + //} + //if (map.containsKey("length")) { + // theContext.addError(validationError("seq.conflict", rule, path, "length:", null)); + //} + } else if (type.equals("map")) { + if (! map.containsKey("mapping")) { + theContext.addError("map.nomapping", rule, null, (Object[])null); + } + //if (map.containsKey("enum")) { + // theContext.addError(validationError("map.conflict", rule, path, "enum:", null)); + //} + if (map.containsKey("pattern")) { + theContext.addError("map.conflict", rule, "pattern:", (Object[])null); + } + if (map.containsKey("sequence")) { + theContext.addError("map.conflict", rule, "sequence:", (Object[])null); + } + //if (map.containsKey("range")) { + // theContext.addError(validationError("map.conflict", rule, path, "range:", null)); + //} + //if (map.containsKey("length")) { + // theContext.addError(validationError("map.conflict", rule, path, "length:", null)); + //} + } else { + if (map.containsKey("sequence")) { + theContext.addError("scalar.conflict", rule, "sequence:", (Object[])null); + } + if (map.containsKey("mapping")) { + theContext.addError("scalar.conflict", rule, "mapping:", (Object[])null); + } + if (map.containsKey("enum")) { + if (map.containsKey("range")) { + theContext.addError("enum.conflict", rule, "range:", (Object[])null); + } + if (map.containsKey("length")) { + theContext.addError("enum.conflict", rule, "length:", (Object[])null); + } + if 
(map.containsKey("pattern")) { + theContext.addError("enum.conflict", rule, "pattern:", (Object[])null); + } + } + } + } + +/* + public static void main(String[] args) { + try { + // parse schema + String filename = args.length > 0 ? args[0] : "schema.yaml"; + String schema_str = Util.readFile(filename); + YamlParser parser = new YamlParser(schema_str); + Object schema = parser.parse(); + + // validate schema + Validator meta_validator = MetaValidator.instance(); + List errors = meta_validator.validate(schema); + + // show errors + if (errors != null && errors.size() > 0) { + parser.setErrorsLineNumber(errors); + for (Iterator it = errors.iterator(); it.hasNext(); ) { + ValidationException error = (ValidationException)it.next(); + int linenum = error.getLineNumber(); + String path = error.getPath(); + String msg = error.getMessage(); + System.out.println("- line " + linenum + ": [" + path + "] " + msg); + } + } else { + System.out.println("meta validation: OK."); + } + } catch (SyntaxException ex) { + ex.printStackTrace(); + } catch (java.io.IOException ex) { + ex.printStackTrace(); + } + } +*/ + +} diff --git a/javatoscachecker/kwalify/src/main/java/kwalify/Parser.java b/javatoscachecker/kwalify/src/main/java/kwalify/Parser.java new file mode 100644 index 0000000..53c6272 --- /dev/null +++ b/javatoscachecker/kwalify/src/main/java/kwalify/Parser.java @@ -0,0 +1,19 @@ +/* + * @(#)Parser.java $Rev: 3 $ $Release: 0.5.1 $ + * + * copyright(c) 2005 kuwata-lab all rights reserved. 
+ */ + +package kwalify; + +/** + * interface for any parser + * + * @revision $Rev: 3 $ + * @release $Release: 0.5.1 $ + */ +public interface Parser { + + public Object parse() throws SyntaxException; + +} diff --git a/javatoscachecker/kwalify/src/main/java/kwalify/PlainYamlParser.java b/javatoscachecker/kwalify/src/main/java/kwalify/PlainYamlParser.java new file mode 100644 index 0000000..6224044 --- /dev/null +++ b/javatoscachecker/kwalify/src/main/java/kwalify/PlainYamlParser.java @@ -0,0 +1,870 @@ +/* + * @(#)PlainYamlParser.java $Rev: 4 $ $Release: 0.5.1 $ + * + * copyright(c) 2005 kuwata-lab all rights reserved. + */ + +package kwalify; + +import java.util.List; +import java.util.ArrayList; +import java.util.Map; +import java.util.HashMap; +import java.util.IdentityHashMap; +import java.util.Iterator; +import java.util.regex.Matcher; +import java.util.Calendar; +import java.util.Date; +import java.util.TimeZone; + +/** + * plain yaml parser class which is a parent of YamlParser class. + * + * ex. + *
      + *  String str = kwalify.Util.readFile("document.yaml");
      + *  kwalify.Parser parser = new kwalify.PlainYamlParser(str);
      + *  Object doc = parser.parse();
      + * 
      + * + * @revision $Rev: 4 $ + * @release $Release: 0.5.1 $ + */ +public class PlainYamlParser implements Parser { + + public static class Alias { + private String _label; + private int _linenum; + + public Alias(String label, int linenum) { + _label = label; + _linenum = linenum; + } + + public String getLabel() { return _label; } + public void setLabel(String label) { _label = label; } + + public int getLineNumber() { return _linenum; } + public void setLineNumber(int linenum) { _linenum = linenum; } + } + + + private String[] _lines; + private String _line = null; + private int _linenum = 0; + private Map _anchors = new HashMap(); + private Map _aliases = new HashMap(); // key: label, value: Integer + private Object _end_flag = null; + private String _sbuf = null; + private int _index = 0; + + public PlainYamlParser(String yaml_str) { + // split yaml_str into _lines + List list = Util.toListOfLines(yaml_str); + int len = list.size(); + _lines = new String[len + 1]; + for (int i = 0; i < len; i++) { + _lines[i + 1] = (String)list.get(i); + } + } + + public Object parse() throws SyntaxException { + Object data = parseChild(0); + if (data == null && _end_flag == ENDFLAG_DOC_BEGIN) { + data = parseChild(0); + } + if (_aliases.size() > 0) { + resolveAliases(data); + } + //System.err.println("*** debug: data = " + Util.inspect(data)); + //System.err.println("*** debug: data = " + data.toString()); + return data; + } + + public boolean hasNext() { + return _end_flag != ENDFLAG_EOF; + } + + public Object[] parseAll() throws SyntaxException { + List docs = new ArrayList(); + while (hasNext()) { + Object doc = parse(); + docs.add(doc); + } + return docs.toArray(); + } + + + protected List createSequence(int linenum) { + return new ArrayList(); + } + + //private List createSequence() { + // return createSequence(_linenum); + //} + + protected void addSequenceValue(List seq, Object value, int linenum) { + seq.add(value); + } + + protected void setSequenceValueAt(List 
seq, int index, Object value, int linenum) { + seq.set(index, value); + } + + protected Map createMapping(int linenum) { + return new DefaultableHashMap(); + } + + //private Map createMapping() { + // return createMapping(_linenum); + //} + + protected void setMappingValueWith(Map map, Object key, Object value, int linenum) { + map.put(key, value); + } + + protected void setMappingDefault(Map map, Object value, int linenum) { + if (map instanceof Defaultable) { + ((Defaultable)map).setDefault(value); + } + } + + protected void mergeMapping(Map map, Map map2, int linenum) { + for (Iterator it = map2.keySet().iterator(); it.hasNext(); ) { + Object key = it.next(); + if (! map.containsKey(key)) { + Object value = map2.get(key); + map.put(key, value); + } + } + } + + protected void mergeList(Map map, List maplist, int linenum) throws SyntaxException { + for (Iterator it = maplist.iterator(); it.hasNext(); ) { + Object elem = it.next(); + mergeCollection(map, elem, linenum); + } + } + + protected void mergeCollection(Map map, Object collection, int linenum) throws SyntaxException { + if (collection instanceof Map) { + mergeMapping(map, (Map)collection, linenum); + } else if (collection instanceof List) { + mergeList(map, (List)collection, linenum); + } else { + throw syntaxError("'<<' requires collection (mapping, or sequence of mapping)."); + } + } + + protected Object createScalar(Object value, int linenum) { + return value; + } + + private Object createScalar(Object value) { + return createScalar(value, _linenum); + } + + protected String currentLine() { + return _line; + } + + protected int currentLineNumber() { + return _linenum; + } + + protected String getLine() { + String line; + do { + line = _getLine_(); + } while (line != null && Util.matches(line, "^\\s*($|#)")); + return line; + } + + protected String _getLine_() { + if (++_linenum < _lines.length) { + _line = _lines[_linenum]; + if (Util.matches(_line, "^\\.\\.\\.$")) { + _line = null; + _end_flag = 
ENDFLAG_DOC_END; + } else if (Util.matches(_line, "^---( [!%].*)?$")) { + _line = null; + _end_flag = ENDFLAG_DOC_BEGIN; + } + } else { + _line = null; + _end_flag = ENDFLAG_EOF; + } + return _line; + } + + protected static final String ENDFLAG_EOF = ""; + protected static final String ENDFLAG_DOC_BEGIN = "---"; + protected static final String ENDFLAG_DOC_END = "..."; + + private void resetBuffer(String str) { + _sbuf = str.charAt(str.length() - 1) == '\n' ? str : str + "\n"; + _index = -1; + } + + + private int _getChar_() { + if (_index + 1 < _sbuf.length()) { + _index++; + } else { + String line = getLine(); + if (line == null) return -1; + resetBuffer(line); + _index++; + } + int ch = _sbuf.charAt(_index); + return ch; + } + + private int getChar() { + int ch; + do { + ch = _getChar_(); + } while (ch >= 0 && isWhite(ch)); + return ch; + } + + private int getCharOrNewline() { + int ch; + do { + ch = _getChar_(); + } while (ch >= 0 && isWhite(ch) && ch != '\n'); + return ch; + } + + private int currentChar() { + return _sbuf.charAt(_index); + } + + private SyntaxException syntaxError(String message, int linenum) { + return new YamlSyntaxException(message, linenum); + } + + private SyntaxException syntaxError(String message) { + return new SyntaxException(message, _linenum); + } + + private Object parseChild(int column) throws SyntaxException { + String line = getLine(); + if (line == null) { + return createScalar(null); + } + Matcher m = Util.matcher(line, "^( *)(.*)"); + if (! 
m.find()) { + assert false; + return null; + } + int indent = m.group(1).length(); + if (indent < column) { + return createScalar(null); + } + String value = m.group(2); + return parseValue(column, value, indent); + } + + private Object parseValue(int column, String value, int value_start_column) throws SyntaxException { + Object data; + if (Util.matches(value, "^-( |$)")) { + data = parseSequence(value_start_column, value); + } else if (Util.matches(value, "^((?::?[-.\\w]+|'.*?'|\".*?\"|=|<<) *):(( +)(.*))?$")) { + data = parseMapping(value_start_column, value); + } else if (Util.matches(value, "^[\\[\\{]")) { + data = parseFlowStyle(column, value); + } else if (Util.matches(value, "^\\&[-\\w]+( |$)")) { + data = parseAnchor(column, value); + } else if (Util.matches(value, "^\\*[-\\w]+( |$)")) { + data = parseAlias(column, value); + } else if (Util.matches(value, "^[|>]")) { + data = parseBlockText(column, value); + } else if (Util.matches(value, "^!")) { + data = parseTag(column, value); + } else if (Util.matches(value, "^\\#")) { + data = parseChild(column); + } else { + data = parseScalar(column, value); + } + return data; + } + + private static boolean isWhite(int ch) { + return ch == ' ' || ch == '\t' || ch == '\n' || ch == '\r'; + } + + + private Object parseFlowStyle(int column, String value) throws SyntaxException { + resetBuffer(value); + getChar(); + Object data = parseFlow(0); + int ch = currentChar(); + assert ch == ']' || ch == '}'; + ch = getCharOrNewline(); + if (ch != '\n' && ch != '#' && ch >= 0) { + throw syntaxError("flow style sequence is closed buf got '" + ((char)ch) + "'."); + } + if (ch >= 0) getLine(); + return data; + } + + private Object parseFlow(int depth) throws SyntaxException { + int ch = currentChar(); + //ch = getChar(); + if (ch < 0) { + throw syntaxError("found EOF when parsing flow style."); + } + Object data; + if (ch == '[') { + data = parseFlowSequence(depth); + } else if (ch == '{') { + data = parseFlowMapping(depth); + } 
else { + data = parseFlowScalar(depth); + } + return data; + } + + private List parseFlowSequence(int depth) throws SyntaxException { + assert currentChar() == '['; + List seq = createSequence(_linenum); + int ch = getChar(); + if (ch != '}') { + int linenum = currentLineNumber(); + //seq.add(parseFlowSequenceItem(depth+1); + addSequenceValue(seq, parseFlowSequenceItem(depth + 1), linenum); + while ((ch = currentChar()) == ',') { + ch = getChar(); + if (ch == '}') { + throw syntaxError("sequence item required (or last comma is extra)."); + } + //if (ch == '?') break; + linenum = currentLineNumber(); + //seq.add(parseFlowSequenceItem(depth+1); + addSequenceValue(seq, parseFlowSequenceItem(depth + 1), linenum); + } + } + if (currentChar() != ']') { + throw syntaxError("flow style sequence requires ']'."); + } + if (depth > 0) getChar(); + return seq; + } + + private Object parseFlowSequenceItem(int depth) throws SyntaxException { + return parseFlow(depth); + } + + private Map parseFlowMapping(int depth) throws SyntaxException { + assert currentChar() == '{'; + Map map = createMapping(_linenum); + int ch = getChar(); + if (ch != '}') { + int linenum = currentLineNumber(); + Object[] pair = parseFlowMappingItem(depth + 1); + Object key = pair[0]; + Object value = pair[1]; + //map[ke] = value + setMappingValueWith(map, key, value, linenum); + while ((ch = currentChar()) == ',') { + ch = getChar(); + if (ch == '}') { + throw syntaxError("mapping item required (or last comman is extra."); + } + //if (ch == '}') break; + linenum = currentLineNumber(); + pair = parseFlowMappingItem(depth + 1); + key = pair[0]; + value = pair[1]; + //map.put(key) = value; + setMappingValueWith(map, key, value, linenum); + } + } + if (currentChar() != '}') { + throw syntaxError("flow style mapping requires '}'."); + } + if (depth > 0) getChar(); + return map; + } + + private Object[] parseFlowMappingItem(int depth) throws SyntaxException { + Object key = parseFlow(depth); + int ch = 
currentChar(); + if (ch != ':') { + String s = ch >= 0 ? "'" + ((char)ch) + "'" : "EOF"; + throw syntaxError("':' expected but got " + s); + } + getChar(); + Object value = parseFlow(depth); + return new Object[] { key, value }; + } + + private Object parseFlowScalar(int depth) throws SyntaxException { + int ch = currentChar(); + Object scalar = null; + StringBuffer sb = new StringBuffer(); + if (ch == '"' || ch == '\'') { + int endch = ch; + while ((ch = _getChar_()) >= 0 && ch != endch) { + sb.append((char)ch); + } + getChar(); + scalar = sb.toString(); + } else { + sb.append((char)ch); + while ((ch = _getChar_()) >= 0 && ch != ':' && ch != ',' && ch != ']' && ch != '}') { + sb.append((char)ch); + } + scalar = toScalar(sb.toString().trim()); + } + return createScalar(scalar); + } + + private Object parseTag(int column, String value) throws SyntaxException { + assert Util.matches(value, "^!\\S+"); + Matcher m = Util.matcher(value, "^!(\\S+)((\\s+)(.*))?$"); + if (! m.find()) { + assert false; + return null; + } + String tag = m.group(1); + String space = m.group(3); + String value2 = m.group(4); + Object data; + if (value2 != null && value2.length() > 0) { + int value_start_column = column + 1 + tag.length() + space.length(); + data = parseValue(column, value2, value_start_column); + } else { + data = parseChild(column); + } + return data; + } + + private Object parseAnchor(int column, String value) throws SyntaxException { + assert Util.matches(value, "^\\&([-\\w]+)(( *)(.*))?$"); + Matcher m = Util.matcher(value, "^\\&([-\\w]+)(( *)(.*))?$"); + if (! 
m.find()) { + assert false; + return null; + } + String label = m.group(1); + String space = m.group(3); + String value2 = m.group(4); + Object data; + if (value2 != null && value2.length() > 0) { + int value_start_column = column + 1 + label.length() + space.length(); + data = parseValue(column, value2, value_start_column); + } else { + data = parseChild(column); + } + registerAnchor(label, data); + return data; + } + + private void registerAnchor(String label, Object data) throws SyntaxException { + if (_anchors.containsKey(label)) { + throw syntaxError("anchor '" + label + "' is already used."); + } + _anchors.put(label, data); + } + + private Object parseAlias(int column, String value) throws SyntaxException { + assert value.matches("^\\*([-\\w]+)(( *)(.*))?$"); + Matcher m = Util.matcher(value, "^\\*([-\\w]+)(( *)(.*))?$"); + if (! m.find()) { + assert false; + return null; + } + String label = m.group(1); + //String space = m.group(3); + String value2 = m.group(4); + if (value2 != null && value2.length() > 0 && value2.charAt(0) != '#') { + throw syntaxError("alias cannot take any data."); + } + Object data = _anchors.get(label); + if (data == null) { + //throw syntaxError("anchor '" + label "' not found (cannot refer to backward or child anchor)."); + data = registerAlias(label); + } + getLine(); + return data; + } + + private Alias registerAlias(String label) throws SyntaxException { + Integer count = (Integer)_aliases.get(label); + if (count == null) { + _aliases.put(label, new Integer(1)); + } else { + _aliases.put(label, new Integer(count.intValue() + 1)); + } + return new Alias(label, _linenum); + } + + + private void resolveAliases(Object data) throws SyntaxException { // List or Map + Map resolved = new IdentityHashMap(); + resolveAliases(data, resolved); + } + + + private void resolveAliases(Object data, Map resolved) throws SyntaxException { + if (resolved.containsKey(data)) { + return; + } + resolved.put(data, data); + if (data instanceof List) { + 
resolveAliases((List)data, resolved); + } else if (data instanceof Map) { + resolveAliases((Map)data, resolved); + } else { + assert !(data instanceof Alias); + } + if (data instanceof Defaultable) { + Object default_value = ((Defaultable)data).getDefault(); + if (default_value != null) { + resolveAliases(default_value, resolved); + } + } + } + + private void resolveAliases(List seq, Map resolved) throws SyntaxException { + int len = seq.size(); + for (int i = 0; i < len; i++) { // don't use itrator not to raise java.util.ConcurrentModificationException + Object val = seq.get(i); + if (val instanceof Alias) { + Alias alias = (Alias)val; + String label = alias.getLabel(); + if (_anchors.containsKey(label)) { + //seq.set(i, _anchors.get(label); + setSequenceValueAt(seq, i, _anchors.get(label), alias.getLineNumber()); + } else { + throw syntaxError("anchor '" + alias.getLabel() + "' not found."); + } + } else if (val instanceof List || val instanceof Map) { + resolveAliases(val, resolved); + } + } + } + + private void resolveAliases(Map map, Map resolved) throws SyntaxException { + for (Iterator it = map.keySet().iterator(); it.hasNext(); ) { + Object key = it.next(); + Object val = map.get(key); + if (val instanceof Alias) { + Alias alias = (Alias)val; + String label = alias.getLabel(); + if (_anchors.containsKey(label)) { + //map.put(key, _anchors.get(label)); + setMappingValueWith(map, key, _anchors.get(label), alias.getLineNumber()); + } else { + throw syntaxError("anchor '" + alias.getLabel() + "' not found.", alias.getLineNumber()); + } + } else if (val instanceof List || val instanceof Map) { + resolveAliases(val, resolved); + } + } + } + + + +/* + private Object parseBlockText(int column, String value) throws SyntaxException { + assert Util.matches(value, "^[>|\\|]"); + Matcher m = Util.matcher(value, "^([>|\\|])([-+]?)\\s*(.*)$"); + if (! 
m.find()) { + assert false; + return null; + } + String blockchar = m.group(1); + String indicator = m.group(2); + String sep = blockchar.equals("|") ? "\n" : " "; + //String text = m.group(3).length() > 0 ? "" : m.group(3) + sep; + String text = m.group(3); + StringBuffer sb = new StringBuffer(); + StringBuffer empty = new StringBuffer(); + int min_indent = -1; + String line; + Pattern pat2 = Pattern.compile("^( *)(.*)"); + while ((line = _getLine_()) != null) { + (m = pat2.matcher(line)).find(); + int indent = m.group(1).length(); + if (m.group(2).length() == 0) { + empty.append("\n"); + } else if (indent < column) { + break; + } else { + if (min_indent < 0 || min_indent > indent) { + min_indent = indent; + } + sb.append(empty.toString()); + sb.append(line); + empty.delete(0, empty.length()); + } + } + if (indicator.equals("+")) { + sb.append(empty); + } else if (indicator.equals("-")) { + sb.deleteCharAt(sb.length() - 1); + } + String s; + if (min_indent <= 0) { + s = sb.toString(); + } else { + StringBuffer regex = new StringBuffer("(?m)^"); + for (int i = 0; i < min_indent; i++) regex.append(" "); + s = sb.toString().replaceAll(regex.toString(), ""); + } + if (blockchar.equals(">")) { + StringBuffer sb2 = new StringBuffer(); + int len = s.length(); + int n = 0; + for (int i = 0; i < len; i++) { + char ch = s.charAt(i); + if (ch == '\n') { + n++; + } else { + if (n == 1) { + sb2.append(' '); n = 0; + } else if (n > 1) { + sb2.append('\n'); n = 0; + } + sb2.append(ch); + } + } + s = sb2.toString(); + } + if (currentLine() != null && Util.matches(currentLine(), "^\\s*#")) getLine(); + return createScalar(text + s); + } +*/ + + private Object parseBlockText(int column, String value) throws SyntaxException { + assert Util.matches(value, "^[>|]"); + Matcher m = Util.matcher(value, "^([>|])([-+]?)(\\d*)\\s*(.*)$"); + if (! m.find()) { + assert false; + return null; + } + char blockchar = m.group(1).length() > 0 ? 
m.group(1).charAt(0) : '\0'; + char indicator = m.group(2).length() > 0 ? m.group(2).charAt(0) : '\0'; + int indent = m.group(3).length() > 0 ? Integer.parseInt(m.group(3)) : -1; + String text = m.group(4); + char sep = blockchar == '|' ? '\n' : ' '; + String line; + StringBuffer sb = new StringBuffer(); + int n = 0; + while ((line = _getLine_()) != null) { + m = Util.matcher(line, "^( *)(.*)$"); + m.find(); + String space = m.group(1); + String str = m.group(2); + if (indent < 0) indent = space.length(); + if (str.length() == 0) { // empty line + n++; + } else { + int slen = space.length(); + if (slen < column) { + break; + } else if (slen < indent) { + throw syntaxError("invalid indent in block text."); + } else { + if (n > 0) { + if (blockchar == '>' && sb.length() > 0) { + sb.deleteCharAt(sb.length() - 1); + } + for (int i = 0; i < n; i++) { + sb.append('\n'); + } + n = 0; + } + str = line.substring(indent); + } + } + sb.append(str); + if (blockchar == '>') { + if (sb.charAt(sb.length() - 1) == '\n') { + sb.setCharAt(sb.length() - 1, ' '); + } + } + } + if (line != null && Util.matches(line, "^ *#")) { + getLine(); + } + switch (indicator) { + case '+': + if (n > 0) { + if (blockchar == '>') { + sb.setCharAt(sb.length() - 1, '\n'); + } + for (int i = 0; i < n; i++) { + sb.append('\n'); + } + } + break; + case '-': + if (sb.charAt(sb.length() - 1) == sep) { + sb.deleteCharAt(sb.length() - 1); + } + break; + default: + if (blockchar == '>') { + sb.setCharAt(sb.length() - 1, '\n'); + } + } + return createScalar(text + sb.toString()); + } + + + private List parseSequence(int column, String value) throws SyntaxException { + assert Util.matches(value, "^-(( +)(.*))?$"); + List seq = createSequence(_linenum); + while (true) { + Matcher m = Util.matcher(value, "^-(( +)(.*))?$"); + if (! 
m.find()) { + throw syntaxError("sequence item is expected."); + } + String space = m.group(2); + String value2 = m.group(3); + int column2 = column + 1; + int linenum = currentLineNumber(); + // + Object elem; + if (value2 == null || value2.length() == 0) { + elem = parseChild(column2); + } else { + int value_start_column = column2 + space.length(); + elem = parseValue(column2, value2, value_start_column); + } + addSequenceValue(seq, elem, linenum); + // + String line = currentLine(); + if (line == null) break; + Matcher m2 = Util.matcher(line, "^( *)(.*)"); + m2.find(); + int indent = m2.group(1).length(); + if (indent < column) { + break; + } else if (indent > column) { + throw syntaxError("invalid indent of sequence."); + } + value = m2.group(2); + } + return seq; + } + + + private Map parseMapping(int column, String value) throws SyntaxException { + assert Util.matches(value, "^((?::?[-.\\w]+|'.*?'|\".*?\"|=|<<) *):(( +)(.*))?$"); + Map map = createMapping(_linenum); + while (true) { + Matcher m = Util.matcher(value, "^((?::?[-.\\w]+|'.*?'|\".*?\"|=|<<) *):(( +)(.*))?$"); + if (! 
m.find()) { + throw syntaxError("mapping item is expected."); + } + String v = m.group(1).trim(); + Object key = toScalar(v); + String value2 = m.group(4); + int column2 = column + 1; + int linenum = currentLineNumber(); + // + Object elem; + if (value2 == null || value2.length() == 0) { + elem = parseChild(column2); + } else { + int value_start_column = column2 + m.group(1).length() + m.group(3).length(); + elem = parseValue(column2, value2, value_start_column); + } + if (v.equals("=")) { + setMappingDefault(map, elem, linenum); + } else if (v.equals("<<")) { + mergeCollection(map, elem, linenum); + } else { + setMappingValueWith(map, key, elem, linenum); + } + // + String line = currentLine(); + if (line == null) { + break; + } + Matcher m2 = Util.matcher(line, "^( *)(.*)"); + m2.find(); + int indent = m2.group(1).length(); + if (indent < column) { + break; + } else if (indent > column) { + throw syntaxError("invalid indent of mapping."); + } + value = m2.group(2); + } + return map; + } + + + private Object parseScalar(int indent, String value) throws SyntaxException { + Object data = createScalar(toScalar(value)); + getLine(); + return data; + } + + + private Object toScalar(String value) { + Matcher m; + if ((m = Util.matcher(value, "^\"(.*)\"([ \t]*#.*$)?")).find()) { + return m.group(1); + } else if ((m = Util.matcher(value, "^'(.*)'([ \t]*#.*$)?")).find()) { + return m.group(1); + } else if ((m = Util.matcher(value, "^(.*\\S)[ \t]*#")).find()) { + value = m.group(1); + } + // + if (Util.matches(value, "^-?0x\\d+$")) return new Integer(Integer.parseInt(value, 16)); + else if (Util.matches(value, "^-?0\\d+$")) return new Integer(Integer.parseInt(value, 8)); + else if (Util.matches(value, "^-?\\d+$")) return new Integer(Integer.parseInt(value, 10)); + else if (Util.matches(value, "^-?\\d+\\.\\d+$")) return new Double(Double.parseDouble(value)); + else if (Util.matches(value, "^(true|yes|on)$")) return Boolean.TRUE; + else if (Util.matches(value, 
"^(false|no|off)$")) return Boolean.FALSE; + else if (Util.matches(value, "^(null|~)$")) return null; + else if (Util.matches(value, "^:(\\w+)$")) return value; + else if ((m = Util.matcher(value, "^(\\d\\d\\d\\d)-(\\d\\d)-(\\d\\d)$")).find()) { + int year = Integer.parseInt(m.group(1)); + int month = Integer.parseInt(m.group(2)); + int day = Integer.parseInt(m.group(3)); + Calendar cal = Calendar.getInstance(); + cal.set(year, month, day, 0, 0, 0); + Date date = cal.getTime(); + return date; + } else if ((m = Util.matcher(value, "^(\\d\\d\\d\\d)-(\\d\\d)-(\\d\\d)(?:[Tt]|[ \t]+)(\\d\\d?):(\\d\\d):(\\d\\d)(\\.\\d*)?(?:Z|[ \t]*([-+]\\d\\d?)(?::(\\d\\d))?)?$")).find()) { + int year = Integer.parseInt(m.group(1)); + int month = Integer.parseInt(m.group(2)); + int day = Integer.parseInt(m.group(3)); + int hour = Integer.parseInt(m.group(4)); + int min = Integer.parseInt(m.group(5)); + int sec = Integer.parseInt(m.group(6)); + //int usec = Integer.parseInt(m.group(7)); + //int tzone_h = Integer.parseInt(m.group(8)); + //int tzone_m = Integer.parseInt(m.group(9)); + String timezone = "GMT" + m.group(8) + ":" + m.group(9); + Calendar cal = Calendar.getInstance(); + cal.set(year, month, day, hour, min, sec); + cal.setTimeZone(TimeZone.getTimeZone(timezone)); + Date date = cal.getTime(); + return date; + } else { + return value; + } + } + +/* + public static void main(String[] args) throws Exception { + String filename = args.length > 0 ? 
args[0] : "test.yaml"; + String s = Util.readFile(filename); + PlainYamlParser parser = new PlainYamlParser(s); + while (parser.hasNext()) { + Object doc = parser.parse(); + System.out.println(Util.inspect(doc)); + } + } +*/ + +} diff --git a/javatoscachecker/kwalify/src/main/java/kwalify/Rule.java b/javatoscachecker/kwalify/src/main/java/kwalify/Rule.java new file mode 100644 index 0000000..558525d --- /dev/null +++ b/javatoscachecker/kwalify/src/main/java/kwalify/Rule.java @@ -0,0 +1,673 @@ +/* + * @(#)Rule.java $Rev: 4 $ $Release: 0.5.1 $ + * + * copyright(c) 2005 kuwata-lab all rights reserved. + */ + +package kwalify; + +import java.util.List; +import java.util.ArrayList; +import java.util.Map; +import java.util.HashMap; +import java.util.IdentityHashMap; +import java.util.Iterator; +import java.util.regex.Pattern; +import java.util.regex.Matcher; +import java.util.regex.PatternSyntaxException; + +/** + * rule for validation. + * Validator class generates rule instances. + * + * @revision $Rev: 4 $ + * @release $Release: 0.5.1 $ + */ +public class Rule { + + /* + * instance variables + */ + + private Rule _parent; + private String _name = null; + private String _desc = null; + private String _short = null; //added by jora: only used for map types + private String _ref = null; //added by jora: only used for ref types (references to other rules by name) + private boolean _required = false; + private String _type = null; + private Class _type_class = null; + private String _pattern = null; + private Pattern _pattern_regexp = null; + private List _enum = null; + private List _sequence = null; + private DefaultableHashMap _mapping = null; + private String _assert = null; + private Map _range = null; + private Map _length = null; + private boolean _ident = false; + private boolean _unique = false; + + + /* + * accessors + */ + + public String getName() { return _name; } + public void setName(String name) { _name = name; } + + public String getDesc() { return _desc; 
} + public void setDesc(String desc) { _desc = desc; } + + public String getShort() { return _short; } + public void setShort(String val) { _short = val; } + + public String getReference() { return _ref; } + public void setReference(String ref) { _ref = ref; } + + public boolean isRequired() { return _required; } + public void setRequired(boolean required) { _required = required; } + + public String getType() { return _type; } + public void setType(String type) { _type = type; } + + public Class getTypeClass() { return _type_class; } + public void setTypeClass(Class type_class) { _type_class = type_class; } + + public String getPattern() { return _pattern; } + public void setPattern(String pattern) { _pattern = pattern; } + + public Pattern getPatternRegexp() { return _pattern_regexp; } + public void setPatternRegexp(Pattern patternRegexp) { _pattern_regexp = patternRegexp; } + + public List getEnum() { return _enum; } + public void setEnum(List enumList) { _enum = enumList; } + + public List getSequence() { return _sequence; } + public void setSequence(List sequence) { _sequence = sequence; } + + public DefaultableHashMap getMapping() { return _mapping; } + public void setMapping(DefaultableHashMap mapping) { _mapping = mapping; } + + public String getAssert() { return _assert; } + public void setAssert(String assertString) { _assert = assertString; } + + public Map getRange() { return _range; } + public void setRange(Map range) { _range = range; } + + public Map getLength() { return _length; } + public void setLength(Map length) { _length = length; } + + public boolean isIdent() { return _ident; } + public void setIdent(boolean ident) { _ident = ident; } + + public boolean isUnique() { return _unique; } + public void setUnique(boolean unique) { _unique = unique; } + + + /* + * constructors + */ + + public Rule(Object schema, Rule parent) throws SchemaException { + if (schema != null) { + if (! 
(schema instanceof Map)) { + throw schemaError("schema.notmap", null, "/", null, null); + } + Map rule_table = new IdentityHashMap(); + init((Map)schema, "", rule_table); + } + _parent = parent; + } + + public Rule(Object schema) throws SchemaException { + this(schema, null); + } + + public Rule(Map schema, Rule parent) throws SchemaException { + if (schema != null) { + Map rule_table = new IdentityHashMap(); + init(schema, "", rule_table); + } + _parent = parent; + } + + public Rule(Map schema) throws SchemaException { + this(schema, null); + } + + public Rule() throws SchemaException { + this(null, null); + } + + /* + * constants + */ + + private static final int CODE_NAME = "name".hashCode(); + private static final int CODE_DESC = "desc".hashCode(); + private static final int CODE_SHORT = "short".hashCode(); //jora + private static final int CODE_RULE = "rule".hashCode(); //jora + private static final int CODE_REQUIRED = "required".hashCode(); + private static final int CODE_TYPE = "type".hashCode(); + private static final int CODE_PATTERN = "pattern".hashCode(); + private static final int CODE_ENUM = "enum".hashCode(); + private static final int CODE_SEQUENCE = "sequence".hashCode(); + private static final int CODE_MAPPING = "mapping".hashCode(); + private static final int CODE_ASSERT = "assert".hashCode(); + private static final int CODE_RANGE = "range".hashCode(); + private static final int CODE_LENGTH = "length".hashCode(); + private static final int CODE_IDENT = "ident".hashCode(); + private static final int CODE_UNIQUE = "unique".hashCode(); + + + + /* + * instance methods + */ + public Rule getParent() { // by jora + return this._parent; + } + + + private static SchemaException schemaError(String errorSymbol, Rule rule, String path, Object value, Object[] args) { + String msg = Messages.buildMessage(errorSymbol, value, args); + return new SchemaException(msg, path, value, rule, errorSymbol); + } + + + private void init(Object elem, String path, Map 
rule_table) throws SchemaException { + assert elem != null; + if (! (elem instanceof Map)) { + if (path == null || path.equals("")) { + path = "/"; + } + throw schemaError("schema.notmap", null, path, null, null); + } + init((Map)elem, path, rule_table); + + } + + + private void init(Map hash, String path, Map rule_table) throws SchemaException { + Rule rule = this; + rule_table.put(hash, rule); + + // 'type:' entry + Object type = hash.get("type"); + initTypeValue(type, rule, path); + + // other entries + for (Iterator it = hash.keySet().iterator(); it.hasNext(); ) { + Object key = it.next(); + Object value = hash.get(key); + int code = key.hashCode(); + + if (code == CODE_TYPE && key.equals("type")) { + // done + } else if (code == CODE_NAME && key.equals("name")) { + initNameValue(value, rule, path); + } else if (code == CODE_DESC && key.equals("desc")) { + initDescValue(value, rule, path); + } else if (code == CODE_SHORT && key.equals("short")) { + initShortValue(value, rule, path); + } else if (code == CODE_REQUIRED && key.equals("required")) { + initRequiredValue(value, rule, path); + } else if (code == CODE_PATTERN && key.equals("pattern")) { + initPatternValue(value, rule, path); + } else if (code == CODE_ENUM && key.equals("enum")) { + initEnumValue(value, rule, path); + } else if (code == CODE_ASSERT && key.equals("assert")) { + initAssertValue(value, rule, path); + } else if (code == CODE_RANGE && key.equals("range")) { + initRangeValue(value, rule, path); + } else if (code == CODE_LENGTH && key.equals("length")) { + initLengthValue(value, rule, path); + } else if (code == CODE_IDENT && key.equals("ident")) { + initIdentValue(value, rule, path); + } else if (code == CODE_UNIQUE && key.equals("unique")) { + initUniqueValue(value, rule, path); + } else if (code == CODE_SEQUENCE && key.equals("sequence")) { + rule = initSequenceValue(value, rule, path, rule_table); + } else if (code == CODE_MAPPING && key.equals("mapping")) { + rule = 
initMappingValue(value, rule, path, rule_table); + } else if (code == CODE_RULE && key.equals("rule")) { + rule = initReferenceValue(value, rule, path, rule_table); + } else { + // removed by 'jora': interferes with the usage of YAML anchor/alias in grammar files + // throw schemaError("key.unknown", rule, path + "/" + key, key.toString() + ":", null); + } + } + + // confliction check + checkConfliction(hash, rule, path); + } + + + private void initTypeValue(Object value, Rule rule, String path) throws SchemaException { + if (value == null) { + value = Types.getDefaultType(); + } + if (! (value instanceof String)) { + throw schemaError("type.notstr", rule, path + "/type", _type, null); + } + _type = (String)value; + _type_class = Types.typeClass(_type); + if (! Types.isBuiltinType(_type)) { + throw schemaError("type.unknown", rule, path + "/type", _type, null); + } + } + + + private void initNameValue(Object value, Rule rule, String path) throws SchemaException { + _name = value.toString(); + } + + + private void initDescValue(Object value, Rule rule, String path) throws SchemaException { + _desc = value.toString(); + } + + private void initShortValue(Object value, Rule rule, String path) throws SchemaException { + + //the short form specification is to be interpreted as key if the type is a map or as an + //index if the target is a sequence (as index 0 actually) + if (!Types.isCollectionType(_type)) { + throw schemaError("range.notcollection", rule, path + "/short", value, null); + } + //we should also verify that it points to a declared key of the mapping .. not really, as it would + //fail the overall grammar + _short = value.toString(); + } + + private void initRequiredValue(Object value, Rule rule, String path) throws SchemaException { + if (! 
(value instanceof Boolean)) { + throw schemaError("required.notbool", rule, path + "/required", value, null); + } + _required = ((Boolean)value).booleanValue(); + } + + + private void initPatternValue(Object value, Rule rule, String path) throws SchemaException { + if (! (value instanceof String)) { + throw schemaError("pattern.notstr", rule, path + "/pattern", value, null); + } + _pattern = (String)value; + Matcher m = Util.matcher(_pattern, "\\A/(.*)/([mi]?[mi]?)\\z"); + if (! m.find()) { + throw schemaError("pattern.notmatch", rule, path + "/pattern", value, null); + } + String pat = m.group(1); + String opt = m.group(2); + int flag = 0; + if (opt.indexOf('i') >= 0) { + flag += Pattern.CASE_INSENSITIVE; + } + if (opt.indexOf('m') >= 0) { + flag += Pattern.DOTALL; // not MULTILINE + } + try { + _pattern_regexp = Pattern.compile(pat, flag); + } catch (PatternSyntaxException ex) { + throw schemaError("pattern.syntaxerr", rule, path + "/pattern", value, null); + } + } + + + private void initEnumValue(Object value, Rule rule, String path) throws SchemaException { + if (! (value instanceof List)) { + throw schemaError("enum.notseq", rule, path + "/enum", value, null); + } + _enum = (List)value; + if (Types.isCollectionType(_type)) { + throw schemaError("enum.notscalar", rule, path, "enum:", null); + } + Map elem_table = new HashMap(); + for (Iterator it = _enum.iterator(); it.hasNext(); ) { + Object elem = it.next(); + if (! Util.isInstanceOf(elem, _type_class)) { + throw schemaError("enum.type.unmatch", rule, path + "/enum", elem, new Object[] { Types.typeName(_type) }); + } + if (elem_table.containsKey(elem)) { + throw schemaError("enum.duplicate", rule, path + "/enum", elem, null); + } + elem_table.put(elem, Boolean.TRUE); + } + } + + + private void initAssertValue(Object value, Rule rule, String path) throws SchemaException { + if (! 
(value instanceof String)) { + throw schemaError("assert.notstr", rule, path + "/assert", value, null); + } + _assert = (String)value; + if (! Util.matches(_assert, "\\bval\\b")) { + throw schemaError("assert.noval", rule, path + "/assert", value, null); + } + } + + + private void initRangeValue(Object value, Rule rule, String path) throws SchemaException { + if (! (value instanceof Map)) { + throw schemaError("range.notmap", rule, path + "/range", value, null); + } + if (Types.isCollectionType(_type) || _type.equals("bool")) { + throw schemaError("range.notscalar", rule, path, "range:", null); + } + _range = (Map)value; + for (Iterator it = _range.keySet().iterator(); it.hasNext(); ) { + Object rkey = it.next(); + Object rval = _range.get(rkey); + if (rkey.equals("max") || rkey.equals("min") || rkey.equals("max-ex") || rkey.equals("min-ex")) { + if (! Util.isInstanceOf(rval, _type_class)) { + String typename = Types.typeName(_type); + throw schemaError("range.type.unmatch", rule, path + "/range/" + rkey, rval, new Object[] { typename }); + } + } else { + throw schemaError("range.undefined", rule, path + "/range/" + rkey, rkey.toString() + ":", null); + } + } + if (_range.containsKey("max") && _range.containsKey("max-ex")) { + throw schemaError("range.twomax", rule, path + "/range", null, null); + } + if (_range.containsKey("min") && _range.containsKey("min-ex")) { + throw schemaError("range.twomin", rule, path + "/range", null, null); + } + // + Object max = _range.get("max"); + Object min = _range.get("min"); + Object max_ex = _range.get("max-ex"); + Object min_ex = _range.get("min-ex"); + Object[] args = null; + //String error_symbol = null; + if (max != null) { + if (min != null && Util.compareValues(max, min) < 0) { + args = new Object[] { max, min }; + throw schemaError("range.maxltmin", rule, path + "/range", null, args); + } else if (min_ex != null && Util.compareValues(max, min_ex) <= 0) { + args = new Object[] { max, min_ex }; + throw 
schemaError("range.maxleminex", rule, path + "/range", null, args); + } + } else if (max_ex != null) { + if (min != null && Util.compareValues(max_ex, min) <= 0) { + args = new Object[] { max_ex, min }; + throw schemaError("range.maxexlemin", rule, path + "/range", null, args); + } else if (min_ex != null && Util.compareValues(max_ex, min_ex) <= 0) { + args = new Object[] { max_ex, min_ex }; + throw schemaError("range.maxexleminex", rule, path + "/range", null, args); + } + } + } + + + private void initLengthValue(Object value, Rule rule, String path) throws SchemaException { + if (! (value instanceof Map)) { + throw schemaError("length.notmap", rule, path + "/length", value, null); + } + _length = (Map)value; + if (! (_type.equals("str") || _type.equals("text"))) { + throw schemaError("length.nottext", rule, path, "length:", null); + } + for (Iterator it = _length.keySet().iterator(); it.hasNext(); ) { + Object k = it.next(); + Object v = _length.get(k); + if (k.equals("max") || k.equals("min") || k.equals("max-ex") || k.equals("min-ex")) { + if (! 
(v instanceof Integer)) { + throw schemaError("length.notint", rule, path + "/length/" + k, v, null); + } + } else { + throw schemaError("length.undefined", rule, path + "/length/" + k, k + ":", null); + } + } + if (_length.containsKey("max") && _length.containsKey("max-ex")) { + throw schemaError("length.twomax", rule, path + "/length", null, null); + } + if (_length.containsKey("min") && _length.containsKey("min-ex")) { + throw schemaError("length.twomin", rule, path + "/length", null, null); + } + // + Integer max = (Integer)_length.get("max"); + Integer min = (Integer)_length.get("min"); + Integer max_ex = (Integer)_length.get("max-ex"); + Integer min_ex = (Integer)_length.get("min-ex"); + Object[] args = null; + //String error_symbol = null; + if (max != null) { + if (min != null && max.compareTo(min) < 0) { + args = new Object[] { max, min }; + throw schemaError("length.maxltmin", rule, path + "/length", null, args); + } else if (min_ex != null && max.compareTo(min_ex) <= 0) { + args = new Object[] { max, min_ex }; + throw schemaError("length.maxleminex", rule, path + "/length", null, args); + } + } else if (max_ex != null) { + if (min != null && max_ex.compareTo(min) <= 0) { + args = new Object[] { max_ex, min }; + throw schemaError("length.maxexlemin", rule, path + "/length", null, args); + } else if (min_ex != null && max_ex.compareTo(min_ex) <= 0) { + args = new Object[] { max_ex, min_ex }; + throw schemaError("length.maxexleminex", rule, path + "/length", null, args); + } + } + } + + + private void initIdentValue(Object value, Rule rule, String path) throws SchemaException { + if (value == null || ! 
(value instanceof Boolean)) { + throw schemaError("ident.notbool", rule, path + "/ident", value, null); + } + _ident = ((Boolean)value).booleanValue(); + _required = true; + if (Types.isCollectionType(_type)) { + throw schemaError("ident.notscalar", rule, path, "ident:", null); + } + if (path.equals("")) { + throw schemaError("ident.onroot", rule, "/", "ident:", null); + } + if (_parent == null || ! _parent.getType().equals("map")) { + throw schemaError("ident.notmap", rule, path, "ident:", null); + } + } + + + private void initUniqueValue(Object value, Rule rule, String path) throws SchemaException { + if (! (value instanceof Boolean)) { + throw schemaError("unique.notbool", rule, path + "/unique", value, null); + } + _unique = ((Boolean)value).booleanValue(); + if (Types.isCollectionType(_type)) { + throw schemaError("unique.notscalar", rule, path, "unique:", null); + } + if (path.equals("")) { + throw schemaError("unique.onroot", rule, "/", "unique:", null); + } + //if (_parent == null || _parent.getType() == "map") { + // throw schemaError("sequence.notseq", rule, path + "/unique", value); + //} + } + + + private Rule initSequenceValue(Object value, Rule rule, String path, Map rule_table) throws SchemaException { + if (value != null && ! 
(value instanceof List)) { + throw schemaError("sequence.notseq", rule, path + "/sequence", value.toString(), null); + } + _sequence = (List)value; + if (_sequence == null || _sequence.size() == 0) { + throw schemaError("sequence.noelem", rule, path + "/sequence", value, null); + } + if (_sequence.size() > 1) { + throw schemaError("sequence.toomany", rule, path + "/sequence", value, null); + } + Object elem = _sequence.get(0); + if (elem == null) { + elem = new HashMap(); + } + int i = 0; + // Rule rule; + rule = (Rule)rule_table.get(elem); + if (rule == null) { + rule = new Rule(null, this); + rule.init(elem, path + "/sequence/" + i, rule_table); + } + _sequence = new ArrayList(); + _sequence.add(rule); + return rule; + } + + + private Rule initMappingValue(Object value, Rule rule, String path, Map rule_table) throws SchemaException { + + // error check + if (value != null && !(value instanceof Map)) { + throw schemaError("mapping.notmap", rule, path + "/mapping", value.toString(), null); + } + Object default_value = null; + if (value instanceof Defaultable) { + default_value = ((Defaultable)value).getDefault(); + } + if (value == null || ((Map)value).size() == 0 && default_value == null) { + throw schemaError("mapping.noelem", rule, path + "/mapping", value, null); + } + // create hash of rule + _mapping = new DefaultableHashMap(); + if (default_value != null) { + rule = (Rule)rule_table.get(default_value); + if (rule == null) { + rule = new Rule(null, this); + rule.init(default_value, path + "/mapping/=", rule_table); + } + _mapping.setDefault(rule); + } + // put rules into _mapping + Map map = (Map)value; + for (Iterator it = map.keySet().iterator(); it.hasNext(); ) { + Object k = it.next(); + Object v = map.get(k); // DefaultableHashMap + if (v == null) { + v = new DefaultableHashMap(); + } + rule = (Rule)rule_table.get(v); + if (rule == null) { + rule = new Rule(null, this); + rule.init(v, path + "/mapping/" + k, rule_table); + } + if (k.equals("=")) { + 
_mapping.setDefault(rule); + } else { + _mapping.put(k, rule); + } + } + return rule; + } + + private Rule initReferenceValue(Object value, Rule rule, String path, Map rule_table) throws SchemaException { + + this._ref = (String)value; + if (this._ref == null) + throw schemaError("required.novalue", rule, path, "rule", null); + //make sure a rule with this name is in scope + Rule refed = (Rule) + rule_table.values().stream() + .filter(val -> ((Rule)val).getName() != null && ((Rule)val).getName().equals(this._ref)) + .findFirst() + .orElse(null); + if (null == refed) + throw schemaError("ref.nosuchrule", rule, path, "reference", new Object[] { value }); + + return rule; + } + + private void checkConfliction(Map hash, Rule rule, String path) { + if (_type.equals("seq")) { + if (! hash.containsKey("sequence")) throw schemaError("seq.nosequence", rule, path, null, null); + if (_enum != null) throw schemaError("seq.conflict", rule, path, "enum:", null); + if (_pattern != null) throw schemaError("seq.conflict", rule, path, "pattern:", null); + if (_mapping != null) throw schemaError("seq.conflict", rule, path, "mapping:", null); + if (_range != null) throw schemaError("seq.conflict", rule, path, "range:", null); + if (_length != null) throw schemaError("seq.conflict", rule, path, "length:", null); + } else if (_type.equals("map")) { + if (! 
hash.containsKey("mapping")) throw schemaError("map.nomapping", rule, path, null, null); + if (_enum != null) throw schemaError("map.conflict", rule, path, "enum:", null); + if (_pattern != null) throw schemaError("map.conflict", rule, path, "pattern:", null); + if (_sequence != null) throw schemaError("map.conflict", rule, path, "sequence:", null); + if (_range != null) throw schemaError("map.conflict", rule, path, "range:", null); + if (_length != null) throw schemaError("map.conflict", rule, path, "length:", null); + } else { + if (_sequence != null) throw schemaError("scalar.conflict", rule, path, "sequence:", null); + if (_mapping != null) throw schemaError("scalar.conflict", rule, path, "mapping:", null); + if (_enum != null) { + if (_range != null) throw schemaError("enum.conflict", rule, path, "range:", null); + if (_length != null) throw schemaError("enum.conflict", rule, path, "length:", null); + if (_pattern != null) throw schemaError("enum.conflict", rule, path, "pattern:", null); + } + } + } + + + public String inspect() { + StringBuffer sb = new StringBuffer(); + int level = 0; + Map done = new IdentityHashMap(); + inspect(sb, level, done); + return sb.toString(); + } + + private void inspect(StringBuffer sb, int level, Map done) { + done.put(this, Boolean.TRUE); + String indent = Util.repeatString(" ", level); + if (_name != null) { sb.append(indent).append("name: ").append(_name).append("\n"); } + if (_desc != null) { sb.append(indent).append("desc: ").append(_desc).append("\n"); } + if (_type != null) { sb.append(indent).append("type: ").append(_type).append("\n"); } + if (_required) { sb.append(indent).append("required: ").append(_required).append("\n"); } + if (_pattern != null) { sb.append(indent).append("pattern: ").append(_pattern).append("\n"); } + if (_pattern_regexp != null) { sb.append(indent).append("regexp: ").append(_pattern_regexp).append("\n"); } + if (_assert != null) { sb.append(indent).append("assert: 
").append(_assert).append("\n"); } + if (_ident) { sb.append(indent).append("ident: ").append(_ident).append("\n"); } + if (_unique) { sb.append(indent).append("unique: ").append(_unique).append("\n"); } + if (_enum != null) { + sb.append(indent).append("enum:\n"); + for (Iterator it = _enum.iterator(); it.hasNext(); ) { + sb.append(indent).append(" - ").append(it.next().toString()).append("\n"); + } + } + if (_range != null) { + sb.append(indent).append("range: { "); + String[] keys = new String[] { "max", "max-ex", "min", "min-ex", }; + String colon = ""; + for (int i = 0; i < keys.length; i++) { + Object val = _range.get(keys[i]); + if (val != null) { + sb.append(colon).append(keys[i]).append(": ").append(val); + colon = ", "; + } + } + sb.append(" }\n"); + } + if (_sequence != null) { + for (Iterator it = _sequence.iterator(); it.hasNext(); ) { + Rule rule = (Rule)it.next(); + if (done.containsKey(rule)) { + sb.append(indent).append(" ").append("- ...\n"); + } else { + sb.append(indent).append(" ").append("- \n"); + rule.inspect(sb, level + 2, done); + } + } + } + if (_mapping != null) { + for (Iterator it = _mapping.entrySet().iterator(); it.hasNext(); ) { + Map.Entry entry = (Map.Entry)it.next(); + Object key = entry.getKey(); + Rule rule = (Rule)entry.getValue(); + sb.append(indent).append(" ").append(Util.inspect(key)); + if (done.containsKey(rule)) { + sb.append(": ...\n"); + } else { + sb.append(":\n"); + rule.inspect(sb, level + 2, done); + } + } + } + } + +} diff --git a/javatoscachecker/kwalify/src/main/java/kwalify/SchemaException.java b/javatoscachecker/kwalify/src/main/java/kwalify/SchemaException.java new file mode 100644 index 0000000..5d53bd1 --- /dev/null +++ b/javatoscachecker/kwalify/src/main/java/kwalify/SchemaException.java @@ -0,0 +1,22 @@ +/* + * @(#)SchemaException.java $Rev: 4 $ $Release: 0.5.1 $ + * + * copyright(c) 2005 kuwata-lab all rights reserved. 
+ */ + +package kwalify; + +/** + * exception class thrown by Rule constructor + * + * @revision $Rev: 4 $ + * @release $Release: 0.5.1 $ + */ +public class SchemaException extends BaseException { + private static final long serialVersionUID = 4750598728284538818L; + + public SchemaException(String message, String ypath, Object value, Rule rule, String errorSymbol) { + super(message, ypath, value, rule, errorSymbol); + } + +} diff --git a/javatoscachecker/kwalify/src/main/java/kwalify/SyntaxException.java b/javatoscachecker/kwalify/src/main/java/kwalify/SyntaxException.java new file mode 100644 index 0000000..8c36b66 --- /dev/null +++ b/javatoscachecker/kwalify/src/main/java/kwalify/SyntaxException.java @@ -0,0 +1,28 @@ +/* + * @(#)SyntaxException.java $Rev: 4 $ $Release: 0.5.1 $ + * + * copyright(c) 2005 kuwata-lab all rights reserved. + */ + +package kwalify; + +/** + * exception class thrown by parser when syntax is wrong. + * + * @revision $Rev: 4 $ + * @release $Release: 0.5.1 $ + * @see Parser, YamlSyntaxException + */ +public class SyntaxException extends KwalifyException { + private static final long serialVersionUID = 2480059811372002740L; + + private int _linenum; + + public SyntaxException(String message, int linenum) { + super(message); + _linenum = linenum; + } + + public int getLineNumber() { return _linenum; } + public void setLineNumber(int linenum) { _linenum = linenum; } +} diff --git a/javatoscachecker/kwalify/src/main/java/kwalify/Types.java b/javatoscachecker/kwalify/src/main/java/kwalify/Types.java new file mode 100644 index 0000000..fbe655c --- /dev/null +++ b/javatoscachecker/kwalify/src/main/java/kwalify/Types.java @@ -0,0 +1,107 @@ +/* + * @(#)Types.java $Rev: 4 $ $Release: 0.5.1 $ + * + * copyright(c) 2005 kuwata-lab all rights reserved. + */ + +package kwalify; + +import java.util.Map; +import java.util.HashMap; +import java.util.List; +import java.util.Date; + +/** + * utility methods for type (str, int, ...). 
+ * + * @revision $Rev: 4 $ + * @release $Release: 0.5.1 $ + */ +public class Types { + + public static Class typeClass(String type) { + return (Class)__type_classes.get(type); + } + + public static String typeName(String type) { + String name = (String)__type_names.get(type); + if (name == null) name = type; + return name; + } + + public static final String DEFAULT_TYPE = "str"; + + public static String getDefaultType() { return DEFAULT_TYPE; } + + private static Map __type_classes; + private static Map __type_names; + static { + // + __type_classes = new HashMap(); + __type_classes.put("str", String.class); + __type_classes.put("int", Integer.class); + __type_classes.put("float", Double.class); + __type_classes.put("number", Number.class); + __type_classes.put("text", null); + __type_classes.put("bool", Boolean.class); + __type_classes.put("map", Map.class); + __type_classes.put("seq", List.class); + __type_classes.put("timestamp", Date.class); + __type_classes.put("date", Date.class); + __type_classes.put("symbol", String.class); + __type_classes.put("scalar", null); + __type_classes.put("any", Object.class); + __type_classes.put("ref", Object.class); // by jora + //__type_classes.put("null", null); + + // + __type_names = new HashMap(); + __type_names.put("map", "mapping"); + __type_names.put("seq", "sequence"); + __type_names.put("str", "string"); + __type_names.put("int", "integer"); + __type_names.put("bool", "boolean"); + __type_names.put("ref", "reference"); //by jora + } + + + public static boolean isBuiltinType(String type) { + return __type_classes.containsKey(type); + } + + public static boolean isCollectionType(String type) { + return type.equals("map") || type.equals("seq"); + } + + public static boolean isMapType(String type) { + return type.equals("map"); + } + + public static boolean isScalarType(String type) { + return !isCollectionType(type); + } + + public static boolean isCollection(Object obj) { + return obj instanceof Map || obj instanceof 
List; + } + + public static boolean isScalar(Object obj) { + return !isCollection(obj); + } + + public static boolean isCorrectType(Object obj, String type) { + Class type_class = typeClass(type); + if (type_class != null) { + return type_class.isInstance(obj); + } + if (type.equals("null")) { + return obj == null; + } else if (type.equals("text")) { + return obj instanceof String || obj instanceof Number; + } else if (type.equals("scalar")) { + return obj instanceof Number || obj instanceof String || obj instanceof Boolean || obj instanceof Date; + } + return false; + } + +} diff --git a/javatoscachecker/kwalify/src/main/java/kwalify/Util.java b/javatoscachecker/kwalify/src/main/java/kwalify/Util.java new file mode 100644 index 0000000..c27c947 --- /dev/null +++ b/javatoscachecker/kwalify/src/main/java/kwalify/Util.java @@ -0,0 +1,646 @@ +/* + * @(#)Util.java $Rev: 4 $ $Release: 0.5.1 $ + * + * copyright(c) 2005 kuwata-lab all rights reserved. + */ + +package kwalify; + +import java.util.Collections; +import java.util.List; +import java.util.ArrayList; +import java.util.Map; +import java.util.HashMap; +import java.util.IdentityHashMap; +import java.util.Iterator; +import java.util.regex.Pattern; +import java.util.regex.Matcher; +import java.util.Date; +import java.io.Reader; +import java.io.InputStreamReader; +import java.io.InputStream; +import java.io.FileInputStream; +import java.io.IOException; +import java.io.Writer; +import java.io.FileWriter; +import java.io.File; + + +/** + * set of utility methods + * + * @revision $Rev: 4 $ + * @release $Release: 0.5.1 $ + */ + +public class Util { + + /** + * inspect List or Map + */ + public static String inspect(Object obj) { + StringBuffer sb = new StringBuffer(); + inspect(obj, sb, null); + return sb.toString(); + } + + private static void inspect(Object obj, StringBuffer sb, Map done) { + if (obj == null) { + sb.append("nil"); // null? 
+ } else if (obj instanceof String) { + inspect((String)obj, sb, done); + } else if (obj instanceof Map) { + if (done == null) { + done = new IdentityHashMap(); + } + if (done.containsKey(obj)) { + sb.append("{...}"); + } else { + done.put(obj, Boolean.TRUE); + inspect((Map)obj, sb, done); + } + } else if (obj instanceof List) { + if (done == null) { + done = new IdentityHashMap(); + } + if (done.containsKey(obj)) { + sb.append("[...]"); + } else { + done.put(obj, Boolean.TRUE); + inspect((List)obj, sb, done); + } + } else { + sb.append(obj.toString()); + } + } + + private static void inspect(Map map, StringBuffer sb, Map done) { + sb.append('{'); + List list = new ArrayList(map.keySet()); + Collections.sort(list); + int i = 0; + for (Iterator it = list.iterator(); it.hasNext(); i++) { + Object key = it.next(); + Object value = map.get(key); + if (i > 0) { + sb.append(", "); + } + inspect(key, sb, done); + sb.append("=>"); + inspect(value, sb, done); + } + sb.append('}'); + } + + private static void inspect(List list, StringBuffer sb, Map done) { + sb.append('['); + int i = 0; + for (Iterator it = list.iterator(); it.hasNext(); i++) { + if (i > 0) { + sb.append(", "); + } + Object item = it.next(); + inspect(item, sb, null); + } + sb.append(']'); + } + + private static void inspect(String str, StringBuffer sb, Map done) { + sb.append('"'); + for (int i = 0; i < str.length(); i++) { + char ch = str.charAt(i); + switch (ch) { + case '"': sb.append("\\\""); break; + case '\n': sb.append("\\n"); break; + case '\r': sb.append("\\r"); break; + case '\t': sb.append("\\t"); break; + default: sb.append(ch); break; + } + } + sb.append('"'); + } + + + + /** + * + */ + protected static HashMap __patterns = new HashMap(); + + /** + * match pattern and return Mather object. + * + * ex. + *
      +     *   String target = " name = foo\n mail = foo@mail.com\n";
      +     *   Matcher m = Util.matcher(target, "^\\s*(\\w+)\\s*=\\s*(.*)$");
      +     *   while (m.find()) {
      +     *     String key   = m.group(1);
      +     *     String value = m.group(2);
      +     *   }
      +     *  
      + */ + public static Matcher matcher(String target, String regexp) { + Pattern pat = (Pattern)__patterns.get(regexp); + if (pat == null) { + pat = Pattern.compile(regexp); + __patterns.put(regexp, pat); + } + return pat.matcher(target); + } + + + public static Matcher matcher(String target, Pattern regexp) { + return regexp.matcher(target); + } + + + /** + * return if pattern matched or not. + * + * ex. + *
      +     *   String target = " name = foo\n";
      +     *   if (Util.matches(target, "^\\s*(\\w+)\\s*=\\s*(.*)$")) {
      +     *     System.out.println("matched.");
      +     *   }
      +     *  
      + */ + public static boolean matches(String target, String regexp) { + Matcher m = matcher(target, regexp); + return m.find(); + } + + + public static boolean matches(String target, Pattern regexp) { + Matcher m = regexp.matcher(target); + return m.find(); + } + + + /** + * shift array and return new array shifted + */ + public static String[] arrayShift(String[] array) { + String[] new_array = new String[array.length - 1]; + for (int i = 0; i < new_array.length; i++) { + new_array[i] = array[i + 1]; + } + return new_array; + } + + + /** + * pop up array an dreturn new array popped + */ + public static String[] arrayPop(String[] array) { + String[] new_array = new String[array.length - 1]; + for (int i = 0; i < new_array.length; i++) { + new_array[i] = array[i]; + } + return new_array; + } + + + /** + * concatenate all elements of array with separator + */ + public static String join(Object[] array, String separator) { + StringBuffer sb = new StringBuffer(); + for (int i = 0; i < array.length; i++) { + if (i > 0) { + sb.append(separator); + } + sb.append(array[i]); + } + return sb.toString(); + } + + + /** + * concatenate all elements of list with separator + */ + public static String join(List list, String separator) { + StringBuffer sb = new StringBuffer(); + int i = 0; + for (Iterator it = list.iterator(); it.hasNext(); i++) { + Object item = it.next(); + if (i > 0) { + sb.append(separator); + } + sb.append(item); + } + return sb.toString(); + } + + + /** + * split string into list of line + */ + public static List toListOfLines(String str) { + List list = new ArrayList(); + int len = str.length(); + int head = 0; + for (int i = 0; i < len; i++) { + char ch = str.charAt(i); + if (ch == '\n') { + int tail = i + 1; + String line = str.substring(head, tail); + list.add(line); + head = tail; + } + } + if (head != len) { + String line = str.substring(head, len); + list.add(line); + } + return list; + } + + + /** + * split string into array of line + */ + public 
static String[] toLines(String str) { + List list = toListOfLines(str); + String[] lines = new String[list.size()]; + list.toArray(lines); + return lines; + } + + + /** + * return object id + */ + public static Integer getId(Object obj) { + int id = System.identityHashCode(obj); + return new Integer(id); + } + + + /** + * return true if 'instance' is an instance of 'klass' + */ + public static boolean isInstanceOf(Object instance, Class klass) { + if (instance == null || klass == null) { + return false; + } + Class c = instance.getClass(); + if (klass.isInterface()) { + while (c != null) { + Class[] interfaces = c.getInterfaces(); + for (int i = 0; i < interfaces.length; i++) { + if (interfaces[i] == klass) { + return true; + } + } + c = c.getSuperclass(); + } + } else { + while (c != null) { + if (c == klass) { + return true; + } + c = c.getSuperclass(); + } + } + return false; + } + + + /** + * read file content with default encoding of system + */ + public static String readFile(String filename) throws IOException { + String charset = System.getProperty("file.encoding"); + return readFile(filename, charset); + } + + + /** + * read file content with specified encoding + */ + public static String readFile(String filename, String encoding) throws IOException { + InputStream stream = null; + String content = null; + try { + stream = new FileInputStream(filename); + content = readInputStream(stream, encoding); + } finally { + if (stream != null) { + try { + stream.close(); + } catch (IOException ignore) {} + } + } + return content; + } + + /** + * + */ + public static String readInputStream(InputStream stream) throws IOException { + String encoding = System.getProperty("file.encoding"); + return readInputStream(stream, encoding); + } + + + /** + * + */ + public static String readInputStream(InputStream stream, String encoding) throws IOException { + Reader reader = null; + String content = null; + try { + reader = new InputStreamReader(stream, encoding); + 
StringBuffer sb = new StringBuffer(); + int ch; + while ((ch = reader.read()) >= 0) { + sb.append((char)ch); + } + content = sb.toString(); + } finally { + if (reader != null) { + try { + reader.close(); + } catch (IOException ignore) {} + } + } + return content; + } + + + /** + * + */ + public static void writeFile(String filename, String content) throws IOException { + Writer writer = null; + try { + writer = new FileWriter(filename); + writer.write(content); + } finally { + if (writer != null) { + writer.close(); + } + } + } + + + public static void makeDir(String path) throws IOException { + File dir = new File(path); + dir.mkdir(); + } + + + public static void renameFile(String old_path, String new_path) throws IOException { + File old_file = new File(old_path); + File new_file = new File(new_path); + new_file.delete(); + old_file.renameTo(new_file); + } + + + public static void moveFile(String filepath, String dirpath) throws IOException { + File old_file = new File(filepath); + File new_file = new File(dirpath + "/" + old_file.getName()); + new_file.delete(); + old_file.renameTo(new_file); + } + + + public static String untabify(CharSequence str) { + return untabify(str, 8); + } + + + public static String untabify(CharSequence str, int tab_width) { + StringBuffer sb = new StringBuffer(); + int len = str.length(); + int col = -1; + for (int i = 0; i < len; i++) { + col = ++col % tab_width; + char ch = str.charAt(i); + //if (ch == '\t') { + // int n = tab_width - col; + // while (--n >= 0) + // sb.append(' '); + // col = -1; // reset col + //} else { + // sb.append(ch); + // if (ch == '\n') + // col = -1; // reset col + //} + switch (ch) { + case '\t': + int n = tab_width - col; + while (--n >= 0) { + sb.append(' '); + } + col = -1; // reset col + break; + case '\n': + sb.append(ch); + col = -1; // reset col; + break; + default: + sb.append(ch); + } + } + return sb.toString(); + } + + + private static final int VALUE_INTEGER = 1; + private static final int 
VALUE_DOUBLE = 2; + private static final int VALUE_STRING = 4; + private static final int VALUE_BOOLEAN = 8; + private static final int VALUE_DATE = 16; + private static final int VALUE_OBJECT = 32; + + public static int compare(Object value1, Object value2) throws InvalidTypeException { + if (! (value1 instanceof Comparable)) { + throw new InvalidTypeException(value1.toString() + "is not Comparable."); + } + if (! (value2 instanceof Comparable)) { + throw new InvalidTypeException(value2.toString() + "is not Comparable."); + } + return ((Comparable)value1).compareTo((Comparable)value2); + } + + public static int compareValues(Object value1, Object value2) throws InvalidTypeException { + int vtype = (valueType(value1) << 8) | valueType(value2); + switch (vtype) { + case (VALUE_INTEGER << 8) | VALUE_INTEGER : + return ((Integer)value1).compareTo((Integer)value2); + case (VALUE_DOUBLE << 8) | VALUE_DOUBLE : + return ((Double)value1).compareTo((Double)value2); + case (VALUE_STRING << 8) | VALUE_STRING : + return ((String)value1).compareTo((String)value2); + case (VALUE_BOOLEAN << 8) | VALUE_BOOLEAN : + //return ((Boolean)value1).compareTo((Boolean)value2); // J2SDK1.4 doesn't support Boolean#compareTo()! + boolean b1 = ((Boolean)value1).booleanValue(); + boolean b2 = ((Boolean)value2).booleanValue(); + return b1 == b2 ? 0 : (b1 ? 1 : -1); + //if (b1 == b2) return 0; + //if (b1 && !b2) return 1; + //if (!b1 && b2) return -1; + //assert false; + case (VALUE_DATE << 8) | VALUE_DATE : + return ((Date)value1).compareTo((Date)value2); + // + case (VALUE_DOUBLE << 8) | VALUE_INTEGER : + case (VALUE_INTEGER << 8) | VALUE_DOUBLE : + double d1 = ((Number)value1).doubleValue(); + double d2 = ((Number)value2).doubleValue(); + return d1 > d2 ? 1 : (d1 < d2 ? 
-1 : 0); + } + throw new InvalidTypeException("cannot compare '" + value1.getClass().getName() + "' with '" + value2.getClass().getName()); + } + + private static int valueType(Object value) { + if (value instanceof Integer) return VALUE_INTEGER; + if (value instanceof Double) return VALUE_DOUBLE; + if (value instanceof String) return VALUE_STRING; + if (value instanceof Boolean) return VALUE_BOOLEAN; + if (value instanceof Date) return VALUE_DATE; + return VALUE_OBJECT; + } + + public static String repeatString(String str, int times) { + StringBuffer sb = new StringBuffer(); + for (int i = 0; i < times; i++) { + sb.append(str); + } + return sb.toString(); + } + + + public static String[] subarray(String[] array, int begin, int end) { + if (begin >= end) { + return null; + } + if (end > array.length) { + end = array.length; + } + int size = end - begin; + String[] array2 = new String[size]; + int i, j; + for (i = begin, j = 0; i < end; i++, j++) { + array2[j] = array[i]; + } + return array2; + } + + public static String[] subarray(String[] array, int begin) { + if (begin < 0) { + begin += array.length; + } + return subarray(array, begin, array.length); + } + + + /** + * parse command-line options. + * + * ex. + *
      +     *   public static void main(String[] arg) {
      +     *      String singles = "hv";    // options which take no argument.
      +     *      String requireds = "fI";  // options which require an argument.
      +     *      String optionals = "i";   // options which can take optional argument.
      +     *      try {
      +     *         Object[] ret = parseCommandOptions(args, singles, requireds, optionals);
      +     *         Map options        = (Map)ret[0];
      +     *         Map properties     = (Map)ret[1];
      +     *         String[] filenames = (String[])ret[2];
      +     *         //...
      +     *      } catch (CommandOptionException ex) {
      +     *         char option = ex.getOption();
      +     *         String error_symbol = ex.getErrorSymbol();
      +     *         System.err.println("*** error: " + ex.getMessage());
      +     *      }
      +     *   }
      +     * 
      + * + * @param args command-line strings + * @param singles options which takes no argument + * @param requireds options which requires an argument. + * @param optionals otpions which can take optional argument. + * @return array of options(Map), properties(Map), and filenames(String[]) + */ + public static Object[] parseCommandOptions(String[] args, String singles, String requireds, String optionals) throws CommandOptionException { + Map options = new HashMap(); + Map properties = new HashMap(); + String[] filenames = null; + // + int i; + for (i = 0; i < args.length; i++) { + if (args[i].length() == 0 || args[i].charAt(0) != '-') { + break; + } + String opt = args[i]; + int len = opt.length(); + if (len == 1) { // option '-' means "don't parse arguments!" + i++; + break; + } + assert len > 1; + if (opt.charAt(1) == '-') { // properties (--pname=pvalue) + String pname; + Object pvalue; + int idx = opt.indexOf('='); + if (idx >= 0) { + pname = opt.substring(2, idx); + pvalue = idx + 1 < opt.length() ? 
opt.substring(idx + 1) : ""; + } else { + pname = opt.substring(2); + pvalue = Boolean.TRUE; + } + properties.put(pname, pvalue); + } else { // command-line options + for (int j = 1; j < len; j++) { + char ch = opt.charAt(j); + String chstr = Character.toString(ch); + if (singles != null && singles.indexOf(ch) >= 0) { + options.put(chstr, Boolean.TRUE); + } else if (requireds != null && requireds.indexOf(ch) >= 0) { + String arg = null; + if (++j < len) { + arg = opt.substring(j); + } else if (++i < args.length) { + arg = args[i]; + } else { + throw new CommandOptionException("-" + ch + ": filename required.", ch, "command.option.noarg"); + } + options.put(chstr, arg); + break; + } else if (optionals != null && optionals.indexOf(ch) >= 0) { + Object arg = null; + if (++j < len) { + arg = opt.substring(j); + } else { + arg = Boolean.TRUE; + } + options.put(chstr, arg); + break; + } else { + throw new CommandOptionException("-" + ch + "invalid option.", ch, "command.option.invalid"); + } + } + } + } + + // filenames + //String[] filenames = i == args.length ? new String[0] : Util.subarray(args, i); + assert i <= args.length; + int n = args.length - i; + filenames = new String[n]; + for (int j = 0; i < args.length; i++, j++) { + filenames[j] = args[i]; + } + + // + return new Object[] { options, properties, filenames }; + } + +} diff --git a/javatoscachecker/kwalify/src/main/java/kwalify/ValidationException.java b/javatoscachecker/kwalify/src/main/java/kwalify/ValidationException.java new file mode 100644 index 0000000..5723e01 --- /dev/null +++ b/javatoscachecker/kwalify/src/main/java/kwalify/ValidationException.java @@ -0,0 +1,26 @@ +/* + * @(#)ValidationException.java $Rev: 4 $ $Release: 0.5.1 $ + * + * copyright(c) 2005 kuwata-lab all rights reserved. + */ + +package kwalify; + +/** + * exception class which represents validation error. 
+ * + * @revision $Rev: 4 $ + * @release $Release: 0.5.1 $ + */ +public class ValidationException extends BaseException { + private static final long serialVersionUID = -2991121377463453973L; + + public ValidationException(String message, String path, Object value, Rule rule, String error_symbol) { + super(message, path, value, rule, error_symbol); + } + + public ValidationException(String message, String path) { + this(message, path, null, null, null); + } + +} diff --git a/javatoscachecker/kwalify/src/main/java/kwalify/Validator.java b/javatoscachecker/kwalify/src/main/java/kwalify/Validator.java new file mode 100644 index 0000000..1b3dd53 --- /dev/null +++ b/javatoscachecker/kwalify/src/main/java/kwalify/Validator.java @@ -0,0 +1,415 @@ +/* + * @(#)Validator.java $Rev: 3 $ $Release: 0.5.1 $ + * + * copyright(c) 2005 kuwata-lab all rights reserved. + */ + +package kwalify; + +import java.util.Map; +import java.util.HashMap; +import java.util.IdentityHashMap; +import java.util.List; +import java.util.LinkedList; +import java.util.ArrayList; +import java.util.Iterator; +import java.util.Collections; + +/** + * validation engine + * + * ex. + *
      + *
      + *    // load YAML document
      + *    String str = Util.readFile("document.yaml");
      + *    YamlParser parser = new YamlParser(str);
      + *    Object document = parser.parse();
      + *
      + *    // load schema
      + *    Object schema = YamlUtil.loadFile("schema.yaml");
      + *
      + *    // generate validator and validate document
      + *    Validator validator = new Validator(schema);
      + *    List errors = validator.validate(document);
      + *
      + *    // show errors
      + *    if (errors != null && errors.size() > 0) {
      + *        parser.setErrorsLineNumber(errors);
      + *        java.util.Collections.sort(errors);
      + *        for (Iterator it = errors.iterator(); it.hasNext(); ) {
      + *            ValidationError error = (ValidationError)it.next();
      + *            int linenum = error.getLineNumber();
      + *            String path = error.getPath();
      + *            String mesg = error.getMessage();
      + *            String s = "- (" + linenum + ") [" + path + "] " + mesg;
      + *            System.err.println(s);
      + *        }
      + *    }
      + *  
      + * + * @version $Rev: 3 $ + * @release $Release: 0.5.1 $ + */ +public class Validator { + private Rule _rule; + + public Validator(Map schema) throws SchemaException { + _rule = new Rule(schema); + } + + public Validator(Object schema) throws SchemaException { + _rule = new Rule(schema); + } + + public Rule getRule() { return _rule; } + //public void setRule(Rule rule) { _rule = rule; } + + public List validate(Object value) { + ValidationContext vctx = new ValidationContext(); + _validateRule(value, _rule, vctx); + return vctx.getErrors(); + } + + protected boolean preValidationHook(Object value, Rule rule, ValidationContext context) { + // nothing + return false; + } + + protected void postValidationHook(Object value, Rule rule, ValidationContext context) { + } + + private void _validateRule(Object value, Rule rule, ValidationContext context) { + //why is done necessary? why would one end up having to validate twice the same collection?? + if (Types.isCollection(value)) { + if (context.done(value)) + return; + } + if (rule.isRequired() && value == null) { + Object[] args = new Object[] { Types.typeName(rule.getType()) }; + context.addError("required.novalue", rule, value, args); + return; + } + + if (preValidationHook(value, rule, context)) { + /* a 'higher power says is ok */ + postValidationHook(value, rule, context); + return; + } + + //Class klass = rule.getTypeClass(); + //if (klass != null && value != null && !klass.isInstance(value)) { + + int n = context.errorCount(); + validateRule(value, rule, context); + if (context.errorCount() != n) { + return; + } + // + postValidationHook(value, rule, context); + } + + /* this is the default validation process */ + protected void validateRule(Object value, Rule rule, ValidationContext context) { + + if (value != null && ! 
Types.isCorrectType(value, rule.getType())) { + Object[] args = new Object[] { Types.typeName(rule.getType()) }; + context.addError("type.unmatch", rule, value, args); + return; + } + // + if (rule.getSequence() != null) { + assert value == null || value instanceof List; + validateSequence((List)value, rule, context); + } else if (rule.getMapping() != null) { + assert value == null || value instanceof Map; + validateMapping((Map)value, rule, context); + } else if (rule.getReference() != null) { + validateReference(value, rule, context); + } else { + validateScalar(value, rule, context); + } + } + + private void validateScalar(Object value, Rule rule, ValidationContext context) { + assert rule.getSequence() == null; + assert rule.getMapping() == null; + if (rule.getAssert() != null) { + //boolean result = evaluate(rule.getAssert()); + //if (! result) { + // errors.add("asset.failed", rule, path, value, new Object[] { rule.getAssert() }); + //} + } + if (rule.getEnum() != null) { + if (! rule.getEnum().contains(value)) { + //if (Util.matches(keyname, "\\A\\d+\\z") keyname = "enum"; + context.addError("enum.notexist", rule, value, new Object[] { context.getPathElement() }); + } + } + // + if (value == null) { + return; + } + // + if (rule.getPattern() != null) { + if (! 
Util.matches(value.toString(), rule.getPatternRegexp())) { + context.addError("pattern.unmatch", rule, value, new Object[] { rule.getPattern() }); + } + } + if (rule.getRange() != null) { + assert Types.isScalar(value); + Map range = rule.getRange(); + Object v; + if ((v = range.get("max")) != null && Util.compareValues(v, value) < 0) { + context.addError("range.toolarge", rule, value, new Object[] { v.toString() }); + } + if ((v = range.get("min")) != null && Util.compareValues(v, value) > 0) { + context.addError("range.toosmall", rule, value, new Object[] { v.toString() }); + } + if ((v = range.get("max-ex")) != null && Util.compareValues(v, value) <= 0) { + context.addError("range.toolargeex", rule, value, new Object[] { v.toString() }); + } + if ((v = range.get("min-ex")) != null && Util.compareValues(v, value) >= 0) { + context.addError("range.toosmallex", rule, value, new Object[] { v.toString() }); + } + } + if (rule.getLength() != null) { + assert value instanceof String; + Map length = rule.getLength(); + int len = value.toString().length(); + Integer v; + if ((v = (Integer)length.get("max")) != null && v.intValue() < len) { + context.addError("length.toolong", rule, value, new Object[] { new Integer(len), v }); + } + if ((v = (Integer)length.get("min")) != null && v.intValue() > len) { + context.addError("length.tooshort", rule, value, new Object[] { new Integer(len), v }); + } + if ((v = (Integer)length.get("max-ex")) != null && v.intValue() <= len) { + context.addError("length.toolongex", rule, value, new Object[] { new Integer(len), v }); + } + if ((v = (Integer)length.get("min-ex")) != null && v.intValue() >= len) { + context.addError("length.tooshortex", rule, value, new Object[] { new Integer(len), v }); + } + } + } + + + private void validateSequence(List sequence, Rule seq_rule, ValidationContext context) { + assert seq_rule.getSequence() instanceof List; + assert seq_rule.getSequence().size() == 1; + if (sequence == null) { + return; + } + Rule 
rule = (Rule)seq_rule.getSequence().get(0); + int i = 0; + for (Iterator it = sequence.iterator(); it.hasNext(); i++) { + Object val = it.next(); + context.addPathElement(String.valueOf(i)); + _validateRule(val, rule, context); // validate recursively + context.removePathElement(); + } + if (rule.getType().equals("map")) { + Map mapping = rule.getMapping(); + List unique_keys = new ArrayList(); + for (Iterator it = mapping.keySet().iterator(); it.hasNext(); ) { + Object key = it.next(); + Rule map_rule = (Rule)mapping.get(key); + if (map_rule.isUnique() || map_rule.isIdent()) { + unique_keys.add(key); + } + } + // + if (unique_keys.size() > 0) { + for (Iterator it = unique_keys.iterator(); it.hasNext(); ) { + Object key = it.next(); + Map table = new HashMap(); // val => index + int j = 0; + for (Iterator it2 = sequence.iterator(); it2.hasNext(); j++) { + Map map = (Map)it2.next(); + Object val = map.get(key); + if (val == null) { + continue; + } + if (table.containsKey(val)) { + String path = context.getPath(); + String prev_path = path + "/" + table.get(val) + "/" + key; + context.addPathElement(String.valueOf(j)) + .addPathElement(key.toString()); + context.addError("value.notunique", rule, val, new Object[] { prev_path }); + context.removePathElement() + .removePathElement(); + } else { + table.put(val, new Integer(j)); + } + } + } + } + } else if (rule.isUnique()) { + Map table = new HashMap(); // val => index + int j = 0; + for (Iterator it = sequence.iterator(); it.hasNext(); j++) { + Object val = it.next(); + if (val == null) { + continue; + } + if (table.containsKey(val)) { + String path = context.getPath(); + String prev_path = path + "/" + table.get(val); + context.addPathElement(String.valueOf(j)) + .addError("value.notunique", rule, val, new Object[] { prev_path }) + .removePathElement(); + } else { + table.put(val, new Integer(j)); + } + } + } + } + + + private void validateMapping(Map mapping, Rule map_rule, ValidationContext context) { + assert 
map_rule.getMapping() instanceof Map; + if (mapping == null) { + return; + } + Map m = map_rule.getMapping(); + for (Iterator it = m.keySet().iterator(); it.hasNext(); ) { + Object key = it.next(); + Rule rule = (Rule)m.get(key); + if (rule.isRequired() && !mapping.containsKey(key)) { + context.addError("required.nokey", rule, mapping, new Object[] { key }); + } + } + for (Iterator it = mapping.keySet().iterator(); it.hasNext(); ) { + Object key = it.next(); + Object val = mapping.get(key); + Rule rule = (Rule)m.get(key); + context.addPathElement(key.toString()); + if (rule == null) { + context.addError("key.undefined", rule, mapping, new Object[] { key.toString() + ":", map_rule.getName() + m.keySet().toString() }); + } else { + _validateRule(val, rule, context); // validate recursively + } + context.removePathElement(); + } + } + + private void validateReference(Object value, Rule ref_rule, ValidationContext context) { + //look only up the rule chain. This is a limitation + Rule refed = ref_rule; + while ((refed = refed.getParent()) != null) { + if (refed.getName() != null && refed.getName().equals(ref_rule.getReference())) { + validateRule(value, refed, context); + return; + } + } + context.addError("ref.nosuchrule", ref_rule, value, new Object[] { ref_rule.getReference() }); + } + + public class ValidationContext { + + private StringBuilder path = new StringBuilder(""); + private List errors = new LinkedList(); + private Map done = new IdentityHashMap(); //completion tracker + + private ValidationContext() { + } + + public String getPath() { + return this.path.toString(); + } + + public Validator getValidator() { + return Validator.this; + } + + public ValidationContext addPathElement(String theElement) { + this.path.append("/") + .append(theElement); + return this; + } + + public String getPathElement() { + int index = this.path.lastIndexOf("/"); + return index >= 0 ? 
this.path.substring(index + 1) : this.path.toString(); + } + + public ValidationContext removePathElement() { + int index = this.path.lastIndexOf("/"); + if (index >= 0) + this.path.delete(index, this.path.length()); + return this; + } + + protected ValidationContext addError(String error_symbol, Rule rule, Object value, Object[] args) { + addError( + new ValidationException( + Messages.buildMessage(error_symbol, value, args), getPath(), value, rule, error_symbol)); + return this; + } + + protected ValidationContext addError(String error_symbol, Rule rule, String relpath, Object value, Object[] args) { + addError( + new ValidationException( + Messages.buildMessage(error_symbol, value, args), getPath()+"/"+relpath, value, rule, error_symbol)); + return this; + } + + public ValidationContext addError(String message, Rule rule, Object value, Throwable cause) { + addError( + new ValidationException( + message + ((cause == null) ? "" : ", cause " + cause), getPath(), value, rule, "")); + return this; + } + + public ValidationContext addError(ValidationException theError) { + this.errors.add(theError); + return this; + } + + + public List getErrors() { + return Collections.unmodifiableList(this.errors); + } + + public boolean hasErrors() { + return this.errors.isEmpty(); + } + + public int errorCount() { + return this.errors.size(); + } + + private boolean done(Object theTarget) { + if (this.done.get(theTarget) != null) { + return true; + } + this.done.put(theTarget, Boolean.TRUE); + return false; + } + + private boolean isDone(Object theTarget) { + return this.done.get(theTarget) != null; + } + } + +/* + public static void main(String[] args) throws Exception { + Map schema = (Map)YamlUtil.loadFile("schema.yaml"); + Validator validator = new Validator(schema); + String filename = args.length > 0 ? 
args[0] : "document.yaml"; + Object document = YamlUtil.loadFile(filename); + List errors = validator.validate(document); + if (errors != null && errors.size() > 0) { + for (Iterator it = errors.iterator(); it.hasNext(); ) { + ValidationException error = (ValidationException)it.next(); + //String s = "- [" + error.getPath() + "] " + error.getMessage(); + String s = "- <" + error.getErrorSymbol() + ">[" + error.getPath() + "] " + error.getMessage(); + System.out.println(s); + } + } else { + System.out.println("validation OK."); + } + } +*/ + +} diff --git a/javatoscachecker/kwalify/src/main/java/kwalify/YamlParser.java b/javatoscachecker/kwalify/src/main/java/kwalify/YamlParser.java new file mode 100644 index 0000000..fbe351c --- /dev/null +++ b/javatoscachecker/kwalify/src/main/java/kwalify/YamlParser.java @@ -0,0 +1,156 @@ +/* + * @(#)YamlParser.java $Rev: 3 $ $Release: 0.5.1 $ + * + * copyright(c) 2005 kuwata-lab all rights reserved. + */ +package kwalify; + +import java.util.Map; +import java.util.HashMap; +import java.util.IdentityHashMap; +import java.util.List; +import java.util.ArrayList; +import java.util.Iterator; + +/** + * yaml parser which can keep line number of path. + * + * ex. + * 
      + *  String yaml_str = Util.readFile("document.yaml");
      + *  YamlParser parser = new YamlParser(yaml_str);
      + *  Object document = parser.parse();
      + *  
      + */ +public class YamlParser extends PlainYamlParser { + private Map _linenums_table = new IdentityHashMap(); // object => sequence or mapping + private int _first_linenum = -1; + private Object _document = null; + + public YamlParser(String yaml_str) { + super(yaml_str); + } + + public Object parse() throws SyntaxException { + _document = super.parse(); + return _document; + } + + protected String getLine() { + String line = super.getLine(); + if (_first_linenum < 0) { + _first_linenum = currentLineNumber(); + } + return line; + } + + + public int getPathLineNumber(String ypath) throws InvalidPathException { + if (_document == null) { + return -1; + } + if (ypath.length() == 0 || ypath.equals("/")) { + return 1; + } + String[] elems = ypath.split("/"); + String last_elem = elems.length > 0 ? elems[elems.length - 1] : null; + int i = ypath.charAt(0) == '/' ? 1 : 0; + int len = elems.length - 1; + Object c = _document; // collection + for ( /* nothing */ ; i < len; i++) { + if (c == null) { + throw new InvalidPathException(ypath); + } else if (c instanceof Map) { + c = ((Map)c).get(elems[i]); + } else if (c instanceof List) { + int index = Integer.parseInt(elems[i]); + if (index < 0 || ((List)c).size() < index) { + throw new InvalidPathException(ypath); + } + c = ((List)c).get(index); + } else { + throw new InvalidPathException(ypath); + } + } + + if (c == null) { + throw new InvalidPathException(ypath); + } + Object linenums = _linenums_table.get(c); // Map or List + int linenum = -1; + if (c instanceof Map) { + assert linenums instanceof Map; + Object d = ((Map)linenums).get(last_elem); + linenum = ((Integer)d).intValue(); + } else if (c instanceof List) { + assert linenums instanceof List; + int index = Integer.parseInt(last_elem); + if (index < 0 || ((List)linenums).size() <= index) { + throw new InvalidPathException(ypath); + } + Object d = ((List)linenums).get(index); + linenum = ((Integer)d).intValue(); + } else { + throw new 
InvalidPathException(ypath); + } + return linenum; + } + + public void setErrorsLineNumber(List errors) throws InvalidPathException { + for (Iterator it = errors.iterator(); it.hasNext(); ) { + ValidationException ex = (ValidationException)it.next(); + ex.setLineNumber(getPathLineNumber(ex.getPath())); + } + } + + protected List createSequence(int linenum) { + List seq = new ArrayList(); + _linenums_table.put(seq, new ArrayList()); + return seq; + } + + protected void addSequenceValue(List seq, Object value, int linenum) { + seq.add(value); + List linenums = (List)_linenums_table.get(seq); + linenums.add(new Integer(linenum)); + } + + protected void setSequenceValueAt(List seq, int index, Object value, int linenum) { + seq.set(index, value); + List linenums = (List)_linenums_table.get(seq); + linenums.set(index, new Integer(linenum)); + } + + protected Map createMapping(int linenum) { + Map map = super.createMapping(linenum); + _linenums_table.put(map, new HashMap()); + return map; + } + + protected void setMappingValueWith(Map map, Object key, Object value, int linenum) { + map.put(key, value); + Map linenums = (Map)_linenums_table.get(map); + assert linenums != null; + linenums.put(key, new Integer(linenum)); + } + + protected void setMappingDefault(Map map, Object value, int linenum) { + super.setMappingDefault(map, value, linenum); + Map linenums = (Map)_linenums_table.get(map); + linenums.put(new Character('='), new Integer(linenum)); + } + + protected void mergeMapping(Map map, Map map2, int linenum) { + Map linenums = (Map)_linenums_table.get(map); + Map linenums2 = (Map)_linenums_table.get(map2); + assert linenums2 != null; + for (Iterator it = map2.keySet().iterator(); it.hasNext(); ) { + Object key = it.next(); + if (! 
map.containsKey(key)) { + map.put(key, map2.get(key)); + linenums.put(key, linenums2.get(key)); + } + } + } + +} diff --git a/javatoscachecker/kwalify/src/main/java/kwalify/YamlSyntaxException.java b/javatoscachecker/kwalify/src/main/java/kwalify/YamlSyntaxException.java new file mode 100644 index 0000000..a8b1011 --- /dev/null +++ b/javatoscachecker/kwalify/src/main/java/kwalify/YamlSyntaxException.java @@ -0,0 +1,23 @@ +/* + * @(#)YamlSyntaxException.java $Rev: 4 $ $Release: 0.5.1 $ + * + * copyright(c) 2005 kuwata-lab all rights reserved. + */ + +package kwalify; + +/** + * exception class thrown by YamlParser when syntax of YAML document is wrong + * + * @revision $Rev: 4 $ + * @release $Release: 0.5.1 $ + * @see SyntaxException + */ +public class YamlSyntaxException extends SyntaxException { + private static final long serialVersionUID = 2951669148531823857L; + + public YamlSyntaxException(String message, int linenum) { + super(message, linenum); + } + +} diff --git a/javatoscachecker/kwalify/src/main/java/kwalify/YamlUtil.java b/javatoscachecker/kwalify/src/main/java/kwalify/YamlUtil.java new file mode 100644 index 0000000..90dc17c --- /dev/null +++ b/javatoscachecker/kwalify/src/main/java/kwalify/YamlUtil.java @@ -0,0 +1,62 @@ +/* + * @(#)YamlUtil.java $Rev: 3 $ $Release: 0.5.1 $ + * + * copyright(c) 2005 kuwata-lab all rights reserved. + */ + +package kwalify; + +import java.io.InputStream; +import java.io.FileInputStream; +import java.io.Reader; +import java.io.InputStreamReader; +import java.io.IOException; + +/** + * utility class for yaml. 
+ * + * @version $Rev: 3 $ + * @release $Release: 0.5.1 $ + */ +public class YamlUtil { + + public static Object load(String yaml_str) throws SyntaxException { + PlainYamlParser parser = new PlainYamlParser(yaml_str); + Object doc = parser.parse(); + return doc; + } + + public static Object loadFile(String filename, String charset) throws IOException, SyntaxException { + Object doc = null; + InputStream input = null; + Reader reader = null; + try { + input = new FileInputStream(filename); + reader = new InputStreamReader(input, charset); + StringBuffer sb = new StringBuffer(); + int ch; + while ((ch = reader.read()) >= 0) { + sb.append((char)ch); + } + doc = load(sb.toString()); + } finally { + if (reader != null) { + try { + reader.close(); + } catch (Exception ignore) {} + } + if (input != null) { + try { + input.close(); + } catch (Exception ignore) {} + } + } + return doc; + } + + public static Object loadFile(String filename) throws IOException, SyntaxException { + String encoding = System.getProperty("file.encoding"); + return loadFile(filename, encoding); + } + +} diff --git a/javatoscachecker/kwalify/src/main/java/kwalify/messages.properties b/javatoscachecker/kwalify/src/main/java/kwalify/messages.properties new file mode 100644 index 0000000..c4b45c0 --- /dev/null +++ b/javatoscachecker/kwalify/src/main/java/kwalify/messages.properties @@ -0,0 +1,110 @@ +command.help = \ + Usage1: %s [-hvstlE] -f schema.yaml doc.yaml [doc2.yaml ...]\n\ + Usage2: %s [-hvstlE] -m schema.yaml [schema2.yaml ...]\n\ + \ \ -h, --help : help\n\ + \ \ -v : version\n\ + \ \ -s : silent\n\ + \ \ -f schema.yaml : schema definition file\n\ + \ \ -m : meta-validation mode\n\ + \ \ -t : expand tab character automatically\n\ + \ \ -l : show linenumber when errored (experimental)\n\ + \ \ -E : show errors in emacs-style (implies '-l')\n +command.option.noaction = command-line option '-f' or '-m' required. +meta.empty = %s: empty. +meta.valid = %s: ok. +meta.invalid = %s: NG! 
+schema.empty = %s#%d: empty. +validation.empty = %s#%d: empty. +validation.valid = %s#%d: valid. +validation.invalid = %s#%d: INVALID +command.property.invalid = %s: invalid property. +command.option.noarg = -%s: argument required. +command.option.noschema = -%s: schema filename required. +command.option.invalid = -%s: invalid command option. +schema.notmap = schema definition is not a mapping. +key.unknown = unknown key. +type.notstr = not a string. +type.unknown = unknown type. +required.notbool = not a boolean. +pattern.notstr = not a string (or regexp) +pattern.notmatch = should be '/..../'. +pattern.syntaxerr = has regexp error. +enum.notseq = not a sequence. +enum.notscalar = not available with seq or map. +enum.type.unmatch = %s type expected. +enum.duplicate = duplicated enum value. +assert.notstr = not a string. +assert.noval = 'val' is not used. +assert.syntaxerr = expression syntax error. +range.notmap = not a mapping. +range.notscalar = is available only with scalar type. +range.notcollection = not a collection type. +range.type.unmatch = not a %s. +range.undefined = undefined key. +range.twomax = both 'max' and 'max-ex' are not available at once. +range.twomin = both 'min' and 'min-ex' are not available at once. +range.maxltmin = max '%s' is less than min '%s'. +range.maxleminex = max '%s' is less than or equal to min-ex '%s'. +range.maxexlemin = max-ex '%s' is less than or equal to min '%s'. +range.maxexleminex = max-ex '%s' is less than or equal to min-ex '%s'. +length.notmap = not a mapping. +length.nottext = is available only with string or text. +length.notint = not an integer. +length.undefined = undefined key. +length.twomax = both 'max' and 'max-ex' are not available at once. +length.twomin = both 'min' and 'min-ex' are not available at once. +length.maxltmin = max '%s' is less than min '%s'. +length.maxleminex = max '%s' is less than or equal to min-ex '%s'. +length.maxexlemin = max-ex '%s' is less than or equal to min '%s'. 
+length.maxexleminex = max-ex '%s' is less than or equal to min-ex '%s'. +ident.notbool = not a boolean. +ident.notscalar = is available only with a scalar type. +ident.onroot = is not available on root element. +ident.notmap = is available only with an element of mapping. +unique.notbool = not a boolean. +unique.notscalar = is available only with a scalar type. +unique.onroot = is not available on root element. +sequence.notseq = not a sequence. +sequence.noelem = required one element. +sequence.toomany = required just one element. +mapping.notmap = not a mapping. +mapping.noelem = required at least one element. +seq.nosequence = type 'seq' requires 'sequence:'. +seq.conflict = not available with sequence. +map.nomapping = type 'map' requires 'mapping:'. +map.conflict = not available with mapping. +scalar.conflict = not available with scalar type. +enum.conflict = not available with 'enum:'. +required.novalue = value required but none. +type.unmatch = not a %s. +assert.failed = assertion expression failed (%s). +enum.notexist = invalid %s value. +pattern.unmatch = not matched to pattern %s. +range.toolarge = too large (> max %s). +range.toosmall = too small (< min %s). +range.toolargeex = too large (>= max %s). +range.toosmallex = too small (<= min %s). +length.toolong = too long (length %d > max %d). +length.tooshort = too short (length %d < min %d). +length.toolongex = too long (length %d >= max %d). +length.tooshortex = too short (length %d <= min %d). +value.notunique = is already used at '%s'. +required.nokey = key '%s:' is required. +key.undefined = key '%s' is undefined. Expecting one of %s. +flow.hastail = flow style sequence is closed but got '%s'. +flow.eof = found EOF when parsing flow style. +flow.noseqitem = sequence item required (or last comma is extra). +flow.seqnotclosed = flow style sequence requires ']'. +flow.mapnoitem = mapping item required (or last comma is extra). +flow.mapnotclosed = flow style mapping requires '}'. 
+flow.nocolon = ':' expected but got '%s'. +anchor.duplicated = anchor '%s' is already used. +alias.extradata = alias cannot take any data. +anchor.notfound = anchor '%s' not found +sequence.noitem = sequence item is expected. +sequence.badindent = illegal indent of sequence. +mapping.noitem = mapping item is expected. +mapping.badindent = illegal indent of mapping. +collection.notcollection = not a collection +ref.nosuchrule = no rule '%s' in scope + diff --git a/javatoscachecker/kwalify/src/main/resources/kwalify/messages.properties b/javatoscachecker/kwalify/src/main/resources/kwalify/messages.properties new file mode 100644 index 0000000..c4b45c0 --- /dev/null +++ b/javatoscachecker/kwalify/src/main/resources/kwalify/messages.properties @@ -0,0 +1,110 @@ +command.help = \ + Usage1: %s [-hvstlE] -f schema.yaml doc.yaml [doc2.yaml ...]\n\ + Usage2: %s [-hvstlE] -m schema.yaml [schema2.yaml ...]\n\ + \ \ -h, --help : help\n\ + \ \ -v : version\n\ + \ \ -s : silent\n\ + \ \ -f schema.yaml : schema definition file\n\ + \ \ -m : meta-validation mode\n\ + \ \ -t : expand tab character automatically\n\ + \ \ -l : show linenumber when errored (experimental)\n\ + \ \ -E : show errors in emacs-style (implies '-l')\n +command.option.noaction = command-line option '-f' or '-m' required. +meta.empty = %s: empty. +meta.valid = %s: ok. +meta.invalid = %s: NG! +schema.empty = %s#%d: empty. +validation.empty = %s#%d: empty. +validation.valid = %s#%d: valid. +validation.invalid = %s#%d: INVALID +command.property.invalid = %s: invalid property. +command.option.noarg = -%s: argument required. +command.option.noschema = -%s: schema filename required. +command.option.invalid = -%s: invalid command option. +schema.notmap = schema definition is not a mapping. +key.unknown = unknown key. +type.notstr = not a string. +type.unknown = unknown type. +required.notbool = not a boolean. +pattern.notstr = not a string (or regexp) +pattern.notmatch = should be '/..../'. 
+pattern.syntaxerr = has regexp error. +enum.notseq = not a sequence. +enum.notscalar = not available with seq or map. +enum.type.unmatch = %s type expected. +enum.duplicate = duplicated enum value. +assert.notstr = not a string. +assert.noval = 'val' is not used. +assert.syntaxerr = expression syntax error. +range.notmap = not a mapping. +range.notscalar = is available only with scalar type. +range.notcollection = not a collection type. +range.type.unmatch = not a %s. +range.undefined = undefined key. +range.twomax = both 'max' and 'max-ex' are not available at once. +range.twomin = both 'min' and 'min-ex' are not available at once. +range.maxltmin = max '%s' is less than min '%s'. +range.maxleminex = max '%s' is less than or equal to min-ex '%s'. +range.maxexlemin = max-ex '%s' is less than or equal to min '%s'. +range.maxexleminex = max-ex '%s' is less than or equal to min-ex '%s'. +length.notmap = not a mapping. +length.nottext = is available only with string or text. +length.notint = not an integer. +length.undefined = undefined key. +length.twomax = both 'max' and 'max-ex' are not available at once. +length.twomin = both 'min' and 'min-ex' are not available at once. +length.maxltmin = max '%s' is less than min '%s'. +length.maxleminex = max '%s' is less than or equal to min-ex '%s'. +length.maxexlemin = max-ex '%s' is less than or equal to min '%s'. +length.maxexleminex = max-ex '%s' is less than or equal to min-ex '%s'. +ident.notbool = not a boolean. +ident.notscalar = is available only with a scalar type. +ident.onroot = is not available on root element. +ident.notmap = is available only with an element of mapping. +unique.notbool = not a boolean. +unique.notscalar = is available only with a scalar type. +unique.onroot = is not available on root element. +sequence.notseq = not a sequence. +sequence.noelem = required one element. +sequence.toomany = required just one element. +mapping.notmap = not a mapping. +mapping.noelem = required at least one element. 
+seq.nosequence = type 'seq' requires 'sequence:'. +seq.conflict = not available with sequence. +map.nomapping = type 'map' requires 'mapping:'. +map.conflict = not available with mapping. +scalar.conflict = not available with scalar type. +enum.conflict = not available with 'enum:'. +required.novalue = value required but none. +type.unmatch = not a %s. +assert.failed = assertion expression failed (%s). +enum.notexist = invalid %s value. +pattern.unmatch = not matched to pattern %s. +range.toolarge = too large (> max %s). +range.toosmall = too small (< min %s). +range.toolargeex = too large (>= max %s). +range.toosmallex = too small (<= min %s). +length.toolong = too long (length %d > max %d). +length.tooshort = too short (length %d < min %d). +length.toolongex = too long (length %d >= max %d). +length.tooshortex = too short (length %d <= min %d). +value.notunique = is already used at '%s'. +required.nokey = key '%s:' is required. +key.undefined = key '%s' is undefined. Expecting one of %s. +flow.hastail = flow style sequence is closed but got '%s'. +flow.eof = found EOF when parsing flow style. +flow.noseqitem = sequence item required (or last comma is extra). +flow.seqnotclosed = flow style sequence requires ']'. +flow.mapnoitem = mapping item required (or last comma is extra). +flow.mapnotclosed = flow style mapping requires '}'. +flow.nocolon = ':' expected but got '%s'. +anchor.duplicated = anchor '%s' is already used. +alias.extradata = alias cannot take any data. +anchor.notfound = anchor '%s' not found +sequence.noitem = sequence item is expected. +sequence.badindent = illegal indent of sequence. +mapping.noitem = mapping item is expected. +mapping.badindent = illegal indent of mapping. 
+collection.notcollection = not a collection +ref.nosuchrule = no rule '%s' in scope + diff --git a/javatoscachecker/pom.xml b/javatoscachecker/pom.xml new file mode 100644 index 0000000..10372d7 --- /dev/null +++ b/javatoscachecker/pom.xml @@ -0,0 +1,41 @@ + + + 4.0.0 + org.onap.tosca + checker + 0.0.1-SNAPSHOT + pom + TOSCA Checker tools + + + kwalify + checker + service + + + + scm:git:https:// + scm:git:https:// + HEAD + https:// + + + + + + JCenter + JCenter Repository + http://jcenter.bintray.com + + + + Restlet + Restlet Repository + http://maven.restlet.com + + + + diff --git a/javatoscachecker/service/README b/javatoscachecker/service/README new file mode 100644 index 0000000..94c5dde --- /dev/null +++ b/javatoscachecker/service/README @@ -0,0 +1,6 @@ +checker service usage + +A checking request is done with respect to a namespace that a client names. Each such namespace is backed by an +in-memory catalog. +A checking request can + diff --git a/javatoscachecker/service/application.properties b/javatoscachecker/service/application.properties new file mode 100644 index 0000000..ad2bf49 --- /dev/null +++ b/javatoscachecker/service/application.properties @@ -0,0 +1,8 @@ +#beans.config=config/checker.xml +spring.profiles.active=default +server.port = 8080 + +logging.level.root=INFO +logging.level.org.springframework.web=INFO +logging.level.org.onap.tosca.checker=DEBUG +logging.level.org.onap.tosca.checker.service=DEBUG diff --git a/javatoscachecker/service/pom.xml b/javatoscachecker/service/pom.xml new file mode 100644 index 0000000..7040aec --- /dev/null +++ b/javatoscachecker/service/pom.xml @@ -0,0 +1,148 @@ + + 4.0.0 + + + org.onap.tosca + checker + 0.0.1-SNAPSHOT + + Service + jar + Checker Service + + + 1.8 + yyyy-MM-dd HH:mm:ss a + + + + src/main/java + + + maven-compiler-plugin + 3.1 + + 1.8 + 1.8 + ${project.build.sourceEncoding} + + + + + org.codehaus.mojo + buildnumber-maven-plugin + 1.4 + + + validate + + create + + + + + false + false + + + + 
org.apache.maven.plugins + maven-jar-plugin + 2.4 + + + + true + + + ${buildNumber} + ${maven.build.timestamp} + + + + + + org.springframework.boot + spring-boot-maven-plugin + 1.3.3.RELEASE + + org.onap.tosca.checker.service.CheckerEngine + + + + + repackage + + + + + + + + + + + com.fasterxml.jackson.core + jackson-databind + [2.7.8,) + + + org.springframework + spring-core + [4.3.4.RELEASE,) + + + org.springframework + spring-web + [4.3.4.RELEASE,) + + + org.springframework.boot + spring-boot-starter-web + [1.3.3.RELEASE,) + + + org.springframework + spring-webmvc + [4.3.4.RELEASE,) + + + org.springframework.boot + spring-boot-autoconfigure + [1.3.3.RELEASE,) + + + org.json + json + 20160212 + + + + org.onap.tosca + Checker + 0.0.1-SNAPSHOT + + + + diff --git a/javatoscachecker/service/src/main/java/org/onap/tosca/checker/service/CachedTarget.java b/javatoscachecker/service/src/main/java/org/onap/tosca/checker/service/CachedTarget.java new file mode 100644 index 0000000..943891f --- /dev/null +++ b/javatoscachecker/service/src/main/java/org/onap/tosca/checker/service/CachedTarget.java @@ -0,0 +1,99 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. 
+ */ +package org.onap.tosca.checker.service; + +import java.io.FilterReader; +import java.io.Reader; +import java.io.StringReader; +import java.io.IOException; + +import java.net.URI; + +import org.springframework.core.io.Resource; + +import org.apache.commons.io.input.CharSequenceReader; + +import org.onap.tosca.checker.Target; + + +/** + * Cache of the target content until invalidation .. + */ +public class CachedTarget extends Target { + + private StringBuilder content; + + public CachedTarget(String theName, URI theResource) { + super(theName, theResource); + } + + public CachedTarget(Resource theResource) throws IOException { + super(theResource.getFilename(), theResource.getURI()); + } + + protected CachedTarget(CachedTarget theSource) { + super(theSource.getName(), theSource.getLocation()); + if (theSource.hasContent()) { + setContent(theSource.getContent()); + } + } + + protected boolean hasContent() { + return this.content != null; + } + + protected CharSequence getContent() { + return this.content == null ? null : this.content; + } + + protected void setContent(CharSequence theContent) { + this.content = new StringBuilder(theContent); + } + + /* + */ + public void invalidate() { + this.content = null; + setTarget(null); + } + + @Override + public Reader open() throws IOException { + return this.content != null ? + new CharSequenceReader(this.content) : + new FilterReader(super.open()) { + { + content = new StringBuilder(); + } + + public int read(char[] cbuf, int off, int len) throws IOException { + int res = super.read(cbuf, off, len); + if (res > 0) + content.append(cbuf, off, res); + return res; + } + + public int read() throws IOException { + int res = super.read(); + if (res > 0) + /* the cast here is troublesome: the original stream had an encoding and this cast has to be + done with respect to that. 
*/ + content.append((char)res); + return res; + } + + }; + } + +} + diff --git a/javatoscachecker/service/src/main/java/org/onap/tosca/checker/service/Catalogs.java b/javatoscachecker/service/src/main/java/org/onap/tosca/checker/service/Catalogs.java new file mode 100644 index 0000000..e792286 --- /dev/null +++ b/javatoscachecker/service/src/main/java/org/onap/tosca/checker/service/Catalogs.java @@ -0,0 +1,117 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. 
+ */ +package org.onap.tosca.checker.service; + +import java.net.URI; +import java.io.IOException; +import java.io.UncheckedIOException; +import java.util.Arrays; +import java.util.Set; +import java.util.Map; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Collection; +import java.util.Collections; + +import java.util.stream.Collectors; + +import java.util.logging.Logger; +import java.util.logging.Level; + +import javax.annotation.PostConstruct; +import javax.annotation.PreDestroy; + +import org.onap.tosca.checker.Checker; +import org.onap.tosca.checker.Catalog; +import org.onap.tosca.checker.Target; + +import org.springframework.beans.BeansException; +import org.springframework.beans.FatalBeanException; +import org.springframework.context.ApplicationContext; +import org.springframework.context.ApplicationContextAware; + +import org.springframework.core.io.Resource; +import org.springframework.core.io.support.ResourcePatternResolver; +import org.springframework.core.io.support.PathMatchingResourcePatternResolver; + +import org.springframework.stereotype.Component; +import org.springframework.context.annotation.Scope; +import org.springframework.boot.context.properties.ConfigurationProperties; + +@Component("catalogs") +@Scope("singleton") +@ConfigurationProperties(prefix="catalogs") +public class Catalogs implements ApplicationContextAware { + + private static Logger logger = Logger.getLogger(Catalogs.class.getName()); + + private ApplicationContext appCtx; + private ResourcePatternResolver resolver; + private Map catalogs = new HashMap(); + + public Catalogs() { + resolver = new PathMatchingResourcePatternResolver(); + } + + public void setApplicationContext(ApplicationContext theCtx) throws BeansException { + this.appCtx = theCtx; + } + + @PostConstruct + public void initCatalogs() { + logger.entering(getClass().getName(), "initCatalogs"); + + // Done + logger.log(Level.INFO, "Catalogs available"); + } + + @PreDestroy + public void 
cleanupCatalogs() { + logger.entering(getClass().getName(), "destroyCatalogs"); + } + + public Catalog getCatalog(String theName) { + +System.out.println("getCatalog: " + theName + ". Known catalogs: " + this.catalogs.keySet()); + + return this.catalogs.get(theName); + } + + public void setCatalog(String theName, Catalog theCatalog) { + this.catalogs.put(theName, theCatalog); + } + + public Catalog removeCatalog(String theName) { + return this.catalogs.remove(theName); + } + + /* configuration interface */ + public void setCatalogs(Map theCatalogs) { + //just look here at the pain of creating a checker ever time .. + for (Map.Entry catalogEntry: theCatalogs.entrySet()) { + try { + Checker checker = new Checker(); + checker.check((String)catalogEntry.getValue()); + setCatalog(catalogEntry.getKey(), checker.catalog()); + } + catch (Exception x) { + throw new FatalBeanException("Failed to add catalog " + catalogEntry.getKey() + " from " + catalogEntry.getValue(), x); + } + } + } + + public Map getCatalogs() { + return this.catalogs; + } + +} diff --git a/javatoscachecker/service/src/main/java/org/onap/tosca/checker/service/CheckerController.java b/javatoscachecker/service/src/main/java/org/onap/tosca/checker/service/CheckerController.java new file mode 100644 index 0000000..cde0323 --- /dev/null +++ b/javatoscachecker/service/src/main/java/org/onap/tosca/checker/service/CheckerController.java @@ -0,0 +1,260 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the License for + * the specific language governing permissions and limitations under the License. + */ +package org.onap.tosca.checker.service; + +import java.util.logging.Logger; +import java.util.logging.Level; + +import java.util.concurrent.Callable; + +import java.net.URI; +import java.net.URISyntaxException; + +import javax.servlet.http.HttpServletRequest; + +import org.springframework.beans.BeansException; +import org.springframework.beans.factory.BeanInitializationException; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.web.bind.annotation.RestController; +import org.springframework.web.bind.annotation.RequestMethod; +import org.springframework.web.bind.annotation.RequestBody; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.PathVariable; +import org.springframework.http.HttpStatus; +import org.springframework.http.ResponseEntity; +import org.springframework.context.ApplicationContext; +import org.springframework.context.ApplicationContextAware; + +import javax.annotation.PostConstruct; +import javax.annotation.PreDestroy; + +import org.onap.tosca.checker.Checker; +import org.onap.tosca.checker.CheckerException; +import org.onap.tosca.checker.Report; +import org.onap.tosca.checker.Catalog; +import org.onap.tosca.checker.Target; +import org.onap.tosca.checker.TargetLocator; +import org.onap.tosca.checker.CommonLocator; + + + +@RestController +public class CheckerController implements ApplicationContextAware { + + private static Logger log = Logger.getLogger(CheckerController.class.getName()); + + private ApplicationContext appCtx; + @Autowired + private Catalogs catalogs; + private Checker checker; + + public void setApplicationContext(ApplicationContext theCtx) throws BeansException { + this.appCtx = theCtx; + } + + /** + * standalone checking, everything will be forgotten + */ + @RequestMapping(value={"/check_template/"}, 
method={RequestMethod.POST}, produces={"application/json"}) + public Report validate(@RequestBody String theTemplate, + HttpServletRequest theRequest) { + +System.out.println("Posting unnamed template"); + CachedTarget target = new CachedTarget("", requestURI(theRequest)); + target.setContent(theTemplate); + ((InCatalogLocator)this.checker.getTargetLocator()).setCatalog(null); + + try { + this.checker.check(target); + } + catch (CheckerException cx) { + log.log(Level.WARNING, "Failed to check tosca template", cx); + target.getReport().add(cx); + } + + return target.getReport(); + } + + /** + * checking with respect to a namespace/catalog but the outcome is forgotten (not added to the catalog) + */ + @RequestMapping(value={"/check_template/{catalog}"}, method={RequestMethod.POST}, produces={"application/json"}) + public Report validate(@RequestBody String theTemplate, + @PathVariable(value="catalog") String theCatalog, + HttpServletRequest theRequest) + throws NoSuchCatalogException { + +System.out.println("Posting unnamed template to catalog " + theCatalog); + Catalog catalog = this.catalogs.getCatalog(theCatalog); + if (catalog == null) + throw new NoSuchCatalogException(theCatalog); + else + ((InCatalogLocator)this.checker.getTargetLocator()).setCatalog(catalog); + + CachedTarget target = new CachedTarget("", requestURI(theRequest)); + target.setContent(theTemplate); + + try { + this.checker.check(target, new Catalog(catalog)); + } + catch (CheckerException cx) { + log.log(Level.WARNING, "Failed to check tosca template", cx); + target.getReport().add(cx); + } + + return target.getReport(); + } + + /** + * checking with respect to a namespace/catalog, the outcome is registered within the catalog + */ + @RequestMapping(value={"/check_template/{catalog}/{name}"}, method={RequestMethod.POST}, produces={"application/json"}) + public Report validate(@RequestBody String theTemplate, + @PathVariable(value="catalog") String theCatalog, + @PathVariable(value="name") String 
theName, + HttpServletRequest theRequest) + throws TargetConflictException { +System.out.println("Posting template named " + theName + " to catalog " + theCatalog); + + Catalog catalog = this.catalogs.getCatalog(theCatalog); + ((InCatalogLocator)this.checker.getTargetLocator()).setCatalog(catalog); + + URI targetURI = requestURI(theRequest); + if (catalog != null) { + if (catalog.getTarget(targetURI) != null) + throw new TargetConflictException(theName, theCatalog); + } + + CachedTarget target = new CachedTarget("", targetURI); + target.setContent(theTemplate); + + try { + if (catalog == null) + this.checker.check(target); + else + this.checker.check(target, catalog); + } + catch (CheckerException cx) { + log.log(Level.WARNING, "Failed to check tosca template", cx); + target.getReport().add(cx); + } + + if (target.getReport().isEmpty() && catalog == null) + this.catalogs.setCatalog(theCatalog, checker.catalog()); + + return target.getReport(); + } + + @RequestMapping(value={"/check_template/{catalog}"}, method={RequestMethod.GET}) + public ResponseEntity validate(@PathVariable(value="catalog") String theCatalog) { + + Catalog cat = catalogs.getCatalog(theCatalog); + if (cat == null) { + return new ResponseEntity(HttpStatus.NOT_FOUND); + } + + return new ResponseEntity(HttpStatus.OK); + } + + @RequestMapping(value={"/check_template/{catalog}"}, method={RequestMethod.DELETE}) + public ResponseEntity deleteCatalog(@PathVariable(value="catalog") String theCatalog) { + + Catalog cat = catalogs.removeCatalog(theCatalog); + if (cat == null) { + return new ResponseEntity(HttpStatus.NOT_FOUND); + } + + return new ResponseEntity(HttpStatus.OK); + } + + @RequestMapping(value={"/check_template/{catalog}/{name}"}, method={RequestMethod.GET}) + public ResponseEntity retrieve(@PathVariable(value="catalog") String theCatalog, + @PathVariable(value="name") String theTemplateName, + HttpServletRequest theRequest) { + + Catalog cat = catalogs.getCatalog(theCatalog); + if (cat == 
null) { + return new ResponseEntity(HttpStatus.NOT_FOUND); + } + + Target t = cat.getTarget(requestURI(theRequest)); + if (t == null) { + return new ResponseEntity(HttpStatus.NOT_FOUND); + } + + return new ResponseEntity("{}", HttpStatus.OK); + } + + + @PostConstruct + public void initController() { + log.entering(getClass().getName(), "initCheckerController"); + + try { + this.checker = new Checker(); + this.checker.setTargetLocator(new InCatalogLocator()); + } + catch (CheckerException cx) { + log.log(Level.WARNING, "CheckerController setup failed", cx); + throw new BeanInitializationException("Failed to create a checker", cx); + } + + log.log(Level.INFO, "CheckerController started"); + } + + @PreDestroy + public void cleanupController() { + log.entering(getClass().getName(), "cleanupCheckerController"); + } + + private URI requestURI(HttpServletRequest theRequest) { + try { + return new URI(String.format("%s://%s:%d%s", theRequest.getScheme(), + theRequest.getServerName(), + theRequest.getServerPort(), + theRequest.getRequestURI().toString())); + } + catch(URISyntaxException urisx) { + throw new RuntimeException(urisx); + } + } + + /** + */ + public static class InCatalogLocator extends CommonLocator { + + private ThreadLocal catalog = new ThreadLocal(); + + public InCatalogLocator() { + } + + protected void setCatalog(Catalog theCatalog) { + this.catalog.set(theCatalog); + } + + /** */ + @Override + public Target resolve(String theName) { + Target target = null; + if (this.catalog.get() != null) + target = this.catalog.get() + .targets() + .stream() + .filter(t -> t.getName().equals(theName)) + .findFirst() + .orElse(null); + return target == null ? 
super.resolve(theName) : target; + } + } + +} diff --git a/javatoscachecker/service/src/main/java/org/onap/tosca/checker/service/CheckerEngine.java b/javatoscachecker/service/src/main/java/org/onap/tosca/checker/service/CheckerEngine.java new file mode 100644 index 0000000..24c168b --- /dev/null +++ b/javatoscachecker/service/src/main/java/org/onap/tosca/checker/service/CheckerEngine.java @@ -0,0 +1,89 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. + */ +package org.onap.tosca.checker.service; + +import org.springframework.boot.SpringApplication; +import org.springframework.boot.autoconfigure.SpringBootApplication; +import org.springframework.context.annotation.ImportResource; +//import org.springframework.context.ApplicationContext; +import org.springframework.context.support.ClassPathXmlApplicationContext; +import org.springframework.scheduling.annotation.EnableScheduling; +import org.springframework.core.env.ConfigurableEnvironment; +import org.springframework.context.ConfigurableApplicationContext; +import org.springframework.beans.factory.support.DefaultListableBeanFactory; + +/** + * This is the entry point of the validation engine. + * The maven build script packages the application as a 'all-in-one' jar. 
As such, there are 2 ways to start + * the application: + * java -jar ASC-Validator-some_version.jar + * or + * java -cp some_path/ASC-Validator-some_version.jar org.springframework.boot.loader.JarLauncher + * + * The second version is important because it gets around the known java issue/bug of not being able to specify + * additional classpath elements when using the '-jar' option (additional '-cp' options are being ignored). This + * version allows to add to the classpath the location of additional configuration. + * + * The approach in packaging the application was to include in the (all-in-one) package the known configurations + * (such that it is as easy as possible to install/setup/start the applications) while still having the possibility + * of re-specifying (some of) the configuration. + * + * All out beans are specified within 'checker.xml'; use spring profiles to distinguish between the + * configurations for each environment. + * .. + * .. + * + * The default configuration is for the development environment. In order to run a particular configuration we use the + * spring.profiles.active environment variable, as in: + * java -Dspring.profiles.active=ist -jar some_path/ASC-Validator-Service-some_version.jar + * or + * java -Dspring.profiles.active=ist -cp some_path/ASC-Validator-some_version.jar org.springframework.boot.loader.JarLauncher + * + * Note: we can have a 'common' profile to be always activated that includes those bean specifications least likely to + * change. + * + * Resource reference syntax: + * classpath*:validator.xml + * vs + * classpath:validator.xml + * + * In the first case we direct the bean loader to find *ALL* validator.xml files in the classpath and merge them + * (merge the parts for the active profile(s)) + * In the second case we instruct the bean loader to locate the *FIRST* validator.xml file and process it. All others + * (validator.xml) files will be ignored. 
+ * + * Overwriting pre-packaged bean configuration: + * The first version would theoretically allow us to specify beans configuration deltas (with respect to the + * pre-packaged version). In practice I did not manage to have this approach working reliably: it relies on a + * deterministic order of classpath processing and on overriding some of the spring framework defaults (in not so + * obvious ways, bean indexing for example). + * Currently we enable the second option, i.e. one has to provide a FULL alternative bean configuration. This + * alternative configuration can be specified as overwriting an existing profile or as an entirely new profile. + * (when using alternative profiles the resourcec reference syntax is irrelevant ..) + */ + +@SpringBootApplication(scanBasePackages={"org.onap.tosca.checker.service"}) +@ImportResource({"classpath:${beans.config?:checker}.xml"}) +@EnableScheduling +public class CheckerEngine +{ + + public CheckerEngine() { + } + + public static void main(String theArgs[]) { + SpringApplication sapp = new SpringApplication(CheckerEngine.class); + ConfigurableApplicationContext ctx = sapp.run(theArgs); + } + +} diff --git a/javatoscachecker/service/src/main/java/org/onap/tosca/checker/service/NoSuchCatalogException.java b/javatoscachecker/service/src/main/java/org/onap/tosca/checker/service/NoSuchCatalogException.java new file mode 100644 index 0000000..5f0d29d --- /dev/null +++ b/javatoscachecker/service/src/main/java/org/onap/tosca/checker/service/NoSuchCatalogException.java @@ -0,0 +1,24 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. + */ +package org.onap.tosca.checker.service; + +import org.springframework.web.bind.annotation.ResponseStatus; +import org.springframework.http.HttpStatus; + +@ResponseStatus(value=HttpStatus.PRECONDITION_FAILED, reason="No such catalog exists in the service") +public class NoSuchCatalogException extends Exception { + + public NoSuchCatalogException(String theCatalog) { + super("This service instance has not catalog with the name " + theCatalog); + } +} diff --git a/javatoscachecker/service/src/main/java/org/onap/tosca/checker/service/TargetConflictException.java b/javatoscachecker/service/src/main/java/org/onap/tosca/checker/service/TargetConflictException.java new file mode 100644 index 0000000..928412f --- /dev/null +++ b/javatoscachecker/service/src/main/java/org/onap/tosca/checker/service/TargetConflictException.java @@ -0,0 +1,24 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. 
+ */ +package org.onap.tosca.checker.service; + +import org.springframework.web.bind.annotation.ResponseStatus; +import org.springframework.http.HttpStatus; + +@ResponseStatus(value=HttpStatus.PRECONDITION_FAILED, reason="Same target exists in given catalog") +public class TargetConflictException extends Exception { + + public TargetConflictException(String theTarget, String theCatalog) { + super("A target with name " + theTarget + " already exists in " + theCatalog); + } +} diff --git a/javatoscachecker/service/src/main/java/org/onap/tosca/checker/service/TemplateChecker.java b/javatoscachecker/service/src/main/java/org/onap/tosca/checker/service/TemplateChecker.java new file mode 100644 index 0000000..6227236 --- /dev/null +++ b/javatoscachecker/service/src/main/java/org/onap/tosca/checker/service/TemplateChecker.java @@ -0,0 +1,121 @@ +/* + * Copyright (c) 2017 . All rights reserved. + * =================================================================== + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for + * the specific language governing permissions and limitations under the License. 
+ */ +package org.onap.tosca.checker.service; + +import java.net.URI; + +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.Collections; +import java.util.logging.Logger; +import java.util.logging.Level; +import java.util.concurrent.Callable; + +import org.onap.tosca.checker.Target; +import org.onap.tosca.checker.CommonLocator; +import org.onap.tosca.checker.Checker; +import org.onap.tosca.checker.Catalog; +import org.onap.tosca.checker.Report; +import org.onap.tosca.checker.CheckerException; + +import org.springframework.stereotype.Component; +import org.springframework.context.annotation.Scope; +import org.springframework.beans.factory.annotation.Autowired; + +/** + */ +@Component("templateChecker") +@Scope("prototype") +public class TemplateChecker implements Callable { + + private static Logger log = Logger.getLogger(TemplateChecker.class.getName()); + + private String template, + name, + catalog; + private URI uri; + + @Autowired + private Catalogs catalogs; + + /* */ + public TemplateChecker(String theTemplate, String theName, String theCatalog, URI theRef) { + this.template = theTemplate; + this.name = theName; + this.catalog = theCatalog; + this.uri = theRef; + } + + /* + */ + @Override + public Report call() throws Exception { + + if (log.isLoggable(Level.FINEST)) + log.log(Level.FINEST, "Processing template " + this.template); + + Catalog cat = catalogs.getCatalog(this.catalog); + + log.log(Level.FINER, "validating template " + this.name + " at " + this.uri + ". Active catalog: " + cat); + + //because this is a new checker we do not care about the top target name .. + CachedTarget target = new CachedTarget(this.name == null ? 
"" : this.name, this.uri); + target.setContent(this.template); + Checker checker = new Checker(); + checker.setTargetLocator(new InCatalogLocator(cat)); + try { + if (cat != null) { + checker.check(target, cat); + } + else { + checker.check(target); + } + } + catch (CheckerException cx) { + log.log(Level.WARNING, "Failed to check tosca template", cx); + target.getReport().add(cx); + } + + //if named template is succesfull and this is a new catalog, register it + if (this.name != null && target.getReport().isEmpty() && cat == null) { + this.catalogs.setCatalog(this.catalog, checker.catalog()); + } + + return target.getReport().isEmpty() ? null : target.getReport(); + + } + + + public static class InCatalogLocator extends CommonLocator { + + private Catalog catalog; + + public InCatalogLocator(Catalog theCatalog) { + this.catalog = theCatalog; + } + + /** */ + @Override + public Target resolve(String theName) { + Target target = null; + if (this.catalog != null) + target = this.catalog.targets() + .stream() + .filter(t -> t.getName().equals(theName)) + .findFirst() + .orElse(null); + return target == null ? super.resolve(theName) : target; + } + } +} diff --git a/javatoscachecker/service/src/main/resources/checker.xml b/javatoscachecker/service/src/main/resources/checker.xml new file mode 100644 index 0000000..b0e254a --- /dev/null +++ b/javatoscachecker/service/src/main/resources/checker.xml @@ -0,0 +1,21 @@ + + + + + + + -- cgit 1.2.3-korg